diff --git a/docs/static/resources/openapi.json b/docs/static/resources/openapi.json index 9ecf94ffd..8077af91c 100644 --- a/docs/static/resources/openapi.json +++ b/docs/static/resources/openapi.json @@ -61,6 +61,94 @@ }, "description": "Not found" }, + "410": { + "content": { + "application/json": { + "schema": { + "properties": { + "errors": { + "items": { + "properties": { + "error_type": { + "enum": [ + "FRONTEND_CSRF_ERROR", + "FRONTEND_NETWORK_ERROR", + "FRONTEND_TIMEOUT_ERROR", + "GENERIC_DB_ENGINE_ERROR", + "COLUMN_DOES_NOT_EXIST_ERROR", + "TABLE_DOES_NOT_EXIST_ERROR", + "SCHEMA_DOES_NOT_EXIST_ERROR", + "CONNECTION_INVALID_USERNAME_ERROR", + "CONNECTION_INVALID_PASSWORD_ERROR", + "CONNECTION_INVALID_HOSTNAME_ERROR", + "CONNECTION_PORT_CLOSED_ERROR", + "CONNECTION_INVALID_PORT_ERROR", + "CONNECTION_HOST_DOWN_ERROR", + "CONNECTION_ACCESS_DENIED_ERROR", + "CONNECTION_UNKNOWN_DATABASE_ERROR", + "CONNECTION_DATABASE_PERMISSIONS_ERROR", + "CONNECTION_MISSING_PARAMETERS_ERROR", + "OBJECT_DOES_NOT_EXIST_ERROR", + "SYNTAX_ERROR", + "CONNECTION_DATABASE_TIMEOUT", + "VIZ_GET_DF_ERROR", + "UNKNOWN_DATASOURCE_TYPE_ERROR", + "FAILED_FETCHING_DATASOURCE_INFO_ERROR", + "TABLE_SECURITY_ACCESS_ERROR", + "DATASOURCE_SECURITY_ACCESS_ERROR", + "DATABASE_SECURITY_ACCESS_ERROR", + "QUERY_SECURITY_ACCESS_ERROR", + "MISSING_OWNERSHIP_ERROR", + "USER_ACTIVITY_SECURITY_ACCESS_ERROR", + "BACKEND_TIMEOUT_ERROR", + "DATABASE_NOT_FOUND_ERROR", + "MISSING_TEMPLATE_PARAMS_ERROR", + "INVALID_TEMPLATE_PARAMS_ERROR", + "RESULTS_BACKEND_NOT_CONFIGURED_ERROR", + "DML_NOT_ALLOWED_ERROR", + "INVALID_CTAS_QUERY_ERROR", + "INVALID_CVAS_QUERY_ERROR", + "SQLLAB_TIMEOUT_ERROR", + "RESULTS_BACKEND_ERROR", + "ASYNC_WORKERS_ERROR", + "ADHOC_SUBQUERY_NOT_ALLOWED_ERROR", + "GENERIC_COMMAND_ERROR", + "GENERIC_BACKEND_ERROR", + "INVALID_PAYLOAD_FORMAT_ERROR", + "INVALID_PAYLOAD_SCHEMA_ERROR", + "REPORT_NOTIFICATION_ERROR" + ], + "type": "string" + }, + "extra": { + "type": "object" + }, + "level": { + 
"enum": [ + "info", + "warning", + "error" + ], + "type": "string" + }, + "message": { + "type": "string" + } + }, + "type": "object" + }, + "type": "array" + }, + "message": { + "type": "string" + } + }, + "type": "object" + } + } + }, + "description": "Gone" + }, "422": { "content": { "application/json": { @@ -746,7 +834,7 @@ "type": "array" }, "metrics": { - "description": "Aggregate expressions. Metrics can be passed as both references to datasource metrics (strings), or ad-hoc metrics which are defined only within the query object. See `ChartDataAdhocMetricSchema` for the structure of ad-hoc metrics. When metrics is undefined or null, the query is executed without a groupby. However, when metrics is an array (length >= 0), a groupby clause is added to the query.", + "description": "Aggregate expressions. Metrics can be passed as both references to datasource metrics (strings), or ad-hoc metricswhich are defined only within the query object. See `ChartDataAdhocMetricSchema` for the structure of ad-hoc metrics. When metrics is undefined or null, the query is executed without a groupby. However, when metrics is an array (length >= 0), a groupby clause is added to the query.", "items": {}, "nullable": true, "type": "array" @@ -1309,7 +1397,7 @@ "type": "boolean" }, "metrics": { - "description": "Aggregate expressions. Metrics can be passed as both references to datasource metrics (strings), or ad-hoc metrics which are defined only within the query object. See `ChartDataAdhocMetricSchema` for the structure of ad-hoc metrics.", + "description": "Aggregate expressions. Metrics can be passed as both references to datasource metrics (strings), or ad-hoc metricswhich are defined only within the query object. 
See `ChartDataAdhocMetricSchema` for the structure of ad-hoc metrics.", "items": {}, "nullable": true, "type": "array" @@ -1680,7 +1768,7 @@ "type": "string" }, "changed_by": { - "$ref": "#/components/schemas/ChartDataRestApi.get_list.User" + "$ref": "#/components/schemas/ChartDataRestApi.get_list.User1" }, "changed_by_name": { "readOnly": true @@ -1695,7 +1783,7 @@ "readOnly": true }, "created_by": { - "$ref": "#/components/schemas/ChartDataRestApi.get_list.User2" + "$ref": "#/components/schemas/ChartDataRestApi.get_list.User3" }, "created_on_delta_humanized": { "readOnly": true @@ -1742,10 +1830,10 @@ "type": "string" }, "last_saved_by": { - "$ref": "#/components/schemas/ChartDataRestApi.get_list.User3" + "$ref": "#/components/schemas/ChartDataRestApi.get_list.User" }, "owners": { - "$ref": "#/components/schemas/ChartDataRestApi.get_list.User1" + "$ref": "#/components/schemas/ChartDataRestApi.get_list.User2" }, "params": { "nullable": true, @@ -1809,6 +1897,10 @@ "maxLength": 64, "type": "string" }, + "id": { + "format": "int32", + "type": "integer" + }, "last_name": { "maxLength": 64, "type": "string" @@ -1821,6 +1913,23 @@ "type": "object" }, "ChartDataRestApi.get_list.User1": { + "properties": { + "first_name": { + "maxLength": 64, + "type": "string" + }, + "last_name": { + "maxLength": 64, + "type": "string" + } + }, + "required": [ + "first_name", + "last_name" + ], + "type": "object" + }, + "ChartDataRestApi.get_list.User2": { "properties": { "first_name": { "maxLength": 64, @@ -1846,27 +1955,6 @@ ], "type": "object" }, - "ChartDataRestApi.get_list.User2": { - "properties": { - "first_name": { - "maxLength": 64, - "type": "string" - }, - "id": { - "format": "int32", - "type": "integer" - }, - "last_name": { - "maxLength": 64, - "type": "string" - } - }, - "required": [ - "first_name", - "last_name" - ], - "type": "object" - }, "ChartDataRestApi.get_list.User3": { "properties": { "first_name": { @@ -1968,7 +2056,7 @@ "type": "string" }, 
"query_context_generation": { - "description": "The query context generation represents whether the query_contexts user generated or not so that it does not update user modfiedstate.", + "description": "The query context generation represents whether the query_contextis user generated or not so that it does not update user modfiedstate.", "nullable": true, "type": "boolean" }, @@ -2075,7 +2163,7 @@ "type": "string" }, "query_context_generation": { - "description": "The query context generation represents whether the query_contexts user generated or not so that it does not update user modfiedstate.", + "description": "The query context generation represents whether the query_contextis user generated or not so that it does not update user modfiedstate.", "nullable": true, "type": "boolean" }, @@ -2472,7 +2560,7 @@ "type": "string" }, "changed_by": { - "$ref": "#/components/schemas/ChartRestApi.get_list.User" + "$ref": "#/components/schemas/ChartRestApi.get_list.User1" }, "changed_by_name": { "readOnly": true @@ -2487,7 +2575,7 @@ "readOnly": true }, "created_by": { - "$ref": "#/components/schemas/ChartRestApi.get_list.User2" + "$ref": "#/components/schemas/ChartRestApi.get_list.User3" }, "created_on_delta_humanized": { "readOnly": true @@ -2534,10 +2622,10 @@ "type": "string" }, "last_saved_by": { - "$ref": "#/components/schemas/ChartRestApi.get_list.User3" + "$ref": "#/components/schemas/ChartRestApi.get_list.User" }, "owners": { - "$ref": "#/components/schemas/ChartRestApi.get_list.User1" + "$ref": "#/components/schemas/ChartRestApi.get_list.User2" }, "params": { "nullable": true, @@ -2601,6 +2689,10 @@ "maxLength": 64, "type": "string" }, + "id": { + "format": "int32", + "type": "integer" + }, "last_name": { "maxLength": 64, "type": "string" @@ -2613,6 +2705,23 @@ "type": "object" }, "ChartRestApi.get_list.User1": { + "properties": { + "first_name": { + "maxLength": 64, + "type": "string" + }, + "last_name": { + "maxLength": 64, + "type": "string" + } + }, + 
"required": [ + "first_name", + "last_name" + ], + "type": "object" + }, + "ChartRestApi.get_list.User2": { "properties": { "first_name": { "maxLength": 64, @@ -2638,27 +2747,6 @@ ], "type": "object" }, - "ChartRestApi.get_list.User2": { - "properties": { - "first_name": { - "maxLength": 64, - "type": "string" - }, - "id": { - "format": "int32", - "type": "integer" - }, - "last_name": { - "maxLength": 64, - "type": "string" - } - }, - "required": [ - "first_name", - "last_name" - ], - "type": "object" - }, "ChartRestApi.get_list.User3": { "properties": { "first_name": { @@ -2760,7 +2848,7 @@ "type": "string" }, "query_context_generation": { - "description": "The query context generation represents whether the query_contexts user generated or not so that it does not update user modfiedstate.", + "description": "The query context generation represents whether the query_contextis user generated or not so that it does not update user modfiedstate.", "nullable": true, "type": "boolean" }, @@ -2867,7 +2955,7 @@ "type": "string" }, "query_context_generation": { - "description": "The query context generation represents whether the query_contexts user generated or not so that it does not update user modfiedstate.", + "description": "The query context generation represents whether the query_contextis user generated or not so that it does not update user modfiedstate.", "nullable": true, "type": "boolean" }, @@ -5125,7 +5213,7 @@ "DatasetRestApi.get_list": { "properties": { "changed_by": { - "$ref": "#/components/schemas/DatasetRestApi.get_list.User" + "$ref": "#/components/schemas/DatasetRestApi.get_list.User1" }, "changed_by_name": { "readOnly": true @@ -5168,7 +5256,7 @@ "readOnly": true }, "owners": { - "$ref": "#/components/schemas/DatasetRestApi.get_list.User1" + "$ref": "#/components/schemas/DatasetRestApi.get_list.User" }, "schema": { "maxLength": 255, @@ -5207,23 +5295,6 @@ "type": "object" }, "DatasetRestApi.get_list.User": { - "properties": { - "first_name": { - 
"maxLength": 64, - "type": "string" - }, - "username": { - "maxLength": 64, - "type": "string" - } - }, - "required": [ - "first_name", - "username" - ], - "type": "object" - }, - "DatasetRestApi.get_list.User1": { "properties": { "first_name": { "maxLength": 64, @@ -5249,6 +5320,23 @@ ], "type": "object" }, + "DatasetRestApi.get_list.User1": { + "properties": { + "first_name": { + "maxLength": 64, + "type": "string" + }, + "username": { + "maxLength": 64, + "type": "string" + } + }, + "required": [ + "first_name", + "username" + ], + "type": "object" + }, "DatasetRestApi.post": { "properties": { "database": { @@ -5518,6 +5606,71 @@ }, "type": "object" }, + "ExecutePayloadSchema": { + "properties": { + "client_id": { + "nullable": true, + "type": "string" + }, + "ctas_method": { + "nullable": true, + "type": "string" + }, + "database_id": { + "format": "int32", + "type": "integer" + }, + "expand_data": { + "nullable": true, + "type": "boolean" + }, + "json": { + "nullable": true, + "type": "boolean" + }, + "queryLimit": { + "format": "int32", + "nullable": true, + "type": "integer" + }, + "runAsync": { + "nullable": true, + "type": "boolean" + }, + "schema": { + "nullable": true, + "type": "string" + }, + "select_as_cta": { + "nullable": true, + "type": "boolean" + }, + "sql": { + "type": "string" + }, + "sql_editor_id": { + "nullable": true, + "type": "string" + }, + "tab": { + "nullable": true, + "type": "string" + }, + "templateParams": { + "nullable": true, + "type": "string" + }, + "tmp_table_name": { + "nullable": true, + "type": "string" + } + }, + "required": [ + "database_id", + "sql" + ], + "type": "object" + }, "ExploreContextSchema": { "properties": { "dataset": { @@ -5970,6 +6123,45 @@ }, "type": "object" }, + "QueryExecutionResponseSchema": { + "properties": { + "columns": { + "items": { + "type": "object" + }, + "type": "array" + }, + "data": { + "items": { + "type": "object" + }, + "type": "array" + }, + "expanded_columns": { + "items": { + "type": 
"object" + }, + "type": "array" + }, + "query": { + "$ref": "#/components/schemas/QueryResult" + }, + "query_id": { + "format": "int32", + "type": "integer" + }, + "selected_columns": { + "items": { + "type": "object" + }, + "type": "array" + }, + "status": { + "type": "string" + } + }, + "type": "object" + }, "QueryRestApi.get": { "properties": { "changed_on": { @@ -6170,203 +6362,177 @@ }, "type": "object" }, - "RLSRestApi.get": { + "QueryResult": { "properties": { - "clause": { - "description": "This is the condition that will be added to the WHERE clause. For example, to only return rows for a particular client, you might define a regular filter with the clause `client_id = 9`. To display no rows unless a user belongs to a RLS filter role, a base filter can be created with the clause `1 = 0` (always false).", + "changedOn": { + "format": "date-time", "type": "string" }, - "description": { - "description": "Detailed description", + "changed_on": { "type": "string" }, - "filter_type": { - "description": "Regular filters add where clauses to queries if a user belongs to a role referenced in the filter, base filters apply filters to all queries except the roles defined in the filter, and can be used to define what users can see if no RLS filters within a filter group apply to them.", - "enum": [ - "Regular", - "Base" - ], + "ctas": { + "type": "boolean" + }, + "db": { "type": "string" }, - "group_key": { - "description": "Filters with the same group key will be ORed together within the group, while different filter groups will be ANDed together. Undefined group keys are treated as unique groups, i.e. are not grouped together. 
For example, if a table has three filters, of which two are for departments Finance and Marketing (group key = 'department'), and one refers to the region Europe (group key = 'region'), the filter clause would apply the filter (department = 'Finance' OR department = 'Marketing') AND (region = 'Europe').", - "type": "string" - }, - "id": { - "description": "Unique if of rls filter", + "dbId": { "format": "int32", "type": "integer" }, - "name": { - "description": "Name of rls filter", + "endDttm": { + "format": "float", + "type": "number" + }, + "errorMessage": { + "nullable": true, "type": "string" }, - "roles": { - "items": { - "$ref": "#/components/schemas/Roles1" - }, - "type": "array" - }, - "tables": { - "items": { - "$ref": "#/components/schemas/Tables" - }, - "type": "array" - } - }, - "type": "object" - }, - "RLSRestApi.get_list": { - "properties": { - "changed_on_delta_humanized": { - "readOnly": true - }, - "clause": { - "description": "This is the condition that will be added to the WHERE clause. For example, to only return rows for a particular client, you might define a regular filter with the clause `client_id = 9`. To display no rows unless a user belongs to a RLS filter role, a base filter can be created with the clause `1 = 0` (always false).", + "executedSql": { "type": "string" }, - "description": { - "description": "Detailed description", - "type": "string" - }, - "filter_type": { - "description": "Regular filters add where clauses to queries if a user belongs to a role referenced in the filter, base filters apply filters to all queries except the roles defined in the filter, and can be used to define what users can see if no RLS filters within a filter group apply to them.", - "enum": [ - "Regular", - "Base" - ], - "type": "string" - }, - "group_key": { - "description": "Filters with the same group key will be ORed together within the group, while different filter groups will be ANDed together. 
Undefined group keys are treated as unique groups, i.e. are not grouped together. For example, if a table has three filters, of which two are for departments Finance and Marketing (group key = 'department'), and one refers to the region Europe (group key = 'region'), the filter clause would apply the filter (department = 'Finance' OR department = 'Marketing') AND (region = 'Europe').", - "type": "string" + "extra": { + "type": "object" }, "id": { - "description": "Unique if of rls filter", + "type": "string" + }, + "limit": { "format": "int32", "type": "integer" }, - "name": { - "description": "Name of rls filter", + "limitingFactor": { "type": "string" }, - "roles": { - "items": { - "$ref": "#/components/schemas/Roles1" - }, - "type": "array" + "progress": { + "format": "int32", + "type": "integer" }, - "tables": { + "queryId": { + "format": "int32", + "type": "integer" + }, + "resultsKey": { + "type": "string" + }, + "rows": { + "format": "int32", + "type": "integer" + }, + "schema": { + "type": "string" + }, + "serverId": { + "format": "int32", + "type": "integer" + }, + "sql": { + "type": "string" + }, + "sqlEditorId": { + "type": "string" + }, + "startDttm": { + "format": "float", + "type": "number" + }, + "state": { + "type": "string" + }, + "tab": { + "type": "string" + }, + "tempSchema": { + "nullable": true, + "type": "string" + }, + "tempTable": { + "nullable": true, + "type": "string" + }, + "trackingUrl": { + "nullable": true, + "type": "string" + }, + "user": { + "type": "string" + }, + "userId": { + "format": "int32", + "type": "integer" + } + }, + "type": "object" + }, + "RecentActivity": { + "properties": { + "action": { + "description": "Action taken describing type of activity", + "type": "string" + }, + "item_title": { + "description": "Title of item", + "type": "string" + }, + "item_type": { + "description": "Type of item, e.g. 
slice or dashboard", + "type": "string" + }, + "item_url": { + "description": "URL to item", + "type": "string" + }, + "time": { + "description": "Time of activity, in epoch milliseconds", + "format": "float", + "type": "number" + }, + "time_delta_humanized": { + "description": "Human-readable description of how long ago activity took place", + "type": "string" + } + }, + "type": "object" + }, + "RecentActivityResponseSchema": { + "properties": { + "result": { + "description": "A list of recent activity objects", "items": { - "$ref": "#/components/schemas/Tables" + "$ref": "#/components/schemas/RecentActivity" }, "type": "array" } }, "type": "object" }, - "RLSRestApi.post": { + "RecentActivitySchema": { "properties": { - "clause": { - "description": "This is the condition that will be added to the WHERE clause. For example, to only return rows for a particular client, you might define a regular filter with the clause `client_id = 9`. To display no rows unless a user belongs to a RLS filter role, a base filter can be created with the clause `1 = 0` (always false).", + "action": { + "description": "Action taken describing type of activity", "type": "string" }, - "description": { - "description": "Detailed description", - "nullable": true, + "item_title": { + "description": "Title of item", "type": "string" }, - "filter_type": { - "description": "Regular filters add where clauses to queries if a user belongs to a role referenced in the filter, base filters apply filters to all queries except the roles defined in the filter, and can be used to define what users can see if no RLS filters within a filter group apply to them.", - "enum": [ - "Regular", - "Base" - ], + "item_type": { + "description": "Type of item, e.g. slice or dashboard", "type": "string" }, - "group_key": { - "description": "Filters with the same group key will be ORed together within the group, while different filter groups will be ANDed together. Undefined group keys are treated as unique groups, i.e. 
are not grouped together. For example, if a table has three filters, of which two are for departments Finance and Marketing (group key = 'department'), and one refers to the region Europe (group key = 'region'), the filter clause would apply the filter (department = 'Finance' OR department = 'Marketing') AND (region = 'Europe').", - "nullable": true, + "item_url": { + "description": "URL to item", "type": "string" }, - "name": { - "description": "Name of rls filter", - "maxLength": 255, - "minLength": 1, + "time": { + "description": "Time of activity, in epoch milliseconds", + "format": "float", + "type": "number" + }, + "time_delta_humanized": { + "description": "Human-readable description of how long ago activity took place", "type": "string" - }, - "roles": { - "description": "For regular filters, these are the roles this filter will be applied to. For base filters, these are the roles that the filter DOES NOT apply to, e.g. Admin if admin should see all data.", - "items": { - "format": "int32", - "type": "integer" - }, - "type": "array" - }, - "tables": { - "description": "These are the tables this filter will be applied to.", - "items": { - "format": "int32", - "type": "integer" - }, - "minItems": 1, - "type": "array" - } - }, - "required": [ - "clause", - "filter_type", - "name", - "roles", - "tables" - ], - "type": "object" - }, - "RLSRestApi.put": { - "properties": { - "clause": { - "description": "This is the condition that will be added to the WHERE clause. For example, to only return rows for a particular client, you might define a regular filter with the clause `client_id = 9`. 
To display no rows unless a user belongs to a RLS filter role, a base filter can be created with the clause `1 = 0` (always false).", - "type": "string" - }, - "description": { - "description": "Detailed description", - "nullable": true, - "type": "string" - }, - "filter_type": { - "description": "Regular filters add where clauses to queries if a user belongs to a role referenced in the filter, base filters apply filters to all queries except the roles defined in the filter, and can be used to define what users can see if no RLS filters within a filter group apply to them.", - "enum": [ - "Regular", - "Base" - ], - "type": "string" - }, - "group_key": { - "description": "Filters with the same group key will be ORed together within the group, while different filter groups will be ANDed together. Undefined group keys are treated as unique groups, i.e. are not grouped together. For example, if a table has three filters, of which two are for departments Finance and Marketing (group key = 'department'), and one refers to the region Europe (group key = 'region'), the filter clause would apply the filter (department = 'Finance' OR department = 'Marketing') AND (region = 'Europe').", - "nullable": true, - "type": "string" - }, - "name": { - "description": "Name of rls filter", - "maxLength": 255, - "minLength": 1, - "type": "string" - }, - "roles": { - "description": "For regular filters, these are the roles this filter will be applied to. For base filters, these are the roles that the filter DOES NOT apply to, e.g. 
Admin if admin should see all data.", - "items": { - "format": "int32", - "type": "integer" - }, - "type": "array" - }, - "tables": { - "description": "These are the tables this filter will be applied to.", - "items": { - "format": "int32", - "type": "integer" - }, - "type": "array" } }, "type": "object" @@ -8427,18 +8593,6 @@ }, "type": "object" }, - "Roles1": { - "properties": { - "id": { - "format": "int32", - "type": "integer" - }, - "name": { - "type": "string" - } - }, - "type": "object" - }, "SavedQueryRestApi.get": { "properties": { "changed_on_delta_humanized": { @@ -8944,21 +9098,6 @@ }, "type": "object" }, - "Tables": { - "properties": { - "id": { - "format": "int32", - "type": "integer" - }, - "schema": { - "type": "string" - }, - "table_name": { - "type": "string" - } - }, - "type": "object" - }, "TemporaryCachePostSchema": { "properties": { "value": { @@ -9297,6 +9436,26 @@ }, "type": "object" }, + "get_recent_activity_schema": { + "properties": { + "actions": { + "items": { + "type": "string" + }, + "type": "array" + }, + "distinct": { + "type": "boolean" + }, + "page": { + "type": "number" + }, + "page_size": { + "type": "number" + } + }, + "type": "object" + }, "get_related_schema": { "properties": { "filter": { @@ -9348,6 +9507,17 @@ }, "type": "object" }, + "sql_lab_get_results_schema": { + "properties": { + "key": { + "type": "string" + } + }, + "required": [ + "key" + ], + "type": "object" + }, "thumbnail_query_schema": { "properties": { "force": { @@ -17163,6 +17333,65 @@ ] } }, + "/api/v1/log/recent_activity/{user_id}/": { + "get": { + "parameters": [ + { + "description": "The id of the user", + "in": "path", + "name": "user_id", + "required": true, + "schema": { + "type": "integer" + } + }, + { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/get_recent_activity_schema" + } + } + }, + "in": "query", + "name": "q" + } + ], + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + 
"$ref": "#/components/schemas/RecentActivityResponseSchema" + } + } + }, + "description": "A List of recent activity objects" + }, + "400": { + "$ref": "#/components/responses/400" + }, + "401": { + "$ref": "#/components/responses/401" + }, + "403": { + "$ref": "#/components/responses/403" + }, + "500": { + "$ref": "#/components/responses/500" + } + }, + "security": [ + { + "jwt": [] + } + ], + "summary": "Get recent activity data for a user", + "tags": [ + "LogRestApi" + ] + } + }, "/api/v1/log/{pk}": { "get": { "description": "Get an item model", @@ -18567,590 +18796,6 @@ ] } }, - "/api/v1/rowlevelsecurity/": { - "delete": { - "description": "Deletes multiple RLS rules in a bulk operation.", - "parameters": [ - { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/get_delete_ids_schema" - } - } - }, - "in": "query", - "name": "q" - } - ], - "responses": { - "200": { - "content": { - "application/json": { - "schema": { - "properties": { - "message": { - "type": "string" - } - }, - "type": "object" - } - } - }, - "description": "RLS Rule bulk delete" - }, - "401": { - "$ref": "#/components/responses/401" - }, - "403": { - "$ref": "#/components/responses/403" - }, - "404": { - "$ref": "#/components/responses/404" - }, - "422": { - "$ref": "#/components/responses/422" - }, - "500": { - "$ref": "#/components/responses/500" - } - }, - "security": [ - { - "jwt": [] - } - ], - "tags": [ - "Row Level Security" - ] - }, - "get": { - "description": "Get a list of models", - "parameters": [ - { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/get_list_schema" - } - } - }, - "in": "query", - "name": "q" - } - ], - "responses": { - "200": { - "content": { - "application/json": { - "schema": { - "properties": { - "count": { - "description": "The total record count on the backend", - "type": "number" - }, - "description_columns": { - "properties": { - "column_name": { - "description": "The description for the 
column name. Will be translated by babel", - "example": "A Nice description for the column", - "type": "string" - } - }, - "type": "object" - }, - "ids": { - "description": "A list of item ids, useful when you don't know the column id", - "items": { - "type": "string" - }, - "type": "array" - }, - "label_columns": { - "properties": { - "column_name": { - "description": "The label for the column name. Will be translated by babel", - "example": "A Nice label for the column", - "type": "string" - } - }, - "type": "object" - }, - "list_columns": { - "description": "A list of columns", - "items": { - "type": "string" - }, - "type": "array" - }, - "list_title": { - "description": "A title to render. Will be translated by babel", - "example": "List Items", - "type": "string" - }, - "order_columns": { - "description": "A list of allowed columns to sort", - "items": { - "type": "string" - }, - "type": "array" - }, - "result": { - "description": "The result from the get list query", - "items": { - "$ref": "#/components/schemas/RLSRestApi.get_list" - }, - "type": "array" - } - }, - "type": "object" - } - } - }, - "description": "Items from Model" - }, - "400": { - "$ref": "#/components/responses/400" - }, - "401": { - "$ref": "#/components/responses/401" - }, - "422": { - "$ref": "#/components/responses/422" - }, - "500": { - "$ref": "#/components/responses/500" - } - }, - "security": [ - { - "jwt": [] - } - ], - "tags": [ - "Row Level Security" - ] - }, - "post": { - "description": "Create a new RLS Rule", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/RLSRestApi.post" - } - } - }, - "description": "RLS schema", - "required": true - }, - "responses": { - "201": { - "content": { - "application/json": { - "schema": { - "properties": { - "id": { - "type": "number" - }, - "result": { - "$ref": "#/components/schemas/RLSRestApi.post" - } - }, - "type": "object" - } - } - }, - "description": "RLS Rule added" - }, - "400": { - 
"$ref": "#/components/responses/400" - }, - "401": { - "$ref": "#/components/responses/401" - }, - "404": { - "$ref": "#/components/responses/404" - }, - "422": { - "$ref": "#/components/responses/422" - }, - "500": { - "$ref": "#/components/responses/500" - } - }, - "security": [ - { - "jwt": [] - } - ], - "tags": [ - "Row Level Security" - ] - } - }, - "/api/v1/rowlevelsecurity/_info": { - "get": { - "description": "Get metadata information about this API resource", - "parameters": [ - { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/get_info_schema" - } - } - }, - "in": "query", - "name": "q" - } - ], - "responses": { - "200": { - "content": { - "application/json": { - "schema": { - "properties": { - "add_columns": { - "type": "object" - }, - "edit_columns": { - "type": "object" - }, - "filters": { - "properties": { - "column_name": { - "items": { - "properties": { - "name": { - "description": "The filter name. Will be translated by babel", - "type": "string" - }, - "operator": { - "description": "The filter operation key to use on list filters", - "type": "string" - } - }, - "type": "object" - }, - "type": "array" - } - }, - "type": "object" - }, - "permissions": { - "description": "The user permissions for this API resource", - "items": { - "type": "string" - }, - "type": "array" - } - }, - "type": "object" - } - } - }, - "description": "Item from Model" - }, - "400": { - "$ref": "#/components/responses/400" - }, - "401": { - "$ref": "#/components/responses/401" - }, - "422": { - "$ref": "#/components/responses/422" - }, - "500": { - "$ref": "#/components/responses/500" - } - }, - "security": [ - { - "jwt": [] - } - ], - "tags": [ - "Row Level Security" - ] - } - }, - "/api/v1/rowlevelsecurity/related/{column_name}": { - "get": { - "parameters": [ - { - "in": "path", - "name": "column_name", - "required": true, - "schema": { - "type": "string" - } - }, - { - "content": { - "application/json": { - "schema": { - "$ref": 
"#/components/schemas/get_related_schema" - } - } - }, - "in": "query", - "name": "q" - } - ], - "responses": { - "200": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/RelatedResponseSchema" - } - } - }, - "description": "Related column data" - }, - "400": { - "$ref": "#/components/responses/400" - }, - "401": { - "$ref": "#/components/responses/401" - }, - "404": { - "$ref": "#/components/responses/404" - }, - "500": { - "$ref": "#/components/responses/500" - } - }, - "security": [ - { - "jwt": [] - } - ], - "tags": [ - "Row Level Security" - ] - } - }, - "/api/v1/rowlevelsecurity/{pk}": { - "delete": { - "parameters": [ - { - "in": "path", - "name": "pk", - "required": true, - "schema": { - "type": "integer" - } - } - ], - "responses": { - "200": { - "content": { - "application/json": { - "schema": { - "properties": { - "message": { - "type": "string" - } - }, - "type": "object" - } - } - }, - "description": "Item deleted" - }, - "404": { - "$ref": "#/components/responses/404" - }, - "422": { - "$ref": "#/components/responses/422" - }, - "500": { - "$ref": "#/components/responses/500" - } - }, - "security": [ - { - "jwt": [] - } - ], - "tags": [ - "Row Level Security" - ] - }, - "get": { - "description": "Get an item model", - "parameters": [ - { - "in": "path", - "name": "pk", - "required": true, - "schema": { - "type": "integer" - } - }, - { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/get_item_schema" - } - } - }, - "in": "query", - "name": "q" - } - ], - "responses": { - "200": { - "content": { - "application/json": { - "schema": { - "properties": { - "description_columns": { - "properties": { - "column_name": { - "description": "The description for the column name. 
Will be translated by babel", - "example": "A Nice description for the column", - "type": "string" - } - }, - "type": "object" - }, - "id": { - "description": "The item id", - "type": "string" - }, - "label_columns": { - "properties": { - "column_name": { - "description": "The label for the column name. Will be translated by babel", - "example": "A Nice label for the column", - "type": "string" - } - }, - "type": "object" - }, - "result": { - "$ref": "#/components/schemas/RLSRestApi.get" - }, - "show_columns": { - "description": "A list of columns", - "items": { - "type": "string" - }, - "type": "array" - }, - "show_title": { - "description": "A title to render. Will be translated by babel", - "example": "Show Item Details", - "type": "string" - } - }, - "type": "object" - } - } - }, - "description": "Item from Model" - }, - "400": { - "$ref": "#/components/responses/400" - }, - "401": { - "$ref": "#/components/responses/401" - }, - "404": { - "$ref": "#/components/responses/404" - }, - "422": { - "$ref": "#/components/responses/422" - }, - "500": { - "$ref": "#/components/responses/500" - } - }, - "security": [ - { - "jwt": [] - } - ], - "tags": [ - "Row Level Security" - ] - }, - "put": { - "description": "Updates an RLS Rule", - "parameters": [ - { - "description": "The Rule pk", - "in": "path", - "name": "pk", - "required": true, - "schema": { - "type": "integer" - } - } - ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/RLSRestApi.put" - } - } - }, - "description": "RLS schema", - "required": true - }, - "responses": { - "200": { - "content": { - "application/json": { - "schema": { - "properties": { - "id": { - "type": "number" - }, - "result": { - "$ref": "#/components/schemas/RLSRestApi.put" - } - }, - "type": "object" - } - } - }, - "description": "Rule changed" - }, - "400": { - "$ref": "#/components/responses/400" - }, - "401": { - "$ref": "#/components/responses/401" - }, - "403": { - "$ref": 
"#/components/responses/403" - }, - "404": { - "$ref": "#/components/responses/404" - }, - "422": { - "$ref": "#/components/responses/422" - }, - "500": { - "$ref": "#/components/responses/500" - } - }, - "security": [ - { - "jwt": [] - } - ], - "tags": [ - "Row Level Security" - ] - } - }, "/api/v1/saved_query/": { "delete": { "description": "Deletes multiple saved queries in a bulk operation.", @@ -20093,6 +19738,123 @@ ] } }, + "/api/v1/sqllab/execute/": { + "post": { + "description": "Starts the execution of a SQL query", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ExecutePayloadSchema" + } + } + }, + "description": "SQL query and params", + "required": true + }, + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/QueryExecutionResponseSchema" + } + } + }, + "description": "Query execution result" + }, + "202": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/QueryExecutionResponseSchema" + } + } + }, + "description": "Query execution result, query still running" + }, + "400": { + "$ref": "#/components/responses/400" + }, + "401": { + "$ref": "#/components/responses/401" + }, + "403": { + "$ref": "#/components/responses/403" + }, + "404": { + "$ref": "#/components/responses/404" + }, + "500": { + "$ref": "#/components/responses/500" + } + }, + "security": [ + { + "jwt": [] + } + ], + "tags": [ + "SQL Lab" + ] + } + }, + "/api/v1/sqllab/results/": { + "get": { + "parameters": [ + { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/sql_lab_get_results_schema" + } + } + }, + "in": "query", + "name": "q" + } + ], + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/QueryExecutionResponseSchema" + } + } + }, + "description": "SQL query execution result" + }, + "400": { + "$ref": "#/components/responses/400" + }, + 
"401": { + "$ref": "#/components/responses/401" + }, + "403": { + "$ref": "#/components/responses/403" + }, + "404": { + "$ref": "#/components/responses/404" + }, + "410": { + "$ref": "#/components/responses/410" + }, + "500": { + "$ref": "#/components/responses/500" + } + }, + "security": [ + { + "jwt": [] + } + ], + "summary": "Gets the result of a SQL query execution", + "tags": [ + "SQL Lab" + ] + } + }, "/api/{version}/_openapi": { "get": { "description": "Get the OpenAPI spec for a specific API version", diff --git a/superset-frontend/cypress-base/cypress/integration/sqllab/query.test.ts b/superset-frontend/cypress-base/cypress/integration/sqllab/query.test.ts index f4db901bb..9ac9e91e1 100644 --- a/superset-frontend/cypress-base/cypress/integration/sqllab/query.test.ts +++ b/superset-frontend/cypress-base/cypress/integration/sqllab/query.test.ts @@ -35,7 +35,7 @@ describe('SqlLab query panel', () => { cy.intercept({ method: 'POST', - url: '/superset/sql_json/', + url: '/api/v1/sqllab/execute/', }).as('mockSQLResponse'); cy.get('.TableSelector .Select:eq(0)').click(); @@ -148,7 +148,7 @@ describe('SqlLab query panel', () => { }); it('Create a chart from a query', () => { - cy.intercept('/superset/sql_json/').as('queryFinished'); + cy.intercept('/api/v1/sqllab/execute/').as('queryFinished'); cy.intercept('**/api/v1/explore/**').as('explore'); cy.intercept('**/api/v1/chart/**').as('chart'); diff --git a/superset-frontend/src/SqlLab/actions/sqlLab.js b/superset-frontend/src/SqlLab/actions/sqlLab.js index d6447e808..a331e462d 100644 --- a/superset-frontend/src/SqlLab/actions/sqlLab.js +++ b/superset-frontend/src/SqlLab/actions/sqlLab.js @@ -17,6 +17,7 @@ * under the License. 
*/ import shortid from 'shortid'; +import rison from 'rison'; import { SupersetClient, t } from '@superset-ui/core'; import invert from 'lodash/invert'; import mapKeys from 'lodash/mapKeys'; @@ -305,8 +306,13 @@ export function fetchQueryResults(query, displayLimit) { return function (dispatch) { dispatch(requestQueryResults(query)); + const queryParams = rison.encode({ + key: query.resultsKey, + rows: displayLimit || null, + }); + return SupersetClient.get({ - endpoint: `/superset/results/${query.resultsKey}/?rows=${displayLimit}`, + endpoint: `/api/v1/sqllab/results/?q=${queryParams}`, parseMethod: 'json-bigint', }) .then(({ json }) => dispatch(querySuccess(query, json))) @@ -347,7 +353,7 @@ export function runQuery(query) { const search = window.location.search || ''; return SupersetClient.post({ - endpoint: `/superset/sql_json/${search}`, + endpoint: `/api/v1/sqllab/execute/${search}`, body: JSON.stringify(postPayload), headers: { 'Content-Type': 'application/json' }, parseMethod: 'json-bigint', @@ -359,7 +365,11 @@ export function runQuery(query) { }) .catch(response => getClientErrorObject(response).then(error => { - let message = error.error || error.statusText || t('Unknown error'); + let message = + error.error || + error.message || + error.statusText || + t('Unknown error'); if (message.includes('CSRF token')) { message = t(COMMON_ERR_MESSAGES.SESSION_TIMED_OUT); } diff --git a/superset-frontend/src/SqlLab/actions/sqlLab.test.js b/superset-frontend/src/SqlLab/actions/sqlLab.test.js index fb6ff470b..b743f11d2 100644 --- a/superset-frontend/src/SqlLab/actions/sqlLab.test.js +++ b/superset-frontend/src/SqlLab/actions/sqlLab.test.js @@ -55,13 +55,13 @@ describe('async actions', () => { afterEach(fetchMock.resetHistory); - const fetchQueryEndpoint = 'glob:*/superset/results/*'; + const fetchQueryEndpoint = 'glob:*/api/v1/sqllab/results/*'; fetchMock.get( fetchQueryEndpoint, JSON.stringify({ data: mockBigNumber, query: { sqlEditorId: 'dfsadfs' } }), ); - const 
runQueryEndpoint = 'glob:*/superset/sql_json/'; + const runQueryEndpoint = 'glob:*/api/v1/sqllab/execute/'; fetchMock.post(runQueryEndpoint, `{ "data": ${mockBigNumber} }`); describe('saveQuery', () => { @@ -280,7 +280,8 @@ describe('async actions', () => { }; it('makes the fetch request', async () => { - const runQueryEndpointWithParams = 'glob:*/superset/sql_json/?foo=bar'; + const runQueryEndpointWithParams = + 'glob:*/api/v1/sqllab/execute/?foo=bar'; fetchMock.post( runQueryEndpointWithParams, `{ "data": ${mockBigNumber} }`, diff --git a/superset-frontend/src/SqlLab/components/SqlEditor/SqlEditor.test.jsx b/superset-frontend/src/SqlLab/components/SqlEditor/SqlEditor.test.jsx index 614878e49..f82a46072 100644 --- a/superset-frontend/src/SqlLab/components/SqlEditor/SqlEditor.test.jsx +++ b/superset-frontend/src/SqlLab/components/SqlEditor/SqlEditor.test.jsx @@ -55,7 +55,7 @@ const MOCKED_SQL_EDITOR_HEIGHT = 500; fetchMock.get('glob:*/api/v1/database/*', { result: [] }); fetchMock.get('glob:*/superset/tables/*', { options: [] }); -fetchMock.post('glob:*/sql_json/*', { result: [] }); +fetchMock.post('glob:*/sqllab/execute/*', { result: [] }); const middlewares = [thunk]; const mockStore = configureStore(middlewares); diff --git a/superset-frontend/src/SqlLab/fixtures.ts b/superset-frontend/src/SqlLab/fixtures.ts index 5c3b06a10..456a83a3f 100644 --- a/superset-frontend/src/SqlLab/fixtures.ts +++ b/superset-frontend/src/SqlLab/fixtures.ts @@ -688,6 +688,7 @@ export const query = { sql: 'SELECT * FROM something', description: 'test description', schema: 'test schema', + resultsKey: 'test', }; export const queryId = 'clientId2353'; diff --git a/superset/initialization/__init__.py b/superset/initialization/__init__.py index 870b06678..d90e7c43a 100644 --- a/superset/initialization/__init__.py +++ b/superset/initialization/__init__.py @@ -150,6 +150,7 @@ class SupersetAppInitializer: # pylint: disable=too-many-public-methods from superset.reports.api import 
ReportScheduleRestApi from superset.reports.logs.api import ReportExecutionLogRestApi from superset.security.api import SecurityRestApi + from superset.sqllab.api import SqlLabRestApi from superset.views.access_requests import AccessRequestsModelView from superset.views.alerts import AlertView, ReportView from superset.views.annotations import AnnotationLayerView @@ -219,6 +220,7 @@ class SupersetAppInitializer: # pylint: disable=too-many-public-methods appbuilder.add_api(ReportScheduleRestApi) appbuilder.add_api(ReportExecutionLogRestApi) appbuilder.add_api(SavedQueryRestApi) + appbuilder.add_api(SqlLabRestApi) # # Setup regular views # diff --git a/superset/sqllab/api.py b/superset/sqllab/api.py new file mode 100644 index 000000000..283c3ab63 --- /dev/null +++ b/superset/sqllab/api.py @@ -0,0 +1,248 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+import logging +from typing import Any, cast, Dict, Optional + +import simplejson as json +from flask import request +from flask_appbuilder.api import expose, protect, rison +from flask_appbuilder.models.sqla.interface import SQLAInterface +from marshmallow import ValidationError + +from superset import app, is_feature_enabled +from superset.databases.dao import DatabaseDAO +from superset.extensions import event_logger +from superset.jinja_context import get_template_processor +from superset.models.sql_lab import Query +from superset.queries.dao import QueryDAO +from superset.sql_lab import get_sql_results +from superset.sqllab.command_status import SqlJsonExecutionStatus +from superset.sqllab.commands.execute import CommandResult, ExecuteSqlCommand +from superset.sqllab.commands.results import SqlExecutionResultsCommand +from superset.sqllab.exceptions import ( + QueryIsForbiddenToAccessException, + SqlLabException, +) +from superset.sqllab.execution_context_convertor import ExecutionContextConvertor +from superset.sqllab.query_render import SqlQueryRenderImpl +from superset.sqllab.schemas import ( + ExecutePayloadSchema, + QueryExecutionResponseSchema, + sql_lab_get_results_schema, +) +from superset.sqllab.sql_json_executer import ( + ASynchronousSqlJsonExecutor, + SqlJsonExecutor, + SynchronousSqlJsonExecutor, +) +from superset.sqllab.sqllab_execution_context import SqlJsonExecutionContext +from superset.sqllab.validators import CanAccessQueryValidatorImpl +from superset.superset_typing import FlaskResponse +from superset.utils import core as utils +from superset.views.base import json_success +from superset.views.base_api import BaseSupersetApi, requires_json, statsd_metrics + +config = app.config +logger = logging.getLogger(__name__) + + +class SqlLabRestApi(BaseSupersetApi): + datamodel = SQLAInterface(Query) + + resource_name = "sqllab" + allow_browser_login = True + + class_permission_name = "Query" + + execute_model_schema = ExecutePayloadSchema() + + 
apispec_parameter_schemas = { + "sql_lab_get_results_schema": sql_lab_get_results_schema, + } + openapi_spec_tag = "SQL Lab" + openapi_spec_component_schemas = ( + ExecutePayloadSchema, + QueryExecutionResponseSchema, + ) + + @expose("/results/") + @protect() + @statsd_metrics + @rison(sql_lab_get_results_schema) + @event_logger.log_this_with_context( + action=lambda self, *args, **kwargs: f"{self.__class__.__name__}" + f".get_results", + log_to_statsd=False, + ) + def get_results(self, **kwargs: Any) -> FlaskResponse: + """Gets the result of a SQL query execution + --- + get: + summary: >- + Gets the result of a SQL query execution + parameters: + - in: query + name: q + content: + application/json: + schema: + $ref: '#/components/schemas/sql_lab_get_results_schema' + responses: + 200: + description: SQL query execution result + content: + application/json: + schema: + $ref: '#/components/schemas/QueryExecutionResponseSchema' + 400: + $ref: '#/components/responses/400' + 401: + $ref: '#/components/responses/401' + 403: + $ref: '#/components/responses/403' + 404: + $ref: '#/components/responses/404' + 410: + $ref: '#/components/responses/410' + 500: + $ref: '#/components/responses/500' + """ + params = kwargs["rison"] + key = params.get("key") + rows = params.get("rows") + result = SqlExecutionResultsCommand(key=key, rows=rows).run() + # return the result without special encoding + return json_success( + json.dumps( + result, default=utils.json_iso_dttm_ser, ignore_nan=True, encoding=None + ), + 200, + ) + + @expose("/execute/", methods=["POST"]) + @protect() + @statsd_metrics + @requires_json + @event_logger.log_this_with_context( + action=lambda self, *args, **kwargs: f"{self.__class__.__name__}" + f".execute_sql_query", + log_to_statsd=False, + ) + def execute_sql_query(self) -> FlaskResponse: + """Executes a SQL query + --- + post: + description: >- + Starts the execution of a SQL query + requestBody: + description: SQL query and params + required: true + content: + 
application/json: + schema: + $ref: '#/components/schemas/ExecutePayloadSchema' + responses: + 200: + description: Query execution result + content: + application/json: + schema: + $ref: '#/components/schemas/QueryExecutionResponseSchema' + 202: + description: Query execution result, query still running + content: + application/json: + schema: + $ref: '#/components/schemas/QueryExecutionResponseSchema' + 400: + $ref: '#/components/responses/400' + 401: + $ref: '#/components/responses/401' + 403: + $ref: '#/components/responses/403' + 404: + $ref: '#/components/responses/404' + 500: + $ref: '#/components/responses/500' + """ + try: + self.execute_model_schema.load(request.json) + except ValidationError as error: + return self.response_400(message=error.messages) + + try: + log_params = { + "user_agent": cast(Optional[str], request.headers.get("USER_AGENT")) + } + execution_context = SqlJsonExecutionContext(request.json) + command = self._create_sql_json_command(execution_context, log_params) + command_result: CommandResult = command.run() + + response_status = ( + 202 + if command_result["status"] == SqlJsonExecutionStatus.QUERY_IS_RUNNING + else 200 + ) + # return the execution result without special encoding + return json_success(command_result["payload"], response_status) + except SqlLabException as ex: + payload = {"errors": [ex.to_dict()]} + + response_status = ( + 403 if isinstance(ex, QueryIsForbiddenToAccessException) else ex.status + ) + return self.response(response_status, **payload) + + @staticmethod + def _create_sql_json_command( + execution_context: SqlJsonExecutionContext, log_params: Optional[Dict[str, Any]] + ) -> ExecuteSqlCommand: + query_dao = QueryDAO() + sql_json_executor = SqlLabRestApi._create_sql_json_executor( + execution_context, query_dao + ) + execution_context_convertor = ExecutionContextConvertor() + execution_context_convertor.set_max_row_in_display( + int(config.get("DISPLAY_MAX_ROW")) # type: ignore + ) + return ExecuteSqlCommand( 
+ execution_context, + query_dao, + DatabaseDAO(), + CanAccessQueryValidatorImpl(), + SqlQueryRenderImpl(get_template_processor), + sql_json_executor, + execution_context_convertor, + config.get("SQLLAB_CTAS_NO_LIMIT"), + log_params, + ) + + @staticmethod + def _create_sql_json_executor( + execution_context: SqlJsonExecutionContext, query_dao: QueryDAO + ) -> SqlJsonExecutor: + sql_json_executor: SqlJsonExecutor + if execution_context.is_run_asynchronous(): + sql_json_executor = ASynchronousSqlJsonExecutor(query_dao, get_sql_results) + else: + sql_json_executor = SynchronousSqlJsonExecutor( + query_dao, + get_sql_results, + config.get("SQLLAB_TIMEOUT"), # type: ignore + is_feature_enabled("SQLLAB_BACKEND_PERSISTENCE"), + ) + return sql_json_executor diff --git a/superset/sqllab/command.py b/superset/sqllab/commands/execute.py similarity index 100% rename from superset/sqllab/command.py rename to superset/sqllab/commands/execute.py diff --git a/superset/sqllab/commands/results.py b/superset/sqllab/commands/results.py new file mode 100644 index 000000000..9aef5ab46 --- /dev/null +++ b/superset/sqllab/commands/results.py @@ -0,0 +1,131 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# pylint: disable=too-few-public-methods, too-many-arguments +from __future__ import annotations + +import logging +from typing import Any, cast, Dict, Optional + +from flask_babel import gettext as __, lazy_gettext as _ + +from superset import app, db, results_backend, results_backend_use_msgpack +from superset.commands.base import BaseCommand +from superset.errors import ErrorLevel, SupersetError, SupersetErrorType +from superset.exceptions import SerializationError, SupersetErrorException +from superset.models.sql_lab import Query +from superset.sqllab.utils import apply_display_max_row_configuration_if_require +from superset.utils import core as utils +from superset.utils.dates import now_as_float +from superset.views.utils import _deserialize_results_payload + +config = app.config +SQLLAB_QUERY_COST_ESTIMATE_TIMEOUT = config["SQLLAB_QUERY_COST_ESTIMATE_TIMEOUT"] +stats_logger = config["STATS_LOGGER"] + +logger = logging.getLogger(__name__) + + +class SqlExecutionResultsCommand(BaseCommand): + _key: str + _rows: Optional[int] + _blob: Any + _query: Query + + def __init__( + self, + key: str, + rows: Optional[int] = None, + ) -> None: + self._key = key + self._rows = rows + + def validate(self) -> None: + if not results_backend: + raise SupersetErrorException( + SupersetError( + message=__("Results backend is not configured."), + error_type=SupersetErrorType.RESULTS_BACKEND_NOT_CONFIGURED_ERROR, + level=ErrorLevel.ERROR, + ) + ) + + read_from_results_backend_start = now_as_float() + self._blob = results_backend.get(self._key) + stats_logger.timing( + "sqllab.query.results_backend_read", + now_as_float() - read_from_results_backend_start, + ) + + if not self._blob: + raise SupersetErrorException( + SupersetError( + message=__( + "Data could not be retrieved from the results backend. You " + "need to re-run the original query." 
+ ), + error_type=SupersetErrorType.RESULTS_BACKEND_ERROR, + level=ErrorLevel.ERROR, + ), + status=410, + ) + + self._query = ( + db.session.query(Query).filter_by(results_key=self._key).one_or_none() + ) + if self._query is None: + raise SupersetErrorException( + SupersetError( + message=__( + "The query associated with these results could not be found. " + "You need to re-run the original query." + ), + error_type=SupersetErrorType.RESULTS_BACKEND_ERROR, + level=ErrorLevel.ERROR, + ), + status=404, + ) + + def run( + self, + ) -> Dict[str, Any]: + """Runs arbitrary sql and returns data as json""" + self.validate() + payload = utils.zlib_decompress( + self._blob, decode=not results_backend_use_msgpack + ) + try: + obj = _deserialize_results_payload( + payload, self._query, cast(bool, results_backend_use_msgpack) + ) + except SerializationError as ex: + raise SupersetErrorException( + SupersetError( + message=__( + "Data could not be deserialized from the results backend. The " + "storage format might have changed, rendering the old data " + "stale. You need to re-run the original query."
+ ), + error_type=SupersetErrorType.RESULTS_BACKEND_ERROR, + level=ErrorLevel.ERROR, + ), + status=404, + ) from ex + + if self._rows: + obj = apply_display_max_row_configuration_if_require(obj, self._rows) + + return obj diff --git a/superset/sqllab/query_render.py b/superset/sqllab/query_render.py index df631784e..2854a7e39 100644 --- a/superset/sqllab/query_render.py +++ b/superset/sqllab/query_render.py @@ -25,7 +25,7 @@ from jinja2.meta import find_undeclared_variables from superset import is_feature_enabled from superset.errors import SupersetErrorType -from superset.sqllab.command import SqlQueryRender +from superset.sqllab.commands.execute import SqlQueryRender from superset.sqllab.exceptions import SqlLabException from superset.utils import core as utils diff --git a/superset/sqllab/schemas.py b/superset/sqllab/schemas.py new file mode 100644 index 000000000..f238fda5c --- /dev/null +++ b/superset/sqllab/schemas.py @@ -0,0 +1,83 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from marshmallow import fields, Schema + +sql_lab_get_results_schema = { + "type": "object", + "properties": { + "key": {"type": "string"}, + }, + "required": ["key"], +} + + +class ExecutePayloadSchema(Schema): + database_id = fields.Integer(required=True) + sql = fields.String(required=True) + client_id = fields.String(allow_none=True) + queryLimit = fields.Integer(allow_none=True) + sql_editor_id = fields.String(allow_none=True) + schema = fields.String(allow_none=True) + tab = fields.String(allow_none=True) + ctas_method = fields.String(allow_none=True) + templateParams = fields.String(allow_none=True) + tmp_table_name = fields.String(allow_none=True) + select_as_cta = fields.Boolean(allow_none=True) + json = fields.Boolean(allow_none=True) + runAsync = fields.Boolean(allow_none=True) + expand_data = fields.Boolean(allow_none=True) + + +class QueryResultSchema(Schema): + changedOn = fields.DateTime() + changed_on = fields.String() + dbId = fields.Integer() + db = fields.String() # pylint: disable=invalid-name + endDttm = fields.Float() + errorMessage = fields.String(allow_none=True) + executedSql = fields.String() + id = fields.String() + queryId = fields.Integer() + limit = fields.Integer() + limitingFactor = fields.String() + progress = fields.Integer() + rows = fields.Integer() + schema = fields.String() + ctas = fields.Boolean() + serverId = fields.Integer() + sql = fields.String() + sqlEditorId = fields.String() + startDttm = fields.Float() + state = fields.String() + tab = fields.String() + tempSchema = fields.String(allow_none=True) + tempTable = fields.String(allow_none=True) + userId = fields.Integer() + user = fields.String() + resultsKey = fields.String() + trackingUrl = fields.String(allow_none=True) + extra = fields.Dict(keys=fields.String()) + + +class QueryExecutionResponseSchema(Schema): + status = fields.String() + data = fields.List(fields.Dict()) + columns = fields.List(fields.Dict()) + selected_columns = fields.List(fields.Dict()) + 
expanded_columns = fields.List(fields.Dict()) + query = fields.Nested(QueryResultSchema) + query_id = fields.Integer() diff --git a/superset/sqllab/validators.py b/superset/sqllab/validators.py index 726a2760e..5bc8a6225 100644 --- a/superset/sqllab/validators.py +++ b/superset/sqllab/validators.py @@ -20,7 +20,7 @@ from __future__ import annotations from typing import TYPE_CHECKING from superset import security_manager -from superset.sqllab.command import CanAccessQueryValidator +from superset.sqllab.commands.execute import CanAccessQueryValidator if TYPE_CHECKING: from superset.models.sql_lab import Query diff --git a/superset/translations/de/LC_MESSAGES/messages.json b/superset/translations/de/LC_MESSAGES/messages.json index 96dab1f54..08678e6d0 100644 --- a/superset/translations/de/LC_MESSAGES/messages.json +++ b/superset/translations/de/LC_MESSAGES/messages.json @@ -3988,7 +3988,7 @@ "The query associated with the results was deleted.": [ "Die den Ergebnissen zugeordnete Abfrage wurde gelöscht." ], - "The query associated with these results could not be find. You need to re-run the original query.": [ + "The query associated with these results could not be found. You need to re-run the original query.": [ "Die mit diesen Ergebnissen verknüpfte Abfrage konnte nicht gefunden werden. Sie müssen die ursprüngliche Abfrage erneut ausführen." ], "The query contains one or more malformed template parameters.": [ diff --git a/superset/translations/de/LC_MESSAGES/messages.po b/superset/translations/de/LC_MESSAGES/messages.po index 0ea2edb9a..54c069c1a 100644 --- a/superset/translations/de/LC_MESSAGES/messages.po +++ b/superset/translations/de/LC_MESSAGES/messages.po @@ -12573,7 +12573,7 @@ msgstr "Die den Ergebnissen zugeordnete Abfrage wurde gelöscht." #: superset/views/core.py:2297 msgid "" -"The query associated with these results could not be find. You need to " +"The query associated with these results could not be found. You need to " "re-run the original query." 
msgstr "" "Die mit diesen Ergebnissen verknüpfte Abfrage konnte nicht gefunden " diff --git a/superset/translations/en/LC_MESSAGES/messages.po b/superset/translations/en/LC_MESSAGES/messages.po index fe5deee37..ec9782437 100644 --- a/superset/translations/en/LC_MESSAGES/messages.po +++ b/superset/translations/en/LC_MESSAGES/messages.po @@ -11715,7 +11715,7 @@ msgstr "" #: superset/views/core.py:2280 msgid "" -"The query associated with these results could not be find. You need to " +"The query associated with these results could not be found. You need to " "re-run the original query." msgstr "" diff --git a/superset/translations/es/LC_MESSAGES/messages.po b/superset/translations/es/LC_MESSAGES/messages.po index 8a8711001..0b1d2361f 100644 --- a/superset/translations/es/LC_MESSAGES/messages.po +++ b/superset/translations/es/LC_MESSAGES/messages.po @@ -12358,7 +12358,7 @@ msgstr "" #: superset/views/core.py:2280 msgid "" -"The query associated with these results could not be find. You need to " +"The query associated with these results could not be found. You need to " "re-run the original query." msgstr "" diff --git a/superset/translations/fr/LC_MESSAGES/messages.json b/superset/translations/fr/LC_MESSAGES/messages.json index d0ddca087..c7d20179c 100644 --- a/superset/translations/fr/LC_MESSAGES/messages.json +++ b/superset/translations/fr/LC_MESSAGES/messages.json @@ -1380,7 +1380,7 @@ "Data could not be retrieved from the results backend. You need to re-run the original query.": [ "Impossible de récupérer les données depuis le backend. Rejouez la requête originale." ], - "The query associated with these results could not be find. You need to re-run the original query.": [ + "The query associated with these results could not be found. You need to re-run the original query.": [ "La requête associée à ces résultats n'a pu être trouvée. Rejouez la requête originale." ], "You are not authorized to see this query. 
If you think this is an error, please reach out to your administrator.": [ diff --git a/superset/translations/fr/LC_MESSAGES/messages.po b/superset/translations/fr/LC_MESSAGES/messages.po index 342e2af60..694b6c4b1 100644 --- a/superset/translations/fr/LC_MESSAGES/messages.po +++ b/superset/translations/fr/LC_MESSAGES/messages.po @@ -12626,7 +12626,7 @@ msgstr "La requête associée aux résutlats a été supprimée." #: superset/views/core.py:2280 msgid "" -"The query associated with these results could not be find. You need to " +"The query associated with these results could not be found. You need to " "re-run the original query." msgstr "" "La requête associée à ces résultats n'a pu être trouvée. Rejouez la " diff --git a/superset/translations/it/LC_MESSAGES/messages.po b/superset/translations/it/LC_MESSAGES/messages.po index f57fdcd6b..d11c7b002 100644 --- a/superset/translations/it/LC_MESSAGES/messages.po +++ b/superset/translations/it/LC_MESSAGES/messages.po @@ -12065,7 +12065,7 @@ msgstr "" #: superset/views/core.py:2280 msgid "" -"The query associated with these results could not be find. You need to " +"The query associated with these results could not be found. You need to " "re-run the original query." msgstr "" diff --git a/superset/translations/ja/LC_MESSAGES/messages.po b/superset/translations/ja/LC_MESSAGES/messages.po index 52914a8f5..1d95ed1af 100644 --- a/superset/translations/ja/LC_MESSAGES/messages.po +++ b/superset/translations/ja/LC_MESSAGES/messages.po @@ -12033,7 +12033,7 @@ msgstr "" #: superset/views/core.py:2280 msgid "" -"The query associated with these results could not be find. You need to " +"The query associated with these results could not be found. You need to " "re-run the original query." 
msgstr "" diff --git a/superset/translations/ko/LC_MESSAGES/messages.po b/superset/translations/ko/LC_MESSAGES/messages.po index acd8bf48d..bfcb59867 100644 --- a/superset/translations/ko/LC_MESSAGES/messages.po +++ b/superset/translations/ko/LC_MESSAGES/messages.po @@ -11953,7 +11953,7 @@ msgstr "" #: superset/views/core.py:2280 msgid "" -"The query associated with these results could not be find. You need to " +"The query associated with these results could not be found. You need to " "re-run the original query." msgstr "" diff --git a/superset/translations/messages.pot b/superset/translations/messages.pot index 693f5c414..f5279935a 100644 --- a/superset/translations/messages.pot +++ b/superset/translations/messages.pot @@ -11721,7 +11721,7 @@ msgstr "" #: superset/views/core.py:2280 msgid "" -"The query associated with these results could not be find. You need to " +"The query associated with these results could not be found. You need to " "re-run the original query." msgstr "" diff --git a/superset/translations/nl/LC_MESSAGES/messages.json b/superset/translations/nl/LC_MESSAGES/messages.json index 9d94c13ef..4851bcd03 100644 --- a/superset/translations/nl/LC_MESSAGES/messages.json +++ b/superset/translations/nl/LC_MESSAGES/messages.json @@ -1263,7 +1263,7 @@ "Data could not be retrieved from the results backend. You need to re-run the original query.": [ "" ], - "The query associated with these results could not be find. You need to re-run the original query.": [ + "The query associated with these results could not be found. You need to re-run the original query.": [ "" ], "You are not authorized to see this query. 
If you think this is an error, please reach out to your administrator.": [ diff --git a/superset/translations/nl/LC_MESSAGES/messages.po b/superset/translations/nl/LC_MESSAGES/messages.po index 8bf6fc972..06147b0a8 100644 --- a/superset/translations/nl/LC_MESSAGES/messages.po +++ b/superset/translations/nl/LC_MESSAGES/messages.po @@ -3372,7 +3372,7 @@ msgstr "" #: superset/views/core.py:2321 msgid "" -"The query associated with these results could not be find. You need to " +"The query associated with these results could not be found. You need to " "re-run the original query." msgstr "" diff --git a/superset/translations/pt_BR/LC_MESSAGES/messages.po b/superset/translations/pt_BR/LC_MESSAGES/messages.po index 1194ac5b7..77695a25b 100644 --- a/superset/translations/pt_BR/LC_MESSAGES/messages.po +++ b/superset/translations/pt_BR/LC_MESSAGES/messages.po @@ -12597,7 +12597,7 @@ msgstr "" #: superset/views/core.py:2280 msgid "" -"The query associated with these results could not be find. You need to " +"The query associated with these results could not be found. You need to " "re-run the original query." msgstr "" diff --git a/superset/translations/ru/LC_MESSAGES/messages.json b/superset/translations/ru/LC_MESSAGES/messages.json index e21b1ef84..e6cb23f2c 100644 --- a/superset/translations/ru/LC_MESSAGES/messages.json +++ b/superset/translations/ru/LC_MESSAGES/messages.json @@ -4506,7 +4506,7 @@ "The query associated with the results was deleted.": [ "Запрос, связанный с результатами, был удален." ], - "The query associated with these results could not be find. You need to re-run the original query.": [ + "The query associated with these results could not be found. 
You need to re-run the original query.": [ "" ], "The query contains one or more malformed template parameters.": [""], diff --git a/superset/translations/ru/LC_MESSAGES/messages.po b/superset/translations/ru/LC_MESSAGES/messages.po index 993c4670f..3c9606aa7 100644 --- a/superset/translations/ru/LC_MESSAGES/messages.po +++ b/superset/translations/ru/LC_MESSAGES/messages.po @@ -14931,7 +14931,7 @@ msgstr "Запрос, связанный с результатами, был у #: superset/views/core.py:2222 msgid "" -"The query associated with these results could not be find. You need to " +"The query associated with these results could not be found. You need to " "re-run the original query." msgstr "" diff --git a/superset/translations/sk/LC_MESSAGES/messages.po b/superset/translations/sk/LC_MESSAGES/messages.po index aa8b175ad..9e8eea66a 100644 --- a/superset/translations/sk/LC_MESSAGES/messages.po +++ b/superset/translations/sk/LC_MESSAGES/messages.po @@ -11738,7 +11738,7 @@ msgstr "" #: superset/views/core.py:2280 msgid "" -"The query associated with these results could not be find. You need to " +"The query associated with these results could not be found. You need to " "re-run the original query." msgstr "" diff --git a/superset/translations/sl/LC_MESSAGES/messages.json b/superset/translations/sl/LC_MESSAGES/messages.json index 232b978d4..ca9a838bf 100644 --- a/superset/translations/sl/LC_MESSAGES/messages.json +++ b/superset/translations/sl/LC_MESSAGES/messages.json @@ -1299,7 +1299,7 @@ "Data could not be retrieved from the results backend. You need to re-run the original query.": [ "Podatkov ni bilo mogoče pridobiti iz zalednega sistema rezultatov. Ponovno morate zagnati izvorno poizvedbo." ], - "The query associated with these results could not be find. You need to re-run the original query.": [ + "The query associated with these results could not be found. You need to re-run the original query.": [ "Poizvedbe, povezane s temi rezultati, ni bilo mogoče najti. 
Ponovno morate zagnati izvorno poizvedbo." ], "You are not authorized to see this query. If you think this is an error, please reach out to your administrator.": [ diff --git a/superset/translations/sl/LC_MESSAGES/messages.po b/superset/translations/sl/LC_MESSAGES/messages.po index 8c24c390a..c9b76c8af 100644 --- a/superset/translations/sl/LC_MESSAGES/messages.po +++ b/superset/translations/sl/LC_MESSAGES/messages.po @@ -3254,7 +3254,7 @@ msgstr "" #: superset/views/core.py:2207 msgid "" -"The query associated with these results could not be find. You need to re-run the " +"The query associated with these results could not be found. You need to re-run the " "original query." msgstr "" "Poizvedbe, povezane s temi rezultati, ni bilo mogoče najti. Ponovno morate " diff --git a/superset/translations/zh/LC_MESSAGES/messages.po b/superset/translations/zh/LC_MESSAGES/messages.po index 06fbae003..3abb36400 100644 --- a/superset/translations/zh/LC_MESSAGES/messages.po +++ b/superset/translations/zh/LC_MESSAGES/messages.po @@ -12235,7 +12235,7 @@ msgstr "" #: superset/views/core.py:2280 msgid "" -"The query associated with these results could not be find. You need to " +"The query associated with these results could not be found. You need to " "re-run the original query." 
msgstr "" diff --git a/superset/views/base_api.py b/superset/views/base_api.py index d27fad7eb..29bac574a 100644 --- a/superset/views/base_api.py +++ b/superset/views/base_api.py @@ -165,6 +165,7 @@ class BaseSupersetApiMixin: "401": {"description": "Unauthorized", "content": error_payload_content}, "403": {"description": "Forbidden", "content": error_payload_content}, "404": {"description": "Not found", "content": error_payload_content}, + "410": {"description": "Gone", "content": error_payload_content}, "422": { "description": "Could not process entity", "content": error_payload_content, @@ -210,7 +211,7 @@ class BaseSupersetApiMixin: self.timing_stats("time", key, time_delta) -class BaseSupersetApi(BaseApi, BaseSupersetApiMixin): +class BaseSupersetApi(BaseSupersetApiMixin, BaseApi): ... diff --git a/superset/views/core.py b/superset/views/core.py index 79c725014..8d632dcde 100755 --- a/superset/views/core.py +++ b/superset/views/core.py @@ -107,8 +107,8 @@ from superset.security.analytics_db_safety import check_sqlalchemy_uri from superset.sql_lab import get_sql_results from superset.sql_parse import ParsedQuery from superset.sql_validators import get_validator_by_name -from superset.sqllab.command import CommandResult, ExecuteSqlCommand from superset.sqllab.command_status import SqlJsonExecutionStatus +from superset.sqllab.commands.execute import CommandResult, ExecuteSqlCommand from superset.sqllab.exceptions import ( QueryIsForbiddenToAccessException, SqlLabException, @@ -2090,6 +2090,7 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods @has_access_api @expose("/results//") @event_logger.log_this + @deprecated() def results(self, key: str) -> FlaskResponse: return self.results_exec(key) @@ -2133,7 +2134,7 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods raise SupersetErrorException( SupersetError( message=__( - "The query associated with these results could not be find. 
" + "The query associated with these results could not be found. " "You need to re-run the original query." ), error_type=SupersetErrorType.RESULTS_BACKEND_ERROR, @@ -2313,6 +2314,7 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods @handle_api_exception @event_logger.log_this @expose("/sql_json/", methods=["POST"]) + @deprecated() def sql_json(self) -> FlaskResponse: errors = SqlJsonPayloadSchema().validate(request.json) if errors: @@ -2352,7 +2354,7 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods SqlQueryRenderImpl(get_template_processor), sql_json_executor, execution_context_convertor, - config.get("SQLLAB_CTAS_NO_LIMIT"), # type: ignore + config.get("SQLLAB_CTAS_NO_LIMIT"), log_params, ) diff --git a/tests/integration_tests/base_tests.py b/tests/integration_tests/base_tests.py index 20e324559..999f22dd2 100644 --- a/tests/integration_tests/base_tests.py +++ b/tests/integration_tests/base_tests.py @@ -347,7 +347,7 @@ class SupersetTestCase(TestCase): json_payload["schema"] = schema resp = self.get_json_resp( - "/superset/sql_json/", raise_on_error=False, json_=json_payload + "/api/v1/sqllab/execute/", raise_on_error=False, json_=json_payload ) if raise_on_error and "error" in resp: raise Exception("run_sql failed") diff --git a/tests/integration_tests/celery_tests.py b/tests/integration_tests/celery_tests.py index d017f9f99..8693a8888 100644 --- a/tests/integration_tests/celery_tests.py +++ b/tests/integration_tests/celery_tests.py @@ -96,7 +96,7 @@ def run_sql( ): db_id = get_example_database().id return test_client.post( - "/superset/sql_json/", + "/api/v1/sqllab/execute/", json=dict( database_id=db_id, sql=sql, diff --git a/tests/integration_tests/sql_lab/api_tests.py b/tests/integration_tests/sql_lab/api_tests.py new file mode 100644 index 000000000..4c2080ad4 --- /dev/null +++ b/tests/integration_tests/sql_lab/api_tests.py @@ -0,0 +1,178 @@ +# Licensed to the Apache Software Foundation (ASF) under 
one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# isort:skip_file +"""Unit tests for Superset""" +import datetime +import json +import random + +import pytest +import prison +from sqlalchemy.sql import func +from unittest import mock + +from tests.integration_tests.test_app import app +from superset import sql_lab +from superset.common.db_query_status import QueryStatus +from superset.models.core import Database +from superset.utils.database import get_example_database, get_main_database +from superset.utils import core as utils +from superset.models.sql_lab import Query + +from tests.integration_tests.base_tests import SupersetTestCase + +QUERIES_FIXTURE_COUNT = 10 + + +class TestSqlLabApi(SupersetTestCase): + @mock.patch("superset.sqllab.commands.results.results_backend_use_msgpack", False) + def test_execute_required_params(self): + self.login() + client_id = "{}".format(random.getrandbits(64))[:10] + + data = {"client_id": client_id} + rv = self.client.post( + "/api/v1/sqllab/execute/", + json=data, + ) + failed_resp = { + "message": { + "sql": ["Missing data for required field."], + "database_id": ["Missing data for required field."], + } + } + resp_data = json.loads(rv.data.decode("utf-8")) + self.assertDictEqual(resp_data, failed_resp) + self.assertEqual(rv.status_code, 400) + + 
data = {"sql": "SELECT 1", "client_id": client_id} + rv = self.client.post( + "/api/v1/sqllab/execute/", + json=data, + ) + failed_resp = {"message": {"database_id": ["Missing data for required field."]}} + resp_data = json.loads(rv.data.decode("utf-8")) + self.assertDictEqual(resp_data, failed_resp) + self.assertEqual(rv.status_code, 400) + + data = {"database_id": 1, "client_id": client_id} + rv = self.client.post( + "/api/v1/sqllab/execute/", + json=data, + ) + failed_resp = {"message": {"sql": ["Missing data for required field."]}} + resp_data = json.loads(rv.data.decode("utf-8")) + self.assertDictEqual(resp_data, failed_resp) + self.assertEqual(rv.status_code, 400) + + @mock.patch("superset.sqllab.commands.results.results_backend_use_msgpack", False) + def test_execute_valid_request(self) -> None: + from superset import sql_lab as core + + core.results_backend = mock.Mock() + core.results_backend.get.return_value = {} + + self.login() + client_id = "{}".format(random.getrandbits(64))[:10] + + data = {"sql": "SELECT 1", "database_id": 1, "client_id": client_id} + rv = self.client.post( + "/api/v1/sqllab/execute/", + json=data, + ) + resp_data = json.loads(rv.data.decode("utf-8")) + self.assertEqual(resp_data.get("status"), "success") + self.assertEqual(rv.status_code, 200) + + @mock.patch( + "tests.integration_tests.superset_test_custom_template_processors.datetime" + ) + @mock.patch("superset.sqllab.api.get_sql_results") + def test_execute_custom_templated(self, sql_lab_mock, mock_dt) -> None: + mock_dt.utcnow = mock.Mock(return_value=datetime.datetime(1970, 1, 1)) + self.login() + sql = "SELECT '$DATE()' as test" + resp = { + "status": QueryStatus.SUCCESS, + "query": {"rows": 1}, + "data": [{"test": "'1970-01-01'"}], + } + sql_lab_mock.return_value = resp + + dbobj = self.create_fake_db_for_macros() + json_payload = dict(database_id=dbobj.id, sql=sql) + self.get_json_resp( + "/api/v1/sqllab/execute/", raise_on_error=False, json_=json_payload + ) + assert 
sql_lab_mock.called + self.assertEqual(sql_lab_mock.call_args[0][1], "SELECT '1970-01-01' as test") + + self.delete_fake_db_for_macros() + + @mock.patch("superset.sqllab.commands.results.results_backend_use_msgpack", False) + def test_get_results_with_display_limit(self): + from superset.sqllab.commands import results as command + + command.results_backend = mock.Mock() + self.login() + + data = [{"col_0": i} for i in range(100)] + payload = { + "status": QueryStatus.SUCCESS, + "query": {"rows": 100}, + "data": data, + } + # limit results to 1 + expected_key = {"status": "success", "query": {"rows": 100}, "data": data} + limited_data = data[:1] + expected_limited = { + "status": "success", + "query": {"rows": 100}, + "data": limited_data, + "displayLimitReached": True, + } + + query_mock = mock.Mock() + query_mock.sql = "SELECT *" + query_mock.database = 1 + query_mock.schema = "superset" + + # do not apply msgpack serialization + use_msgpack = app.config["RESULTS_BACKEND_USE_MSGPACK"] + app.config["RESULTS_BACKEND_USE_MSGPACK"] = False + serialized_payload = sql_lab._serialize_payload(payload, False) + compressed = utils.zlib_compress(serialized_payload) + command.results_backend.get.return_value = compressed + + with mock.patch("superset.sqllab.commands.results.db") as mock_superset_db: + mock_superset_db.session.query().filter_by().one_or_none.return_value = ( + query_mock + ) + # get all results + arguments = {"key": "key"} + result_key = json.loads( + self.get_resp(f"/api/v1/sqllab/results/?q={prison.dumps(arguments)}") + ) + arguments = {"key": "key", "rows": 1} + result_limited = json.loads( + self.get_resp(f"/api/v1/sqllab/results/?q={prison.dumps(arguments)}") + ) + + self.assertEqual(result_key, expected_key) + self.assertEqual(result_limited, expected_limited) + + app.config["RESULTS_BACKEND_USE_MSGPACK"] = use_msgpack diff --git a/tests/integration_tests/sql_lab/commands_tests.py b/tests/integration_tests/sql_lab/commands_tests.py new file mode 100644 
index 000000000..74c1fe708 --- /dev/null +++ b/tests/integration_tests/sql_lab/commands_tests.py @@ -0,0 +1,161 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from unittest import mock, skip +from unittest.mock import patch + +import pytest + +from superset import db, sql_lab +from superset.common.db_query_status import QueryStatus +from superset.errors import SupersetErrorType +from superset.exceptions import SerializationError, SupersetErrorException +from superset.models.core import Database +from superset.models.sql_lab import Query +from superset.sqllab.commands import results +from superset.utils import core as utils +from tests.integration_tests.base_tests import SupersetTestCase + + +class TestSqlExecutionResultsCommand(SupersetTestCase): + @mock.patch("superset.sqllab.commands.results.results_backend_use_msgpack", False) + def test_validation_no_results_backend(self) -> None: + results.results_backend = None + + command = results.SqlExecutionResultsCommand("test", 1000) + + with pytest.raises(SupersetErrorException) as ex_info: + command.run() + assert ( + ex_info.value.error.error_type + == SupersetErrorType.RESULTS_BACKEND_NOT_CONFIGURED_ERROR + ) + + 
@mock.patch("superset.sqllab.commands.results.results_backend_use_msgpack", False) + def test_validation_data_cannot_be_retrieved(self) -> None: + results.results_backend = mock.Mock() + results.results_backend.get.return_value = None + + command = results.SqlExecutionResultsCommand("test", 1000) + + with pytest.raises(SupersetErrorException) as ex_info: + command.run() + assert ex_info.value.error.error_type == SupersetErrorType.RESULTS_BACKEND_ERROR + + @mock.patch("superset.sqllab.commands.results.results_backend_use_msgpack", False) + def test_validation_query_not_found(self) -> None: + data = [{"col_0": i} for i in range(100)] + payload = { + "status": QueryStatus.SUCCESS, + "query": {"rows": 100}, + "data": data, + } + serialized_payload = sql_lab._serialize_payload(payload, False) + compressed = utils.zlib_compress(serialized_payload) + + results.results_backend = mock.Mock() + results.results_backend.get.return_value = compressed + + command = results.SqlExecutionResultsCommand("test", 1000) + + with pytest.raises(SupersetErrorException) as ex_info: + command.run() + assert ex_info.value.error.error_type == SupersetErrorType.RESULTS_BACKEND_ERROR + + @mock.patch("superset.sqllab.commands.results.results_backend_use_msgpack", False) + def test_validation_query_not_found2(self) -> None: + data = [{"col_0": i} for i in range(104)] + payload = { + "status": QueryStatus.SUCCESS, + "query": {"rows": 104}, + "data": data, + } + serialized_payload = sql_lab._serialize_payload(payload, False) + compressed = utils.zlib_compress(serialized_payload) + + results.results_backend = mock.Mock() + results.results_backend.get.return_value = compressed + + database = Database(database_name="my_database", sqlalchemy_uri="sqlite://") + query_obj = Query( + client_id="foo", + database=database, + tab_name="test_tab", + sql_editor_id="test_editor_id", + sql="select * from bar", + select_sql="select * from bar", + executed_sql="select * from bar", + limit=100, + 
select_as_cta=False, + rows=104, + error_message="none", + results_key="test_abc", + ) + + db.session.add(database) + db.session.add(query_obj) + + with mock.patch( + "superset.views.utils._deserialize_results_payload", + side_effect=SerializationError(), + ): + with pytest.raises(SupersetErrorException) as ex_info: + command = results.SqlExecutionResultsCommand("test", 1000) + command.run() + assert ( + ex_info.value.error.error_type + == SupersetErrorType.RESULTS_BACKEND_ERROR + ) + + @mock.patch("superset.sqllab.commands.results.results_backend_use_msgpack", False) + def test_run_succeeds(self) -> None: + data = [{"col_0": i} for i in range(104)] + payload = { + "status": QueryStatus.SUCCESS, + "query": {"rows": 104}, + "data": data, + } + serialized_payload = sql_lab._serialize_payload(payload, False) + compressed = utils.zlib_compress(serialized_payload) + + results.results_backend = mock.Mock() + results.results_backend.get.return_value = compressed + + database = Database(database_name="my_database", sqlalchemy_uri="sqlite://") + query_obj = Query( + client_id="foo", + database=database, + tab_name="test_tab", + sql_editor_id="test_editor_id", + sql="select * from bar", + select_sql="select * from bar", + executed_sql="select * from bar", + limit=100, + select_as_cta=False, + rows=104, + error_message="none", + results_key="test_abc", + ) + + db.session.add(database) + db.session.add(query_obj) + + command = results.SqlExecutionResultsCommand("test_abc", 1000) + result = command.run() + + assert result.get("status") == "success" + assert result.get("query").get("rows") == 104 + assert result.get("data") == data