feat(sip-95): new endpoint for extra table metadata (#28063)

This commit is contained in:
Beto Dealmeida 2024-04-18 10:42:53 -04:00 committed by GitHub
parent 69a7bfc88d
commit 68a982dfe6
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
27 changed files with 474 additions and 74 deletions

View File

@ -511,7 +511,7 @@ describe('async actions', () => {
const getTableMetadataEndpoint = 'glob:**/api/v1/database/*/table/*/*/';
fetchMock.get(getTableMetadataEndpoint, {});
const getExtraTableMetadataEndpoint =
'glob:**/api/v1/database/*/table_extra/*/*/';
'glob:**/api/v1/database/*/table_metadata/extra/';
fetchMock.get(getExtraTableMetadataEndpoint, {});
let isFeatureEnabledMock;

View File

@ -67,7 +67,7 @@ beforeEach(() => {
columns: table.columns,
},
});
fetchMock.get('glob:*/api/v1/database/*/table_extra/*/*', {
fetchMock.get('glob:*/api/v1/database/*/table_metadata/extra/', {
status: 200,
body: {},
});

View File

@ -49,7 +49,7 @@ jest.mock(
);
const getTableMetadataEndpoint = 'glob:**/api/v1/database/*/table/*/*/';
const getExtraTableMetadataEndpoint =
'glob:**/api/v1/database/*/table_extra/*/*/';
'glob:**/api/v1/database/*/table_metadata/extra/*';
const updateTableSchemaEndpoint = 'glob:*/tableschemaview/*/expanded';
beforeEach(() => {

View File

@ -181,7 +181,7 @@ const DndFilterSelect = (props: DndFilterSelectProps) => {
if (!isSqllabView && dbId && name && schema) {
SupersetClient.get({
endpoint: `/api/v1/database/${dbId}/table_extra/${name}/${schema}/`,
endpoint: `/api/v1/database/${dbId}/table_metadata/extra/?name=${name}&schema=${schema}`,
})
.then(({ json }: { json: Record<string, any> }) => {
if (json?.partitions) {

View File

@ -143,7 +143,7 @@ class AdhocFilterControl extends React.Component {
if (!isSqllabView && dbId && name && schema) {
SupersetClient.get({
endpoint: `/api/v1/database/${dbId}/table_extra/${name}/${schema}/`,
endpoint: `/api/v1/database/${dbId}/table_metadata/extra/?name=${name}&schema=${schema}`,
})
.then(({ json }) => {
if (json && json.partitions) {

View File

@ -125,9 +125,9 @@ const tableApi = api.injectEndpoints({
FetchTableMetadataQueryParams
>({
query: ({ dbId, schema, table }) => ({
endpoint: `/api/v1/database/${dbId}/table_extra/${encodeURIComponent(
table,
)}/${encodeURIComponent(schema)}/`,
endpoint: schema
? `/api/v1/database/${dbId}/table_metadata/extra/?name=${table}&schema=${schema}`
: `/api/v1/database/${dbId}/table_metadata/extra/?name=${table}`,
transformResponse: ({ json }: JsonResponse) => json,
}),
}),

View File

@ -135,6 +135,7 @@ MODEL_API_RW_METHOD_PERMISSION_MAP = {
"select_star": "read",
"table_metadata": "read",
"table_extra_metadata": "read",
"table_extra_metadata_deprecated": "read",
"test_connection": "write",
"validate_parameters": "write",
"favorite_status": "read",

View File

@ -15,11 +15,14 @@
# specific language governing permissions and limitations
# under the License.
# pylint: disable=too-many-lines
from __future__ import annotations
import json
import logging
from datetime import datetime, timedelta
from io import BytesIO
from typing import Any, cast, Optional
from typing import Any, cast
from zipfile import is_zipfile, ZipFile
from deprecation import deprecated
@ -82,6 +85,7 @@ from superset.databases.schemas import (
get_export_ids_schema,
OAuth2ProviderResponseSchema,
openapi_spec_methods_override,
QualifiedTableSchema,
SchemasResponseSchema,
SelectStarResponseSchema,
TableExtraMetadataResponseSchema,
@ -92,9 +96,18 @@ from superset.databases.schemas import (
from superset.databases.utils import get_table_metadata
from superset.db_engine_specs import get_available_engine_specs
from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
from superset.exceptions import OAuth2Error, SupersetErrorsException, SupersetException
from superset.exceptions import (
DatabaseNotFoundException,
InvalidPayloadSchemaError,
OAuth2Error,
SupersetErrorsException,
SupersetException,
SupersetSecurityException,
TableNotFoundException,
)
from superset.extensions import security_manager
from superset.models.core import Database
from superset.sql_parse import Table
from superset.superset_typing import FlaskResponse
from superset.utils.core import error_msg_from_exception, parse_js_uri_path_item
from superset.utils.oauth2 import decode_oauth2_state
@ -121,6 +134,7 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
"tables",
"table_metadata",
"table_extra_metadata",
"table_extra_metadata_deprecated",
"select_star",
"schemas",
"test_connection",
@ -764,15 +778,20 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
@check_table_access
@safe
@statsd_metrics
@deprecated(deprecated_in="4.0", removed_in="5.0")
@event_logger.log_this_with_context(
action=lambda self, *args, **kwargs: f"{self.__class__.__name__}"
f".table_extra_metadata",
f".table_extra_metadata_deprecated",
log_to_statsd=False,
)
def table_extra_metadata(
def table_extra_metadata_deprecated(
self, database: Database, table_name: str, schema_name: str
) -> FlaskResponse:
"""Get table extra metadata.
A newer API was introduced between 4.0 and 5.0, with support for catalogs for
SIP-95. This method was kept to prevent breaking API integrations, but will be
removed in 5.0.
---
get:
summary: Get table extra metadata
@ -812,13 +831,92 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
500:
$ref: '#/components/responses/500'
"""
self.incr_stats("init", self.table_metadata.__name__)
self.incr_stats("init", self.table_extra_metadata_deprecated.__name__)
parsed_schema = parse_js_uri_path_item(schema_name, eval_undefined=True)
table_name = cast(str, parse_js_uri_path_item(table_name))
payload = database.db_engine_spec.extra_table_metadata(
database, table_name, parsed_schema
)
table = Table(table_name, parsed_schema)
payload = database.db_engine_spec.get_extra_table_metadata(database, table)
return self.response(200, **payload)
@expose("/<int:pk>/table_metadata/extra/", methods=("GET",))
@protect()
@statsd_metrics
@event_logger.log_this_with_context(
action=lambda self, *args, **kwargs: f"{self.__class__.__name__}"
f".table_extra_metadata",
log_to_statsd=False,
)
def table_extra_metadata(self, pk: int) -> FlaskResponse:
"""
Get extra metadata for a given table.
Optionally, a schema and a catalog can be passed, if different from the default
ones.
---
get:
summary: Get table extra metadata
description: >-
Extra metadata associated with the table (partitions, description, etc.)
parameters:
- in: path
schema:
type: integer
name: pk
description: The database id
- in: query
schema:
type: string
name: name
required: true
description: Table name
- in: query
schema:
type: string
name: schema
description: >-
Optional table schema, if not passed the schema configured in the database
will be used
- in: query
schema:
type: string
name: catalog
description: >-
Optional table catalog, if not passed the catalog configured in the
database will be used
responses:
200:
description: Table extra metadata information
content:
application/json:
schema:
$ref: "#/components/schemas/TableExtraMetadataResponseSchema"
401:
$ref: '#/components/responses/401'
404:
$ref: '#/components/responses/404'
500:
$ref: '#/components/responses/500'
"""
self.incr_stats("init", self.table_extra_metadata.__name__)
if not (database := DatabaseDAO.find_by_id(pk)):
raise DatabaseNotFoundException("No such database")
try:
parameters = QualifiedTableSchema().load(request.args)
except ValidationError as ex:
raise InvalidPayloadSchemaError(ex) from ex
table = Table(parameters["name"], parameters["schema"], parameters["catalog"])
try:
security_manager.raise_for_access(database=database, table=table)
except SupersetSecurityException as ex:
# instead of raising 403, raise 404 to hide table existence
raise TableNotFoundException("No such table") from ex
payload = database.db_engine_spec.get_extra_table_metadata(database, table)
return self.response(200, **payload)
@expose("/<int:pk>/select_star/<path:table_name>/", methods=("GET",))
@ -832,7 +930,7 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
log_to_statsd=False,
)
def select_star(
self, database: Database, table_name: str, schema_name: Optional[str] = None
self, database: Database, table_name: str, schema_name: str | None = None
) -> FlaskResponse:
"""Get database select star for table.
---

View File

@ -1195,3 +1195,27 @@ class OAuth2ProviderResponseSchema(Schema):
class Meta: # pylint: disable=too-few-public-methods
# Ignore unknown fields that might be sent by the OAuth2 provider
unknown = EXCLUDE
class QualifiedTableSchema(Schema):
    """
    Schema for a qualified table reference.

    Catalog and schema can be omitted, falling back to the database defaults.
    The table name must always be present.
    """

    # Required: the bare table name, without catalog/schema qualification.
    name = fields.String(
        required=True,
        metadata={"description": "The table name"},
    )
    # Optional: when omitted, loads as None so the schema configured in the
    # database is used instead.
    schema = fields.String(
        required=False,
        load_default=None,
        metadata={"description": "The table schema"},
    )
    # Optional: when omitted, loads as None so the catalog configured in the
    # database is used instead.
    catalog = fields.String(
        required=False,
        load_default=None,
        metadata={"description": "The table catalog"},
    )

View File

@ -603,7 +603,7 @@ For some databases the `df_to_sql` classmethod needs to be implemented. For exam
### Extra table metadata
DB engine specs can return additional metadata associated with a table. This is done via the `extra_table_metadata` class method. Trino uses this to return information about the latest partition, for example, and Bigquery returns clustering information. This information is then surfaced in the SQL Lab UI, when browsing tables in the metadata explorer (on the left panel).
DB engine specs can return additional metadata associated with a table. This is done via the `get_extra_table_metadata` class method. Trino uses this to return information about the latest partition, for example, and Bigquery returns clustering information. This information is then surfaced in the SQL Lab UI, when browsing tables in the metadata explorer (on the left panel).
### DB API exception mapping

View File

@ -21,6 +21,7 @@ from __future__ import annotations
import json
import logging
import re
import warnings
from datetime import datetime
from re import Match, Pattern
from typing import (
@ -1034,21 +1035,33 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
return indexes
@classmethod
def extra_table_metadata( # pylint: disable=unused-argument
def get_extra_table_metadata( # pylint: disable=unused-argument
cls,
database: Database,
table_name: str,
schema_name: str | None,
table: Table,
) -> dict[str, Any]:
"""
Returns engine-specific table metadata
:param database: Database instance
:param table_name: Table name
:param schema_name: Schema name
:param table: A Table instance
:return: Engine-specific table metadata
"""
# TODO: Fix circular import caused by importing Database
# old method that doesn't work with catalogs
if hasattr(cls, "extra_table_metadata"):
warnings.warn(
"The `extra_table_metadata` method is deprecated, please implement "
"the `get_extra_table_metadata` method in the DB engine spec.",
DeprecationWarning,
)
# If a catalog is passed, return nothing, since we don't know the exact
# table that is being requested.
if table.catalog:
return {}
return cls.extra_table_metadata(database, table.table, table.schema)
return {}
@classmethod

View File

@ -320,10 +320,12 @@ class BigQueryEngineSpec(BaseEngineSpec): # pylint: disable=too-many-public-met
return cls.normalize_indexes(inspector.get_indexes(table_name, schema))
@classmethod
def extra_table_metadata(
cls, database: "Database", table_name: str, schema_name: Optional[str]
def get_extra_table_metadata(
cls,
database: "Database",
table: Table,
) -> dict[str, Any]:
indexes = database.get_indexes(table_name, schema_name)
indexes = database.get_indexes(table.table, table.schema)
if not indexes:
return {}
partitions_columns = [

View File

@ -137,15 +137,14 @@ class GSheetsEngineSpec(ShillelaghEngineSpec):
return url
@classmethod
def extra_table_metadata(
def get_extra_table_metadata(
cls,
database: Database,
table_name: str,
schema_name: str | None,
table: Table,
) -> dict[str, Any]:
with database.get_raw_connection(schema=schema_name) as conn:
with database.get_raw_connection(schema=table.schema) as conn:
cursor = conn.cursor()
cursor.execute(f'SELECT GET_METADATA("{table_name}")')
cursor.execute(f'SELECT GET_METADATA("{table.table}")')
results = cursor.fetchone()[0]
try:
metadata = json.loads(results)

View File

@ -63,7 +63,7 @@ BASIC_FEATURES = {
NICE_TO_HAVE_FEATURES = {
"user_impersonation": "Supports user impersonation",
"file_upload": "Support file upload",
"extra_table_metadata": "Returns extra table metadata",
"get_extra_table_metadata": "Returns extra table metadata",
"dbapi_exception_mapping": "Maps driver exceptions to Superset exceptions",
"custom_errors": "Parses error messages and returns Superset errors",
"dynamic_schema": "Supports changing the schema per-query",
@ -142,7 +142,9 @@ def diagnose(spec: type[BaseEngineSpec]) -> dict[str, Any]:
or has_custom_method(spec, "get_url_for_impersonation")
),
"file_upload": spec.supports_file_upload,
"extra_table_metadata": has_custom_method(spec, "extra_table_metadata"),
"get_extra_table_metadata": has_custom_method(
spec, "get_extra_table_metadata"
),
"dbapi_exception_mapping": has_custom_method(
spec, "get_dbapi_exception_mapping"
),
@ -177,7 +179,7 @@ def diagnose(spec: type[BaseEngineSpec]) -> dict[str, Any]:
nice_to_have = [
"user_impersonation",
"file_upload",
"extra_table_metadata",
"get_extra_table_metadata",
"dbapi_exception_mapping",
"custom_errors",
"dynamic_schema",
@ -264,7 +266,7 @@ def generate_table() -> list[list[Any]]:
keys = [
"user_impersonation",
"file_upload",
"extra_table_metadata",
"get_extra_table_metadata",
"dbapi_exception_mapping",
"custom_errors",
"dynamic_schema",

View File

@ -66,6 +66,7 @@ from superset.utils.core import GenericDataType
if TYPE_CHECKING:
# prevent circular imports
from superset.models.core import Database
from superset.sql_parse import Table
with contextlib.suppress(ImportError): # pyhive may not be installed
from pyhive.presto import Cursor
@ -1224,14 +1225,20 @@ class PrestoEngineSpec(PrestoBaseEngineSpec):
return all_columns, data, expanded_columns
@classmethod
def extra_table_metadata(
cls, database: Database, table_name: str, schema_name: str | None
def get_extra_table_metadata(
cls,
database: Database,
table: Table,
) -> dict[str, Any]:
metadata = {}
if indexes := database.get_indexes(table_name, schema_name):
if indexes := database.get_indexes(table.table, table.schema):
col_names, latest_parts = cls.latest_partition(
table_name, schema_name, database, show_first=True, indexes=indexes
table.table,
table.schema,
database,
show_first=True,
indexes=indexes,
)
if not latest_parts:
@ -1241,8 +1248,8 @@ class PrestoEngineSpec(PrestoBaseEngineSpec):
"cols": sorted(indexes[0].get("column_names", [])),
"latest": dict(zip(col_names, latest_parts)),
"partitionQuery": cls._partition_query(
table_name=table_name,
schema=schema_name,
table_name=table.table,
schema=table.schema,
indexes=indexes,
database=database,
),
@ -1250,7 +1257,8 @@ class PrestoEngineSpec(PrestoBaseEngineSpec):
# flake8 is not matching `Optional[str]` to `Any` for some reason...
metadata["view"] = cast(
Any, cls.get_create_view(database, schema_name, table_name)
Any,
cls.get_create_view(database, table.schema, table.table),
)
return metadata

View File

@ -45,6 +45,7 @@ from superset.utils import core as utils
if TYPE_CHECKING:
from superset.models.core import Database
from superset.sql_parse import Table
with contextlib.suppress(ImportError): # trino may not be installed
from trino.dbapi import Cursor
@ -58,18 +59,17 @@ class TrinoEngineSpec(PrestoBaseEngineSpec):
allows_alias_to_source_column = False
@classmethod
def extra_table_metadata(
def get_extra_table_metadata(
cls,
database: Database,
table_name: str,
schema_name: str | None,
table: Table,
) -> dict[str, Any]:
metadata = {}
if indexes := database.get_indexes(table_name, schema_name):
if indexes := database.get_indexes(table.table, table.schema):
col_names, latest_parts = cls.latest_partition(
table_name,
schema_name,
table.table,
table.schema,
database,
show_first=True,
indexes=indexes,
@ -91,17 +91,18 @@ class TrinoEngineSpec(PrestoBaseEngineSpec):
),
"latest": dict(zip(col_names, latest_parts)),
"partitionQuery": cls._partition_query(
table_name=table_name,
schema=schema_name,
table_name=table.table,
schema=table.schema,
indexes=indexes,
database=database,
),
}
if database.has_view_by_name(table_name, schema_name):
if database.has_view_by_name(table.table, table.schema):
with database.get_inspector_with_context() as inspector:
metadata["view"] = inspector.get_view_definition(
table_name, schema_name
table.table,
table.schema,
)
return metadata

View File

@ -73,6 +73,7 @@ class SupersetErrorType(StrEnum):
# Other errors
BACKEND_TIMEOUT_ERROR = "BACKEND_TIMEOUT_ERROR"
DATABASE_NOT_FOUND_ERROR = "DATABASE_NOT_FOUND_ERROR"
TABLE_NOT_FOUND_ERROR = "TABLE_NOT_FOUND_ERROR"
# Sql Lab errors
MISSING_TEMPLATE_PARAMS_ERROR = "MISSING_TEMPLATE_PARAMS_ERROR"

View File

@ -362,3 +362,29 @@ class CreateKeyValueDistributedLockFailedException(Exception):
"""
Exception to signalize failure to acquire lock.
"""
class DatabaseNotFoundException(SupersetErrorException):
    """Raised when no database exists for a given ID; rendered as HTTP 404."""

    status = 404

    def __init__(self, message: str):
        error = SupersetError(
            message=message,
            error_type=SupersetErrorType.DATABASE_NOT_FOUND_ERROR,
            level=ErrorLevel.ERROR,
        )
        super().__init__(error)
class TableNotFoundException(SupersetErrorException):
    """Raised when a requested table cannot be found; rendered as HTTP 404."""

    status = 404

    def __init__(self, message: str):
        error = SupersetError(
            message=message,
            error_type=SupersetErrorType.TABLE_NOT_FOUND_ERROR,
            level=ErrorLevel.ERROR,
        )
        super().__init__(error)

View File

@ -1507,9 +1507,9 @@ class TestDatabaseApi(SupersetTestCase):
self.assertEqual(rv.status_code, 404)
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_get_table_extra_metadata(self):
def test_get_table_extra_metadata_deprecated(self):
"""
Database API: Test get table extra metadata info
Database API: Test deprecated get table extra metadata info
"""
example_db = get_example_database()
self.login(ADMIN_USERNAME)
@ -1519,9 +1519,9 @@ class TestDatabaseApi(SupersetTestCase):
response = json.loads(rv.data.decode("utf-8"))
self.assertEqual(response, {})
def test_get_invalid_database_table_extra_metadata(self):
def test_get_invalid_database_table_extra_metadata_deprecated(self):
"""
Database API: Test get invalid database from table extra metadata
Database API: Test get invalid database from deprecated table extra metadata
"""
database_id = 1000
self.login(ADMIN_USERNAME)
@ -1533,9 +1533,9 @@ class TestDatabaseApi(SupersetTestCase):
rv = self.client.get(uri)
self.assertEqual(rv.status_code, 404)
def test_get_invalid_table_table_extra_metadata(self):
def test_get_invalid_table_table_extra_metadata_deprecated(self):
"""
Database API: Test get invalid table from table extra metadata
Database API: Test get invalid table from deprecated table extra metadata
"""
example_db = get_example_database()
uri = f"api/v1/database/{example_db.id}/table_extra/wrong_table/null/"

View File

@ -14,6 +14,9 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
from datetime import datetime

View File

@ -111,15 +111,16 @@ class TestBigQueryDbEngineSpec(TestDbEngineSpec):
result = BigQueryEngineSpec.fetch_data(None, 0)
self.assertEqual(result, [1, 2])
def test_extra_table_metadata(self):
def test_get_extra_table_metadata(self):
"""
DB Eng Specs (bigquery): Test extra table metadata
"""
database = mock.Mock()
# Test no indexes
database.get_indexes = mock.MagicMock(return_value=None)
result = BigQueryEngineSpec.extra_table_metadata(
database, "some_table", "some_schema"
result = BigQueryEngineSpec.get_extra_table_metadata(
database,
Table("some_table", "some_schema"),
)
self.assertEqual(result, {})
@ -138,8 +139,9 @@ class TestBigQueryDbEngineSpec(TestDbEngineSpec):
"clustering": {"cols": [["c_col1", "c_col2", "c_col3"]]},
}
database.get_indexes = mock.MagicMock(return_value=index_metadata)
result = BigQueryEngineSpec.extra_table_metadata(
database, "some_table", "some_schema"
result = BigQueryEngineSpec.get_extra_table_metadata(
database,
Table("some_table", "some_schema"),
)
self.assertEqual(result, expected_result)

View File

@ -25,7 +25,7 @@ from sqlalchemy.sql import select
from superset.db_engine_specs.presto import PrestoEngineSpec
from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
from superset.sql_parse import ParsedQuery
from superset.sql_parse import ParsedQuery, Table
from superset.utils.database import get_example_database
from tests.integration_tests.db_engine_specs.base_tests import TestDbEngineSpec
@ -549,7 +549,7 @@ class TestPrestoDbEngineSpec(TestDbEngineSpec):
self.assertEqual(actual_data, expected_data)
self.assertEqual(actual_expanded_cols, expected_expanded_cols)
def test_presto_extra_table_metadata(self):
def test_presto_get_extra_table_metadata(self):
database = mock.Mock()
database.get_indexes = mock.Mock(
return_value=[{"column_names": ["ds", "hour"]}]
@ -558,8 +558,9 @@ class TestPrestoDbEngineSpec(TestDbEngineSpec):
df = pd.DataFrame({"ds": ["01-01-19"], "hour": [1]})
database.get_df = mock.Mock(return_value=df)
PrestoEngineSpec.get_create_view = mock.Mock(return_value=None)
result = PrestoEngineSpec.extra_table_metadata(
database, "test_table", "test_schema"
result = PrestoEngineSpec.get_extra_table_metadata(
database,
Table("test_table", "test_schema"),
)
assert result["partitions"]["cols"] == ["ds", "hour"]
assert result["partitions"]["latest"] == {"ds": "01-01-19", "hour": 1}

View File

@ -87,6 +87,7 @@ def app(request: SubRequest) -> Iterator[SupersetApp]:
app.config["WTF_CSRF_ENABLED"] = False
app.config["PREVENT_UNSAFE_DB_CONNECTIONS"] = False
app.config["TESTING"] = True
app.config["RATELIMIT_ENABLED"] = False
# loop over extra configs passed in by tests
# and update the app config
@ -146,6 +147,7 @@ def full_api_access(mocker: MockFixture) -> Iterator[None]:
"flask_appbuilder.security.decorators.verify_jwt_in_request",
return_value=True,
)
mocker.patch.object(security_manager, "is_item_public", return_value=True)
mocker.patch.object(security_manager, "has_access", return_value=True)
mocker.patch.object(security_manager, "can_access_all_databases", return_value=True)

View File

@ -14,6 +14,11 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=unused-argument, import-outside-toplevel, line-too-long, invalid-name
from __future__ import annotations
import json
from datetime import datetime
from io import BytesIO
@ -30,10 +35,11 @@ from sqlalchemy.orm.session import Session
from superset import db
from superset.commands.database.csv_import import CSVImportCommand
from superset.db_engine_specs.sqlite import SqliteEngineSpec
from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
from superset.exceptions import SupersetSecurityException
from superset.sql_parse import Table
from tests.unit_tests.fixtures.common import create_csv_file
# pylint: disable=unused-argument, import-outside-toplevel, line-too-long
def test_filter_by_uuid(
session: Session,
@ -1167,3 +1173,167 @@ def test_csv_upload_file_extension_valid(
content_type="multipart/form-data",
)
assert response.status_code == 200
def test_table_extra_metadata_happy_path(
    mocker: MockFixture,
    client: Any,
    full_api_access: None,
) -> None:
    """
    Test the `table_extra_metadata` endpoint.

    Every combination of the optional ``schema``/``catalog`` query arguments
    should be forwarded to the DB engine spec as the corresponding ``Table``.
    """
    database = mocker.MagicMock()
    database.db_engine_spec.get_extra_table_metadata.return_value = {"hello": "world"}
    mocker.patch("superset.databases.api.DatabaseDAO.find_by_id", return_value=database)
    mocker.patch("superset.databases.api.security_manager.raise_for_access")

    # (query string, expected qualified table) pairs covering all combinations
    # of the optional `schema` and `catalog` arguments.
    cases = [
        ("name=t", Table("t")),
        ("name=t&schema=s", Table("t", "s")),
        ("name=t&catalog=c", Table("t", None, "c")),
        ("name=t&schema=s&catalog=c", Table("t", "s", "c")),
    ]
    for query, expected_table in cases:
        response = client.get(f"/api/v1/database/1/table_metadata/extra/?{query}")
        # previously only the first case checked the payload; check all of them
        assert response.json == {"hello": "world"}
        database.db_engine_spec.get_extra_table_metadata.assert_called_with(
            database,
            expected_table,
        )
def test_table_extra_metadata_no_table(
    mocker: MockFixture,
    client: Any,
    full_api_access: None,
) -> None:
    """
    Test the `table_extra_metadata` endpoint when no table name is passed.

    The ``name`` query argument is required, so omitting it should produce a
    422 with a schema-validation error payload.
    """
    database = mocker.MagicMock()
    mocker.patch("superset.databases.api.DatabaseDAO.find_by_id", return_value=database)

    # only the optional arguments are passed; `name` is missing
    response = client.get("/api/v1/database/1/table_metadata/extra/?schema=s&catalog=c")
    assert response.status_code == 422
    assert response.json == {
        "errors": [
            {
                "message": "An error happened when validating the request",
                "error_type": "INVALID_PAYLOAD_SCHEMA_ERROR",
                "level": "error",
                "extra": {
                    "messages": {"name": ["Missing data for required field."]},
                    "issue_codes": [
                        {
                            "code": 1020,
                            "message": "Issue 1020 - The submitted payload has the incorrect schema.",
                        }
                    ],
                },
            }
        ]
    }
def test_table_extra_metadata_slashes(
    mocker: MockFixture,
    client: Any,
    full_api_access: None,
) -> None:
    """
    Test the `table_extra_metadata` endpoint with names that have slashes.

    Since the table name is sent as a query argument rather than a path
    segment, slashes in it should pass through unmangled.
    """
    database = mocker.MagicMock()
    database.db_engine_spec.get_extra_table_metadata.return_value = {"hello": "world"}
    mocker.patch("superset.databases.api.DatabaseDAO.find_by_id", return_value=database)
    mocker.patch("superset.databases.api.security_manager.raise_for_access")

    client.get("/api/v1/database/1/table_metadata/extra/?name=foo/bar")
    # the full name, slash included, reaches the DB engine spec
    database.db_engine_spec.get_extra_table_metadata.assert_called_with(
        database,
        Table("foo/bar"),
    )
def test_table_extra_metadata_invalid_database(
    mocker: MockFixture,
    client: Any,
    full_api_access: None,
) -> None:
    """
    Test the `table_extra_metadata` endpoint when the database is invalid.

    A lookup miss on the database ID should surface as a 404 with the
    ``DATABASE_NOT_FOUND_ERROR`` payload.
    """
    # DAO returns None => DatabaseNotFoundException is raised by the endpoint
    mocker.patch("superset.databases.api.DatabaseDAO.find_by_id", return_value=None)

    response = client.get("/api/v1/database/1/table_metadata/extra/?name=t")
    assert response.status_code == 404
    assert response.json == {
        "errors": [
            {
                "message": "No such database",
                "error_type": "DATABASE_NOT_FOUND_ERROR",
                "level": "error",
                "extra": {
                    "issue_codes": [
                        {
                            "code": 1011,
                            "message": "Issue 1011 - Superset encountered an unexpected error.",
                        },
                        {
                            "code": 1036,
                            "message": "Issue 1036 - The database was deleted.",
                        },
                    ]
                },
            }
        ]
    }
def test_table_extra_metadata_unauthorized(
    mocker: MockFixture,
    client: Any,
    full_api_access: None,
) -> None:
    """
    Test the `table_extra_metadata` endpoint when the user is unauthorized.

    A security exception must be converted into a 404 (not a 403) so that the
    existence of the table is not leaked to unauthorized users.
    """
    database = mocker.MagicMock()
    mocker.patch("superset.databases.api.DatabaseDAO.find_by_id", return_value=database)
    # simulate the security manager denying access to the table
    mocker.patch(
        "superset.databases.api.security_manager.raise_for_access",
        side_effect=SupersetSecurityException(
            SupersetError(
                error_type=SupersetErrorType.TABLE_SECURITY_ACCESS_ERROR,
                message="You don't have access to the table",
                level=ErrorLevel.ERROR,
            )
        ),
    )

    response = client.get("/api/v1/database/1/table_metadata/extra/?name=t")
    assert response.status_code == 404
    assert response.json == {
        "errors": [
            {
                "message": "No such table",
                "error_type": "TABLE_NOT_FOUND_ERROR",
                "level": "error",
                "extra": None,
            }
        ]
    }

View File

@ -14,10 +14,13 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=import-outside-toplevel, protected-access
from __future__ import annotations
from textwrap import dedent
from typing import Any, Optional
from typing import Any
import pytest
from pytest_mock import MockFixture
@ -26,6 +29,7 @@ from sqlalchemy.dialects import sqlite
from sqlalchemy.engine.url import URL
from sqlalchemy.sql import sqltypes
from superset.sql_parse import Table
from superset.superset_typing import ResultSetColumnType, SQLAColumnType
from superset.utils.core import GenericDataType
from tests.unit_tests.db_engine_specs.utils import assert_column_spec
@ -155,7 +159,7 @@ def test_cte_query_parsing(original: types.TypeEngine, expected: str) -> None:
def test_get_column_spec(
native_type: str,
sqla_type: type[types.TypeEngine],
attrs: Optional[dict[str, Any]],
attrs: dict[str, Any] | None,
generic_type: GenericDataType,
is_dttm: bool,
) -> None:
@ -263,3 +267,39 @@ OFFSET ?"""
a
FROM my_table"""
)
def test_extra_table_metadata(mocker: MockFixture) -> None:
    """
    Test the deprecated `extra_table_metadata` method.

    Third-party DB engine specs that still implement only the legacy
    `extra_table_metadata` hook should have it invoked by
    `get_extra_table_metadata` (with a deprecation warning) — except when a
    catalog is requested, since the legacy signature has no catalog support.
    """
    from superset.db_engine_specs.base import BaseEngineSpec
    from superset.models.core import Database

    class ThirdPartyDBEngineSpec(BaseEngineSpec):
        # legacy spec: implements only the pre-SIP-95 hook
        @classmethod
        def extra_table_metadata(
            cls,
            database: Database,
            table_name: str,
            schema_name: str | None,
        ) -> dict[str, Any]:
            return {"table": table_name, "schema": schema_name}

    database = mocker.MagicMock()
    # patch the module-level `warnings` so the DeprecationWarning is observable
    warnings = mocker.patch("superset.db_engine_specs.base.warnings")

    # no catalog: the legacy hook is delegated to
    assert ThirdPartyDBEngineSpec.get_extra_table_metadata(
        database,
        Table("table", "schema"),
    ) == {"table": "table", "schema": "schema"}
    # with a catalog the legacy hook can't be trusted; empty metadata returned
    assert (
        ThirdPartyDBEngineSpec.get_extra_table_metadata(
            database,
            Table("table", "schema", "catalog"),
        )
        == {}
    )
    warnings.warn.assert_called()

View File

@ -37,6 +37,7 @@ from superset.db_engine_specs.exceptions import (
SupersetDBAPIOperationalError,
SupersetDBAPIProgrammingError,
)
from superset.sql_parse import Table
from superset.superset_typing import ResultSetColumnType, SQLAColumnType
from superset.utils.core import GenericDataType
from tests.unit_tests.db_engine_specs.utils import (
@ -310,7 +311,7 @@ def test_convert_dttm(
assert_convert_dttm(TrinoEngineSpec, target_type, expected_result, dttm)
def test_extra_table_metadata() -> None:
def test_get_extra_table_metadata() -> None:
from superset.db_engine_specs.trino import TrinoEngineSpec
db_mock = Mock()
@ -320,7 +321,10 @@ def test_extra_table_metadata() -> None:
db_mock.get_extra = Mock(return_value={})
db_mock.has_view_by_name = Mock(return_value=None)
db_mock.get_df = Mock(return_value=pd.DataFrame({"ds": ["01-01-19"], "hour": [1]}))
result = TrinoEngineSpec.extra_table_metadata(db_mock, "test_table", "test_schema")
result = TrinoEngineSpec.get_extra_table_metadata(
db_mock,
Table("test_table", "test_schema"),
)
assert result["partitions"]["cols"] == ["ds", "hour"]
assert result["partitions"]["latest"] == {"ds": "01-01-19", "hour": 1}

View File

@ -14,6 +14,9 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import csv
from datetime import datetime
from io import BytesIO, StringIO