refactor move ChartDataResult enums to common (#17399)

This commit is contained in:
ofekisr 2021-11-11 11:41:37 +02:00 committed by GitHub
parent 0257cf774d
commit 45480f7ae5
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
12 changed files with 74 additions and 72 deletions

View File

@ -67,17 +67,14 @@ from superset.charts.schemas import (
)
from superset.commands.importers.exceptions import NoValidFilesFoundError
from superset.commands.importers.v1.utils import get_contents_from_bundle
from superset.common.chart_data import ChartDataResultFormat, ChartDataResultType
from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP, RouteMethod
from superset.exceptions import QueryObjectValidationError
from superset.extensions import event_logger, security_manager
from superset.models.slice import Slice
from superset.tasks.thumbnails import cache_chart_thumbnail
from superset.utils.async_query_manager import AsyncQueryTokenException
from superset.utils.core import (
ChartDataResultFormat,
ChartDataResultType,
json_int_dttm_ser,
)
from superset.utils.core import json_int_dttm_ser
from superset.utils.screenshots import ChartScreenshot
from superset.utils.urls import get_url_path
from superset.views.base_api import (

View File

@ -31,12 +31,8 @@ from typing import Any, Dict, List, Optional, Tuple
import pandas as pd
from superset.utils.core import (
ChartDataResultFormat,
DTTM_ALIAS,
extract_dataframe_dtypes,
get_metric_name,
)
from superset.common.chart_data import ChartDataResultFormat
from superset.utils.core import DTTM_ALIAS, extract_dataframe_dtypes, get_metric_name
def get_column_key(label: Tuple[str, ...], metrics: List[str]) -> Tuple[Any, ...]:

View File

@ -23,13 +23,12 @@ from marshmallow.validate import Length, Range
from marshmallow_enum import EnumField
from superset import app
from superset.common.chart_data import ChartDataResultFormat, ChartDataResultType
from superset.common.query_context import QueryContext
from superset.db_engine_specs.base import builtin_time_grains
from superset.utils import schema as utils
from superset.utils.core import (
AnnotationType,
ChartDataResultFormat,
ChartDataResultType,
FilterOperator,
PostProcessingBoxplotWhiskerType,
PostProcessingContributionOrientation,

View File

@ -0,0 +1,40 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from enum import Enum
class ChartDataResultFormat(str, Enum):
    """Serialization format of a chart data response."""

    # The str mixin makes members compare equal to their raw string
    # values (e.g. ChartDataResultFormat.CSV == "csv").
    CSV = "csv"
    JSON = "json"
class ChartDataResultType(str, Enum):
    """Kind of payload a chart data request can ask for."""

    # The str mixin makes members compare equal to their raw string
    # values, so they can be matched directly against request params.
    COLUMNS = "columns"
    FULL = "full"
    QUERY = "query"
    RESULTS = "results"
    SAMPLES = "samples"
    TIMEGRAINS = "timegrains"
    POST_PROCESSED = "post_processed"

View File

@ -20,11 +20,11 @@ from typing import Any, Callable, cast, Dict, List, Optional, TYPE_CHECKING
from flask_babel import _
from superset import app
from superset.common.chart_data import ChartDataResultType
from superset.common.db_query_status import QueryStatus
from superset.connectors.base.models import BaseDatasource
from superset.exceptions import QueryObjectValidationError
from superset.utils.core import (
ChartDataResultType,
extract_column_dtype,
extract_dataframe_dtypes,
ExtraFiltersReasonType,

View File

@ -29,6 +29,7 @@ from typing_extensions import TypedDict
from superset import app, db, is_feature_enabled
from superset.annotation_layers.dao import AnnotationLayerDAO
from superset.charts.dao import ChartDAO
from superset.common.chart_data import ChartDataResultFormat, ChartDataResultType
from superset.common.db_query_status import QueryStatus
from superset.common.query_actions import get_query_results
from superset.common.query_object import QueryObject
@ -42,8 +43,6 @@ from superset.models.helpers import QueryResult
from superset.utils import csv
from superset.utils.cache import generate_cache_key, set_and_log_cache
from superset.utils.core import (
ChartDataResultFormat,
ChartDataResultType,
DatasourceDict,
DTTM_ALIAS,
error_msg_from_exception,

View File

@ -23,6 +23,7 @@ from flask_babel import gettext as _
from pandas import DataFrame
from superset import app, db
from superset.common.chart_data import ChartDataResultType
from superset.connectors.base.models import BaseDatasource
from superset.connectors.connector_registry import ConnectorRegistry
from superset.exceptions import QueryObjectValidationError
@ -30,7 +31,6 @@ from superset.typing import Metric, OrderBy
from superset.utils import pandas_postprocessing
from superset.utils.core import (
apply_max_row_limit,
ChartDataResultType,
DatasourceDict,
DTTM_ALIAS,
find_duplicates,

View File

@ -28,6 +28,7 @@ from sqlalchemy.orm import Session
from superset import app
from superset.commands.base import BaseCommand
from superset.commands.exceptions import CommandException
from superset.common.chart_data import ChartDataResultFormat, ChartDataResultType
from superset.extensions import feature_flag_manager, machine_auth_provider_factory
from superset.models.reports import (
ReportDataFormat,
@ -64,7 +65,6 @@ from superset.reports.notifications import create_notification
from superset.reports.notifications.base import NotificationContent
from superset.reports.notifications.exceptions import NotificationError
from superset.utils.celery import session_scope
from superset.utils.core import ChartDataResultFormat, ChartDataResultType
from superset.utils.csv import get_chart_csv_data, get_chart_dataframe
from superset.utils.screenshots import (
BaseScreenshot,

View File

@ -174,29 +174,6 @@ class GenericDataType(IntEnum):
# ROW = 7
# NOTE(review): this definition appears in the deletion hunk of
# superset/utils/core.py — this commit moves it to
# superset/common/chart_data.py (see the new-file hunk above).
class ChartDataResultFormat(str, Enum):
    """
    Chart data response format
    """
    # str mixin: members compare equal to their raw values ("csv"/"json")
    CSV = "csv"
    JSON = "json"
# NOTE(review): this definition appears in the deletion hunk of
# superset/utils/core.py — this commit moves it to
# superset/common/chart_data.py (see the new-file hunk above).
class ChartDataResultType(str, Enum):
    """
    Chart data response type
    """
    # str mixin: members compare equal to their raw string values
    COLUMNS = "columns"
    FULL = "full"
    QUERY = "query"
    RESULTS = "results"
    SAMPLES = "samples"
    TIMEGRAINS = "timegrains"
    POST_PROCESSED = "post_processed"
class DatasourceDict(TypedDict):
    """Typed mapping identifying a datasource by its type and id."""
    # datasource type discriminator — presumably e.g. "table"; confirm
    # against ConnectorRegistry usage elsewhere in the repo
    type: str
    # numeric id of the datasource record
    id: int

View File

@ -60,6 +60,7 @@ from superset import (
viz,
)
from superset.charts.dao import ChartDAO
from superset.common.chart_data import ChartDataResultFormat, ChartDataResultType
from superset.common.db_query_status import QueryStatus
from superset.connectors.base.models import BaseDatasource
from superset.connectors.connector_registry import ConnectorRegistry
@ -459,18 +460,18 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
def generate_json(
self, viz_obj: BaseViz, response_type: Optional[str] = None
) -> FlaskResponse:
if response_type == utils.ChartDataResultFormat.CSV:
if response_type == ChartDataResultFormat.CSV:
return CsvResponse(
viz_obj.get_csv(), headers=generate_download_headers("csv")
)
if response_type == utils.ChartDataResultType.QUERY:
if response_type == ChartDataResultType.QUERY:
return self.get_query_string_response(viz_obj)
if response_type == utils.ChartDataResultType.RESULTS:
if response_type == ChartDataResultType.RESULTS:
return self.get_raw_results(viz_obj)
if response_type == utils.ChartDataResultType.SAMPLES:
if response_type == ChartDataResultType.SAMPLES:
return self.get_samples(viz_obj)
payload = viz_obj.get_payload()
@ -598,11 +599,11 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
TODO: break into one endpoint for each return shape"""
response_type = utils.ChartDataResultFormat.JSON.value
responses: List[
Union[utils.ChartDataResultFormat, utils.ChartDataResultType]
] = list(utils.ChartDataResultFormat)
responses.extend(list(utils.ChartDataResultType))
response_type = ChartDataResultFormat.JSON.value
responses: List[Union[ChartDataResultFormat, ChartDataResultType]] = list(
ChartDataResultFormat
)
responses.extend(list(ChartDataResultType))
for response_option in responses:
if request.args.get(response_option) == "true":
response_type = response_option
@ -610,7 +611,7 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
# Verify user has permission to export CSV file
if (
response_type == utils.ChartDataResultFormat.CSV
response_type == ChartDataResultFormat.CSV
and not security_manager.can_access("can_csv", "Superset")
):
return json_error_response(
@ -628,7 +629,7 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
# TODO: support CSV, SQL query and other non-JSON types
if (
is_feature_enabled("GLOBAL_ASYNC_QUERIES")
and response_type == utils.ChartDataResultFormat.JSON
and response_type == ChartDataResultFormat.JSON
):
# First, look for the chart query results in the cache.
try:

View File

@ -51,15 +51,13 @@ from superset.models.core import Database, FavStar, FavStarClassName
from superset.models.dashboard import Dashboard
from superset.models.reports import ReportSchedule, ReportScheduleType
from superset.models.slice import Slice
from superset.utils import core as utils
from superset.utils.core import (
AnnotationType,
ChartDataResultFormat,
get_example_database,
get_example_default_schema,
get_main_database,
)
from superset.common.chart_data import ChartDataResultFormat, ChartDataResultType
from tests.integration_tests.base_api_tests import ApiOwnersTestCaseMixin
from tests.integration_tests.base_tests import (
@ -1239,7 +1237,7 @@ class TestChartApi(SupersetTestCase, ApiOwnersTestCaseMixin, InsertChartMixin):
"""
self.login(username="admin")
request_payload = get_query_context("birth_names")
request_payload["result_type"] = utils.ChartDataResultType.SAMPLES
request_payload["result_type"] = ChartDataResultType.SAMPLES
del request_payload["queries"][0]["row_limit"]
rv = self.post_assert_metric(CHART_DATA_URI, request_payload, "data")
response_payload = json.loads(rv.data.decode("utf-8"))
@ -1258,7 +1256,7 @@ class TestChartApi(SupersetTestCase, ApiOwnersTestCaseMixin, InsertChartMixin):
"""
self.login(username="admin")
request_payload = get_query_context("birth_names")
request_payload["result_type"] = utils.ChartDataResultType.SAMPLES
request_payload["result_type"] = ChartDataResultType.SAMPLES
request_payload["queries"][0]["row_limit"] = 10
rv = self.post_assert_metric(CHART_DATA_URI, request_payload, "data")
response_payload = json.loads(rv.data.decode("utf-8"))
@ -1276,7 +1274,7 @@ class TestChartApi(SupersetTestCase, ApiOwnersTestCaseMixin, InsertChartMixin):
"""
self.login(username="admin")
request_payload = get_query_context("birth_names")
request_payload["result_type"] = utils.ChartDataResultType.SAMPLES
request_payload["result_type"] = ChartDataResultType.SAMPLES
request_payload["queries"][0]["row_limit"] = 10000000
rv = self.post_assert_metric(CHART_DATA_URI, request_payload, "data")
response_payload = json.loads(rv.data.decode("utf-8"))
@ -1326,7 +1324,7 @@ class TestChartApi(SupersetTestCase, ApiOwnersTestCaseMixin, InsertChartMixin):
"""
self.login(username="admin")
request_payload = get_query_context("birth_names")
request_payload["result_type"] = utils.ChartDataResultType.QUERY
request_payload["result_type"] = ChartDataResultType.QUERY
rv = self.post_assert_metric(CHART_DATA_URI, request_payload, "data")
self.assertEqual(rv.status_code, 200)
@ -1453,7 +1451,7 @@ class TestChartApi(SupersetTestCase, ApiOwnersTestCaseMixin, InsertChartMixin):
request_payload["queries"][0]["filters"] = [
{"col": "non_existent_filter", "op": "==", "val": "foo"},
]
request_payload["result_type"] = utils.ChartDataResultType.QUERY
request_payload["result_type"] = ChartDataResultType.QUERY
rv = self.post_assert_metric(CHART_DATA_URI, request_payload, "data")
self.assertEqual(rv.status_code, 200)
response_payload = json.loads(rv.data.decode("utf-8"))
@ -1532,7 +1530,7 @@ class TestChartApi(SupersetTestCase, ApiOwnersTestCaseMixin, InsertChartMixin):
"""
self.login(username="admin")
request_payload = get_query_context("birth_names")
request_payload["result_type"] = utils.ChartDataResultType.QUERY
request_payload["result_type"] = ChartDataResultType.QUERY
request_payload["queries"][0]["filters"] = [
{"col": "gender", "op": "==", "val": "boy"}
]
@ -1574,7 +1572,7 @@ class TestChartApi(SupersetTestCase, ApiOwnersTestCaseMixin, InsertChartMixin):
class QueryContext:
result_format = ChartDataResultFormat.JSON
result_type = utils.ChartDataResultType.FULL
result_type = ChartDataResultType.FULL
cmd_run_val = {
"query_context": QueryContext(),
@ -1585,7 +1583,7 @@ class TestChartApi(SupersetTestCase, ApiOwnersTestCaseMixin, InsertChartMixin):
ChartDataCommand, "run", return_value=cmd_run_val
) as patched_run:
request_payload = get_query_context("birth_names")
request_payload["result_type"] = utils.ChartDataResultType.FULL
request_payload["result_type"] = ChartDataResultType.FULL
rv = self.post_assert_metric(CHART_DATA_URI, request_payload, "data")
self.assertEqual(rv.status_code, 200)
data = json.loads(rv.data.decode("utf-8"))
@ -1997,8 +1995,8 @@ class TestChartApi(SupersetTestCase, ApiOwnersTestCaseMixin, InsertChartMixin):
self.login(username="admin")
request_payload = get_query_context("birth_names")
request_payload["queries"] = [
{"result_type": utils.ChartDataResultType.TIMEGRAINS},
{"result_type": utils.ChartDataResultType.COLUMNS},
{"result_type": ChartDataResultType.TIMEGRAINS},
{"result_type": ChartDataResultType.COLUMNS},
]
rv = self.post_assert_metric(CHART_DATA_URI, request_payload, "data")
response_payload = json.loads(rv.data.decode("utf-8"))

View File

@ -24,18 +24,13 @@ from pandas import DateOffset
from superset import db
from superset.charts.schemas import ChartDataQueryContextSchema
from superset.common.chart_data import ChartDataResultFormat, ChartDataResultType
from superset.common.query_context import QueryContext
from superset.common.query_object import QueryObject
from superset.connectors.connector_registry import ConnectorRegistry
from superset.connectors.sqla.models import SqlMetric
from superset.extensions import cache_manager
from superset.utils.core import (
AdhocMetricExpressionType,
backend,
ChartDataResultFormat,
ChartDataResultType,
TimeRangeEndpoint,
)
from superset.utils.core import AdhocMetricExpressionType, backend, TimeRangeEndpoint
from tests.integration_tests.base_tests import SupersetTestCase
from tests.integration_tests.fixtures.birth_names_dashboard import (
load_birth_names_dashboard_with_slices,