refactor: Removes the legacy dataset editor (#31976)

This commit is contained in:
Michael S. Molina 2025-01-24 14:52:22 -03:00 committed by GitHub
parent 6eb87e04c0
commit 687f762457
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
25 changed files with 25 additions and 1283 deletions

View File

@ -64,8 +64,6 @@ These features flags are **safe for production**. They have been tested and will
### Flags on the path to feature launch and flag deprecation/removal ### Flags on the path to feature launch and flag deprecation/removal
- DASHBOARD_VIRTUALIZATION - DASHBOARD_VIRTUALIZATION
- DRILL_BY
- DISABLE_LEGACY_DATASOURCE_EDITOR
### Flags retained for runtime configuration ### Flags retained for runtime configuration
@ -79,6 +77,7 @@ independently. This new framework will also allow for non-boolean configurations
- ALLOW_ADHOC_SUBQUERY - ALLOW_ADHOC_SUBQUERY
- DASHBOARD_RBAC [(docs)](https://superset.apache.org/docs/using-superset/creating-your-first-dashboard#manage-access-to-dashboards) - DASHBOARD_RBAC [(docs)](https://superset.apache.org/docs/using-superset/creating-your-first-dashboard#manage-access-to-dashboards)
- DATAPANEL_CLOSED_BY_DEFAULT - DATAPANEL_CLOSED_BY_DEFAULT
- DRILL_BY
- DRUID_JOINS - DRUID_JOINS
- EMBEDDABLE_CHARTS - EMBEDDABLE_CHARTS
- EMBEDDED_SUPERSET - EMBEDDED_SUPERSET

View File

@ -80,7 +80,6 @@ describe('SqlLab query panel', () => {
it.skip('successfully saves a query', () => { it.skip('successfully saves a query', () => {
cy.intercept('api/v1/database/**/tables/**').as('getTables'); cy.intercept('api/v1/database/**/tables/**').as('getTables');
cy.intercept('savedqueryviewapi/**').as('getSavedQuery');
const query = const query =
'SELECT ds, gender, name, num FROM main.birth_names ORDER BY name LIMIT 3'; 'SELECT ds, gender, name, num FROM main.birth_names ORDER BY name LIMIT 3';

View File

@ -20,6 +20,7 @@ import '@cypress/code-coverage/support';
import '@applitools/eyes-cypress/commands'; import '@applitools/eyes-cypress/commands';
import failOnConsoleError from 'cypress-fail-on-console-error'; import failOnConsoleError from 'cypress-fail-on-console-error';
import { expect } from 'chai'; import { expect } from 'chai';
import rison from 'rison';
/* eslint-disable @typescript-eslint/no-explicit-any */ /* eslint-disable @typescript-eslint/no-explicit-any */
@ -183,8 +184,12 @@ Cypress.Commands.add('login', () => {
}); });
Cypress.Commands.add('visitChartByName', name => { Cypress.Commands.add('visitChartByName', name => {
cy.request(`/chart/api/read?_flt_3_slice_name=${name}`).then(response => { const query = rison.encode({
cy.visit(`${BASE_EXPLORE_URL}{"slice_id": ${response.body.pks[0]}}`); columns: ['id'],
filters: [{ col: 'slice_name', opr: 'eq', value: name }],
});
cy.request(`/api/v1/chart?q=${query}`).then(response => {
cy.visit(`${BASE_EXPLORE_URL}{"slice_id": ${response.body.result[0].id}}`);
}); });
}); });

View File

@ -35,7 +35,6 @@ export enum FeatureFlag {
DashboardVirtualization = 'DASHBOARD_VIRTUALIZATION', DashboardVirtualization = 'DASHBOARD_VIRTUALIZATION',
DashboardRbac = 'DASHBOARD_RBAC', DashboardRbac = 'DASHBOARD_RBAC',
DatapanelClosedByDefault = 'DATAPANEL_CLOSED_BY_DEFAULT', DatapanelClosedByDefault = 'DATAPANEL_CLOSED_BY_DEFAULT',
DisableLegacyDatasourceEditor = 'DISABLE_LEGACY_DATASOURCE_EDITOR',
/** @deprecated */ /** @deprecated */
DrillToDetail = 'DRILL_TO_DETAIL', DrillToDetail = 'DRILL_TO_DETAIL',
DrillBy = 'DRILL_BY', DrillBy = 'DRILL_BY',

View File

@ -30,7 +30,6 @@
"cache_timeout": null, "cache_timeout": null,
"params": null, "params": null,
"perm": "[examples].[birth_names](id:3)", "perm": "[examples].[birth_names](id:3)",
"edit_url": "/tablemodelview/edit/3",
"sql": null, "sql": null,
"columns": [ "columns": [
{ {

View File

@ -199,6 +199,5 @@ export default {
['["num_girls", false]', 'num_girls [desc]'], ['["num_girls", false]', 'num_girls [desc]'],
], ],
type: 'table', type: 'table',
edit_url: '/tablemodelview/edit/7',
}, },
}; };

View File

@ -101,11 +101,6 @@ describe('DatasourceModal', () => {
expect(screen.getByTestId('datasource-editor')).toBeInTheDocument(); expect(screen.getByTestId('datasource-editor')).toBeInTheDocument();
}); });
it('renders a legacy data source btn', () => {
const button = screen.getByTestId('datasource-modal-legacy-edit');
expect(button).toBeInTheDocument();
});
it('disables the save button when the datasource is managed externally', () => { it('disables the save button when the datasource is managed externally', () => {
// the render is currently in a before operation, so it needs to be cleaned up // the render is currently in a before operation, so it needs to be cleaned up
// we could alternatively move all the renders back into the tests or find a better // we could alternatively move all the renders back into the tests or find a better

View File

@ -20,9 +20,7 @@ import { FunctionComponent, useState, useRef } from 'react';
import Alert from 'src/components/Alert'; import Alert from 'src/components/Alert';
import Button from 'src/components/Button'; import Button from 'src/components/Button';
import { import {
FeatureFlag,
isDefined, isDefined,
isFeatureEnabled,
Metric, Metric,
styled, styled,
SupersetClient, SupersetClient,
@ -271,10 +269,6 @@ const DatasourceModal: FunctionComponent<DatasourceModalProps> = ({
}); });
}; };
const showLegacyDatasourceEditor = !isFeatureEnabled(
FeatureFlag.DisableLegacyDatasourceEditor,
);
return ( return (
<StyledDatasourceModal <StyledDatasourceModal
show={show} show={show}
@ -288,20 +282,6 @@ const DatasourceModal: FunctionComponent<DatasourceModalProps> = ({
maskClosable={!isEditing} maskClosable={!isEditing}
footer={ footer={
<> <>
{showLegacyDatasourceEditor && (
<Button
buttonSize="small"
buttonStyle="default"
data-test="datasource-modal-legacy-edit"
className="m-r-5"
onClick={() => {
window.location.href =
currentDatasource.edit_url || currentDatasource.url;
}}
>
{t('Use legacy datasource editor')}
</Button>
)}
<Button <Button
data-test="datasource-modal-cancel" data-test="datasource-modal-cancel"
buttonSize="small" buttonSize="small"

View File

@ -462,10 +462,6 @@ DEFAULT_FEATURE_FLAGS: dict[str, bool] = {
# When using a recent version of Druid that supports JOINs turn this on # When using a recent version of Druid that supports JOINs turn this on
"DRUID_JOINS": False, "DRUID_JOINS": False,
"DYNAMIC_PLUGINS": False, "DYNAMIC_PLUGINS": False,
# With Superset 2.0, we are updating the default so that the legacy datasource
# editor no longer shows. Currently this is set to false so that the editor
    # option does show, but we will be deprecating it.
"DISABLE_LEGACY_DATASOURCE_EDITOR": True,
"ENABLE_TEMPLATE_PROCESSING": False, "ENABLE_TEMPLATE_PROCESSING": False,
# Allow for javascript controls components # Allow for javascript controls components
# this enables programmers to customize certain charts (like the # this enables programmers to customize certain charts (like the

View File

@ -16,265 +16,15 @@
# under the License. # under the License.
"""Views used by the SqlAlchemy connector""" """Views used by the SqlAlchemy connector"""
import logging from flask_appbuilder import expose
import re
from flask import flash, redirect
from flask_appbuilder import CompactCRUDMixin, expose
from flask_appbuilder.fields import QuerySelectField
from flask_appbuilder.fieldwidgets import Select2Widget
from flask_appbuilder.models.sqla.interface import SQLAInterface
from flask_appbuilder.security.decorators import ( from flask_appbuilder.security.decorators import (
has_access, has_access,
permission_name, permission_name,
) )
from flask_babel import lazy_gettext as _
from markupsafe import Markup
from wtforms.validators import DataRequired, Regexp
from superset import db from superset.constants import MODEL_VIEW_RW_METHOD_PERMISSION_MAP
from superset.connectors.sqla import models
from superset.constants import MODEL_VIEW_RW_METHOD_PERMISSION_MAP, RouteMethod
from superset.superset_typing import FlaskResponse from superset.superset_typing import FlaskResponse
from superset.utils import core as utils from superset.views.base import BaseSupersetView
from superset.views.base import (
BaseSupersetView,
DatasourceFilter,
DeleteMixin,
DeprecateModelViewMixin,
ListWidgetWithCheckboxes,
SupersetModelView,
YamlExportMixin,
)
logger = logging.getLogger(__name__)
class SelectDataRequired(DataRequired): # pylint: disable=too-few-public-methods
"""
Select required flag on the input field will not work well on Chrome
Console error:
An invalid form control with name='tables' is not focusable.
This makes a simple override to the DataRequired to be used specifically with
select fields
"""
field_flags = ()
class TableColumnInlineView( # pylint: disable=too-many-ancestors
DeprecateModelViewMixin,
CompactCRUDMixin,
SupersetModelView,
):
datamodel = SQLAInterface(models.TableColumn)
# TODO TODO, review need for this on related_views
class_permission_name = "Dataset"
method_permission_name = MODEL_VIEW_RW_METHOD_PERMISSION_MAP
include_route_methods = RouteMethod.RELATED_VIEW_SET | RouteMethod.API_SET
list_title = _("Columns")
show_title = _("Show Column")
add_title = _("Add Column")
edit_title = _("Edit Column")
can_delete = False
list_widget = ListWidgetWithCheckboxes
edit_columns = [
"column_name",
"verbose_name",
"description",
"type",
"advanced_data_type",
"groupby",
"filterable",
"table",
"expression",
"is_dttm",
"python_date_format",
"extra",
]
add_columns = edit_columns
list_columns = [
"column_name",
"verbose_name",
"type",
"advanced_data_type",
"groupby",
"filterable",
"is_dttm",
"extra",
]
page_size = 500
description_columns = {
"is_dttm": _(
"Whether to make this column available as a "
"[Time Granularity] option, column has to be DATETIME or "
"DATETIME-like"
),
"filterable": _(
"Whether this column is exposed in the `Filters` section "
"of the explore view."
),
"type": _(
"The data type that was inferred by the database. "
"It may be necessary to input a type manually for "
"expression-defined columns in some cases. In most case "
"users should not need to alter this."
),
"expression": utils.markdown(
"a valid, *non-aggregating* SQL expression as supported by the "
"underlying backend. Example: `substr(name, 1, 1)`",
True,
),
"python_date_format": utils.markdown(
Markup(
"The pattern of timestamp format. For strings use "
'<a href="https://docs.python.org/2/library/'
'datetime.html#strftime-strptime-behavior">'
"python datetime string pattern</a> expression which needs to "
'adhere to the <a href="https://en.wikipedia.org/wiki/ISO_8601">'
"ISO 8601</a> standard to ensure that the lexicographical ordering "
"coincides with the chronological ordering. If the timestamp "
"format does not adhere to the ISO 8601 standard you will need to "
"define an expression and type for transforming the string into a "
"date or timestamp. Note currently time zones are not supported. "
"If time is stored in epoch format, put `epoch_s` or `epoch_ms`."
"If no pattern is specified we fall back to using the optional "
"defaults on a per database/column name level via the extra parameter."
""
),
True,
),
"extra": utils.markdown(
"Extra data to specify column metadata. Currently supports "
        'certification data of the format: `{ "certification": { "certified_by": '
'"Taylor Swift", "details": "This column is the source of truth." '
"} }`. This should be modified from the edit datasource model in "
"Explore to ensure correct formatting.",
True,
),
}
label_columns = {
"column_name": _("Column"),
"verbose_name": _("Verbose Name"),
"description": _("Description"),
"groupby": _("Groupable"),
"filterable": _("Filterable"),
"table": _("Table"),
"expression": _("Expression"),
"is_dttm": _("Is temporal"),
"python_date_format": _("Datetime Format"),
"type": _("Type"),
"advanced_data_type": _("Business Data Type"),
}
validators_columns = {
"python_date_format": [
# Restrict viable values to epoch_s, epoch_ms, or a strftime format
            # which adheres to the ISO 8601 format (without time zone).
Regexp(
re.compile(
r"""
^(
epoch_s|epoch_ms|
(?P<date>%Y(-%m(-%d)?)?)([\sT](?P<time>%H(:%M(:%S(\.%f)?)?)?))?
)$
""",
re.VERBOSE,
),
message=_("Invalid date/timestamp format"),
)
]
}
add_form_extra_fields = {
"table": QuerySelectField(
"Table",
query_func=lambda: db.session.query(models.SqlaTable),
allow_blank=True,
widget=Select2Widget(extra_classes="readonly"),
)
}
edit_form_extra_fields = add_form_extra_fields
class SqlMetricInlineView( # pylint: disable=too-many-ancestors
DeprecateModelViewMixin,
CompactCRUDMixin,
SupersetModelView,
):
datamodel = SQLAInterface(models.SqlMetric)
class_permission_name = "Dataset"
method_permission_name = MODEL_VIEW_RW_METHOD_PERMISSION_MAP
include_route_methods = RouteMethod.RELATED_VIEW_SET | RouteMethod.API_SET
list_title = _("Metrics")
show_title = _("Show Metric")
add_title = _("Add Metric")
edit_title = _("Edit Metric")
list_columns = ["metric_name", "verbose_name", "metric_type", "extra"]
edit_columns = [
"metric_name",
"description",
"verbose_name",
"metric_type",
"expression",
"table",
"d3format",
"currency",
"extra",
"warning_text",
]
description_columns = {
"expression": utils.markdown(
"a valid, *aggregating* SQL expression as supported by the "
"underlying backend. Example: `count(DISTINCT userid)`",
True,
),
"d3format": utils.markdown(
"d3 formatting string as defined [here]"
"(https://github.com/d3/d3-format/blob/master/README.md#format). "
"For instance, this default formatting applies in the Table "
"visualization and allow for different metric to use different "
"formats",
True,
),
"extra": utils.markdown(
"Extra data to specify metric metadata. Currently supports "
'metadata of the format: `{ "certification": { "certified_by": '
'"Data Platform Team", "details": "This metric is the source of truth." '
'}, "warning_markdown": "This is a warning." }`. This should be modified '
"from the edit datasource model in Explore to ensure correct formatting.",
True,
),
}
add_columns = edit_columns
page_size = 500
label_columns = {
"metric_name": _("Metric"),
"description": _("Description"),
"verbose_name": _("Verbose Name"),
"metric_type": _("Type"),
"expression": _("SQL Expression"),
"table": _("Table"),
"d3format": _("D3 Format"),
"extra": _("Extra"),
"warning_text": _("Warning Message"),
}
add_form_extra_fields = {
"table": QuerySelectField(
"Table",
query_func=lambda: db.session.query(models.SqlaTable),
allow_blank=True,
widget=Select2Widget(extra_classes="readonly"),
)
}
edit_form_extra_fields = add_form_extra_fields
class RowLevelSecurityView(BaseSupersetView): class RowLevelSecurityView(BaseSupersetView):
@ -288,187 +38,9 @@ class RowLevelSecurityView(BaseSupersetView):
return super().render_app_template() return super().render_app_template()
class TableModelView( # pylint: disable=too-many-ancestors class TableModelView(BaseSupersetView):
DeprecateModelViewMixin, SupersetModelView, DeleteMixin, YamlExportMixin
):
datamodel = SQLAInterface(models.SqlaTable)
class_permission_name = "Dataset" class_permission_name = "Dataset"
method_permission_name = MODEL_VIEW_RW_METHOD_PERMISSION_MAP method_permission_name = MODEL_VIEW_RW_METHOD_PERMISSION_MAP
include_route_methods = RouteMethod.CRUD_SET
list_title = _("Tables")
show_title = _("Show Table")
add_title = _("Import a table definition")
edit_title = _("Edit Table")
list_columns = ["link", "database_name", "changed_by_", "modified"]
order_columns = ["modified"]
add_columns = ["database", "schema", "table_name"]
edit_columns = [
"table_name",
"sql",
"filter_select_enabled",
"fetch_values_predicate",
"database",
"schema",
"description",
"owners",
"main_dttm_col",
"default_endpoint",
"offset",
"cache_timeout",
"is_sqllab_view",
"template_params",
"extra",
"normalize_columns",
"always_filter_main_dttm",
]
base_filters = [["id", DatasourceFilter, lambda: []]]
show_columns = edit_columns + ["perm", "slices"]
related_views = [
TableColumnInlineView,
SqlMetricInlineView,
]
base_order = ("changed_on", "desc")
search_columns = ("database", "schema", "table_name", "owners", "is_sqllab_view")
description_columns = {
"slices": _(
"The list of charts associated with this table. By "
"altering this datasource, you may change how these associated "
"charts behave. "
"Also note that charts need to point to a datasource, so "
"this form will fail at saving if removing charts from a "
"datasource. If you want to change the datasource for a chart, "
"overwrite the chart from the 'explore view'"
),
"offset": _("Timezone offset (in hours) for this datasource"),
"table_name": _("Name of the table that exists in the source database"),
"schema": _(
"Schema, as used only in some databases like Postgres, Redshift and DB2"
),
"description": Markup(
'Supports <a href="https://daringfireball.net/projects/markdown/">'
"markdown</a>"
),
"sql": _(
"This fields acts a Superset view, meaning that Superset will "
"run a query against this string as a subquery."
),
"fetch_values_predicate": _(
"Predicate applied when fetching distinct value to "
"populate the filter control component. Supports "
"jinja template syntax. Applies only when "
"`Enable Filter Select` is on."
),
"default_endpoint": _(
"Redirects to this endpoint when clicking on the table "
"from the table list"
),
"filter_select_enabled": _(
"Whether to populate the filter's dropdown in the explore "
"view's filter section with a list of distinct values fetched "
"from the backend on the fly"
),
"is_sqllab_view": _(
"Whether the table was generated by the 'Visualize' flow in SQL Lab"
),
"template_params": _(
"A set of parameters that become available in the query using "
"Jinja templating syntax"
),
"cache_timeout": _(
"Duration (in seconds) of the caching timeout for this table. "
"A timeout of 0 indicates that the cache never expires. "
"Note this defaults to the database timeout if undefined."
),
"extra": utils.markdown(
"Extra data to specify table metadata. Currently supports "
'metadata of the format: `{ "certification": { "certified_by": '
'"Data Platform Team", "details": "This table is the source of truth." '
'}, "warning_markdown": "This is a warning." }`.',
True,
),
"normalize_columns": _(
"Allow column names to be changed to case insensitive format, "
"if supported (e.g. Oracle, Snowflake)."
),
"always_filter_main_dttm": _(
"Datasets can have a main temporal column (main_dttm_col), "
"but can also have secondary time columns. "
"When this attribute is true, whenever the secondary columns are filtered, "
"the same filter is applied to the main datetime column."
),
}
label_columns = {
"slices": _("Associated Charts"),
"link": _("Table"),
"changed_by_": _("Changed By"),
"database": _("Database"),
"database_name": _("Database"),
"changed_on_": _("Last Changed"),
"filter_select_enabled": _("Enable Filter Select"),
"schema": _("Schema"),
"default_endpoint": _("Default Endpoint"),
"offset": _("Offset"),
"cache_timeout": _("Cache Timeout"),
"table_name": _("Table Name"),
"fetch_values_predicate": _("Fetch Values Predicate"),
"owners": _("Owners"),
"main_dttm_col": _("Main Datetime Column"),
"description": _("Description"),
"is_sqllab_view": _("SQL Lab View"),
"template_params": _("Template parameters"),
"extra": _("Extra"),
"modified": _("Modified"),
}
edit_form_extra_fields = {
"database": QuerySelectField(
"Database",
query_func=lambda: db.session.query(models.Database),
get_pk_func=lambda item: item.id,
widget=Select2Widget(extra_classes="readonly"),
)
}
def post_add(
self,
item: "TableModelView",
flash_message: bool = True,
fetch_metadata: bool = True,
) -> None:
if fetch_metadata:
item.fetch_metadata()
if flash_message:
flash(
_(
"The table was created. "
"As part of this two-phase configuration "
"process, you should now click the edit button by "
"the new table to configure it."
),
"info",
)
def post_update(self, item: "TableModelView") -> None:
self.post_add(item, flash_message=False, fetch_metadata=False)
def _delete(self, pk: int) -> None:
DeleteMixin._delete(self, pk)
@expose(
"/edit/<pk>",
methods=(
"GET",
"POST",
),
)
@has_access
def edit(self, pk: str) -> FlaskResponse:
"""Simple hack to redirect to explore view after saving"""
resp = super().edit(pk)
if isinstance(resp, str):
return resp
return redirect(f"/explore/?datasource_type=table&datasource_id={pk}")
@expose("/list/") @expose("/list/")
@has_access @has_access

View File

@ -882,7 +882,7 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
@check_table_access @check_table_access
@safe @safe
@statsd_metrics @statsd_metrics
@deprecated(deprecated_in="4.0", removed_in="5.0") @deprecated(deprecated_in="4.0")
@event_logger.log_this_with_context( @event_logger.log_this_with_context(
action=lambda self, *args, **kwargs: f"{self.__class__.__name__}" action=lambda self, *args, **kwargs: f"{self.__class__.__name__}"
f".table_extra_metadata_deprecated", f".table_extra_metadata_deprecated",

View File

@ -131,8 +131,6 @@ class SupersetAppInitializer: # pylint: disable=too-many-public-methods
from superset.charts.data.api import ChartDataRestApi from superset.charts.data.api import ChartDataRestApi
from superset.connectors.sqla.views import ( from superset.connectors.sqla.views import (
RowLevelSecurityView, RowLevelSecurityView,
SqlMetricInlineView,
TableColumnInlineView,
TableModelView, TableModelView,
) )
from superset.css_templates.api import CssTemplateRestApi from superset.css_templates.api import CssTemplateRestApi
@ -163,7 +161,7 @@ class SupersetAppInitializer: # pylint: disable=too-many-public-methods
from superset.views.all_entities import TaggedObjectsModelView from superset.views.all_entities import TaggedObjectsModelView
from superset.views.annotations import AnnotationLayerView from superset.views.annotations import AnnotationLayerView
from superset.views.api import Api from superset.views.api import Api
from superset.views.chart.views import SliceAsync, SliceModelView from superset.views.chart.views import SliceModelView
from superset.views.core import Superset from superset.views.core import Superset
from superset.views.css_templates import CssTemplateModelView from superset.views.css_templates import CssTemplateModelView
from superset.views.dashboard.views import ( from superset.views.dashboard.views import (
@ -179,7 +177,6 @@ class SupersetAppInitializer: # pylint: disable=too-many-public-methods
from superset.views.log.views import LogModelView from superset.views.log.views import LogModelView
from superset.views.sql_lab.views import ( from superset.views.sql_lab.views import (
SavedQueryView, SavedQueryView,
SavedQueryViewApi,
TableSchemaView, TableSchemaView,
TabStateView, TabStateView,
) )
@ -300,12 +297,8 @@ class SupersetAppInitializer: # pylint: disable=too-many-public-methods
appbuilder.add_view_no_menu(ExploreView) appbuilder.add_view_no_menu(ExploreView)
appbuilder.add_view_no_menu(ExplorePermalinkView) appbuilder.add_view_no_menu(ExplorePermalinkView)
appbuilder.add_view_no_menu(SavedQueryView) appbuilder.add_view_no_menu(SavedQueryView)
appbuilder.add_view_no_menu(SavedQueryViewApi)
appbuilder.add_view_no_menu(SliceAsync)
appbuilder.add_view_no_menu(SqllabView) appbuilder.add_view_no_menu(SqllabView)
appbuilder.add_view_no_menu(SqlMetricInlineView)
appbuilder.add_view_no_menu(Superset) appbuilder.add_view_no_menu(Superset)
appbuilder.add_view_no_menu(TableColumnInlineView)
appbuilder.add_view_no_menu(TableModelView) appbuilder.add_view_no_menu(TableModelView)
appbuilder.add_view_no_menu(TableSchemaView) appbuilder.add_view_no_menu(TableSchemaView)
appbuilder.add_view_no_menu(TabStateView) appbuilder.add_view_no_menu(TabStateView)

View File

@ -410,7 +410,7 @@ class SQLStatement(BaseSQLStatement[exp.Expression]):
return self._fallback_formatting() return self._fallback_formatting()
@deprecated(deprecated_in="4.0", removed_in="5.0") @deprecated(deprecated_in="4.0")
def _fallback_formatting(self) -> str: def _fallback_formatting(self) -> str:
""" """
Format SQL without a specific dialect. Format SQL without a specific dialect.

View File

@ -23,7 +23,6 @@ import traceback
from datetime import datetime from datetime import datetime
from typing import Any, Callable from typing import Any, Callable
import yaml
from babel import Locale from babel import Locale
from flask import ( from flask import (
abort, abort,
@ -34,16 +33,10 @@ from flask import (
Response, Response,
session, session,
) )
from flask_appbuilder import BaseView, expose, Model, ModelView from flask_appbuilder import BaseView, Model, ModelView
from flask_appbuilder.actions import action from flask_appbuilder.actions import action
from flask_appbuilder.baseviews import expose_api
from flask_appbuilder.forms import DynamicForm from flask_appbuilder.forms import DynamicForm
from flask_appbuilder.models.sqla.filters import BaseFilter from flask_appbuilder.models.sqla.filters import BaseFilter
from flask_appbuilder.security.decorators import (
has_access,
has_access_api,
permission_name,
)
from flask_appbuilder.security.sqla.models import User from flask_appbuilder.security.sqla.models import User
from flask_appbuilder.widgets import ListWidget from flask_appbuilder.widgets import ListWidget
from flask_babel import get_locale, gettext as __ from flask_babel import get_locale, gettext as __
@ -65,7 +58,6 @@ from superset.connectors.sqla import models
from superset.db_engine_specs import get_available_engine_specs from superset.db_engine_specs import get_available_engine_specs
from superset.db_engine_specs.gsheets import GSheetsEngineSpec from superset.db_engine_specs.gsheets import GSheetsEngineSpec
from superset.extensions import cache_manager from superset.extensions import cache_manager
from superset.models.helpers import ImportExportMixin
from superset.reports.models import ReportRecipientType from superset.reports.models import ReportRecipientType
from superset.superset_typing import FlaskResponse from superset.superset_typing import FlaskResponse
from superset.translations.utils import get_language_pack from superset.translations.utils import get_language_pack
@ -369,65 +361,6 @@ class SupersetListWidget(ListWidget): # pylint: disable=too-few-public-methods
template = "superset/fab_overrides/list.html" template = "superset/fab_overrides/list.html"
class DeprecateModelViewMixin:
@expose("/add", methods=["GET", "POST"])
@has_access
@deprecated(eol_version="5.0.0")
def add(self) -> FlaskResponse:
return super().add() # type: ignore
@expose("/show/<pk>", methods=["GET"])
@has_access
@deprecated(eol_version="5.0.0")
def show(self, pk: int) -> FlaskResponse:
return super().show(pk) # type: ignore
@expose("/edit/<pk>", methods=["GET", "POST"])
@has_access
@deprecated(eol_version="5.0.0")
def edit(self, pk: int) -> FlaskResponse:
return super().edit(pk) # type: ignore
@expose("/delete/<pk>", methods=["GET", "POST"])
@has_access
@deprecated(eol_version="5.0.0")
def delete(self, pk: int) -> FlaskResponse:
return super().delete(pk) # type: ignore
@expose_api(name="read", url="/api/read", methods=["GET"])
@has_access_api
@permission_name("list")
@deprecated(eol_version="5.0.0")
def api_read(self) -> FlaskResponse:
return super().api_read() # type: ignore
@expose_api(name="get", url="/api/get/<pk>", methods=["GET"])
@has_access_api
@permission_name("show")
def api_get(self, pk: int) -> FlaskResponse:
return super().api_get(pk) # type: ignore
@expose_api(name="create", url="/api/create", methods=["POST"])
@has_access_api
@permission_name("add")
def api_create(self) -> FlaskResponse:
return super().api_create() # type: ignore
@expose_api(name="update", url="/api/update/<pk>", methods=["PUT"])
@has_access_api
@permission_name("write")
@deprecated(eol_version="5.0.0")
def api_update(self, pk: int) -> FlaskResponse:
return super().api_update(pk) # type: ignore
@expose_api(name="delete", url="/api/delete/<pk>", methods=["DELETE"])
@has_access_api
@permission_name("delete")
@deprecated(eol_version="5.0.0")
def api_delete(self, pk: int) -> FlaskResponse:
return super().delete(pk) # type: ignore
class SupersetModelView(ModelView): class SupersetModelView(ModelView):
page_size = 100 page_size = 100
list_widget = SupersetListWidget list_widget = SupersetListWidget
@ -446,38 +379,6 @@ class SupersetModelView(ModelView):
) )
class ListWidgetWithCheckboxes(ListWidget): # pylint: disable=too-few-public-methods
"""An alternative to list view that renders Boolean fields as checkboxes
Works in conjunction with the `checkbox` view."""
template = "superset/fab_overrides/list_with_checkboxes.html"
class YamlExportMixin: # pylint: disable=too-few-public-methods
"""
Override this if you want a dict response instead, with a certain key.
Used on DatabaseView for cli compatibility
"""
yaml_dict_key: str | None = None
@action("yaml_export", __("Export to YAML"), __("Export to YAML?"), "fa-download")
def yaml_export(
self, items: ImportExportMixin | list[ImportExportMixin]
) -> FlaskResponse:
if not isinstance(items, list):
items = [items]
data = [t.export_to_dict() for t in items]
return Response(
yaml.safe_dump({self.yaml_dict_key: data} if self.yaml_dict_key else data),
headers=generate_download_headers("yaml"),
mimetype="application/text",
)
class DeleteMixin: # pylint: disable=too-few-public-methods class DeleteMixin: # pylint: disable=too-few-public-methods
def _delete(self: BaseView, primary_key: int) -> None: def _delete(self: BaseView, primary_key: int) -> None:
""" """

View File

@ -1,92 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from flask_babel import lazy_gettext as _
from markupsafe import Markup
from superset.dashboards.filters import DashboardAccessFilter
from superset.views.chart.filters import SliceFilter
class SliceMixin: # pylint: disable=too-few-public-methods
list_title = _("Charts")
show_title = _("Show Chart")
add_title = _("Add Chart")
edit_title = _("Edit Chart")
can_add = False
search_columns = (
"slice_name",
"description",
"viz_type",
"datasource_name",
"owners",
)
list_columns = ["slice_link", "viz_type", "datasource_link", "creator", "modified"]
order_columns = [
"slice_name",
"viz_type",
"datasource_link",
"modified",
"changed_on",
]
edit_columns = [
"slice_name",
"description",
"viz_type",
"owners",
"dashboards",
"params",
"cache_timeout",
]
base_order = ("changed_on", "desc")
description_columns = {
"description": Markup(
"The content here can be displayed as widget headers in the "
"dashboard view. Supports "
            '<a href="https://daringfireball.net/projects/markdown/">'
"markdown</a>"
),
"params": _(
"These parameters are generated dynamically when clicking "
"the save or overwrite button in the explore view. This JSON "
"object is exposed here for reference and for power users who may "
"want to alter specific parameters."
),
"cache_timeout": _(
"Duration (in seconds) of the caching timeout for this chart. "
"Note this defaults to the datasource/table timeout if undefined."
),
}
base_filters = [["id", SliceFilter, lambda: []]]
label_columns = {
"cache_timeout": _("Cache Timeout"),
"creator": _("Creator"),
"dashboards": _("Dashboards"),
"datasource_link": _("Datasource"),
"description": _("Description"),
"modified": _("Last Modified"),
"owners": _("Owners"),
"params": _("Parameters"),
"slice_link": _("Chart"),
"slice_name": _("Name"),
"table": _("Table"),
"viz_type": _("Visualization Type"),
}
add_form_query_rel_fields = {"dashboards": [["name", DashboardAccessFilter, None]]}
edit_form_query_rel_fields = add_form_query_rel_fields

View File

@ -15,48 +15,18 @@
# specific language governing permissions and limitations # specific language governing permissions and limitations
# under the License. # under the License.
from flask_appbuilder import expose, has_access from flask_appbuilder import expose, has_access
from flask_appbuilder.models.sqla.interface import SQLAInterface
from flask_babel import lazy_gettext as _
from superset import security_manager from superset.constants import MODEL_VIEW_RW_METHOD_PERMISSION_MAP
from superset.constants import MODEL_VIEW_RW_METHOD_PERMISSION_MAP, RouteMethod
from superset.models.slice import Slice
from superset.superset_typing import FlaskResponse from superset.superset_typing import FlaskResponse
from superset.utils import json from superset.views.base import BaseSupersetView
from superset.views.base import DeleteMixin, DeprecateModelViewMixin, SupersetModelView
from superset.views.chart.mixin import SliceMixin
class SliceModelView( class SliceModelView(BaseSupersetView):
DeprecateModelViewMixin, SliceMixin, SupersetModelView, DeleteMixin
): # pylint: disable=too-many-ancestors
route_base = "/chart" route_base = "/chart"
datamodel = SQLAInterface(Slice)
include_route_methods = RouteMethod.CRUD_SET | {
RouteMethod.DOWNLOAD,
RouteMethod.API_READ,
RouteMethod.API_DELETE,
}
class_permission_name = "Chart" class_permission_name = "Chart"
method_permission_name = MODEL_VIEW_RW_METHOD_PERMISSION_MAP method_permission_name = MODEL_VIEW_RW_METHOD_PERMISSION_MAP
def pre_add(self, item: "SliceModelView") -> None: @expose("/add")
json.validate_json(item.params)
def pre_update(self, item: "SliceModelView") -> None:
json.validate_json(item.params)
security_manager.raise_for_ownership(item)
def pre_delete(self, item: "SliceModelView") -> None:
security_manager.raise_for_ownership(item)
@expose(
"/add",
methods=(
"GET",
"POST",
),
)
@has_access @has_access
def add(self) -> FlaskResponse: def add(self) -> FlaskResponse:
return super().render_app_template() return super().render_app_template()
@ -65,32 +35,3 @@ class SliceModelView(
@has_access @has_access
def list(self) -> FlaskResponse: def list(self) -> FlaskResponse:
return super().render_app_template() return super().render_app_template()
class SliceAsync(SliceModelView): # pylint: disable=too-many-ancestors
route_base = "/sliceasync"
include_route_methods = {RouteMethod.API_READ}
list_columns = [
"changed_on",
"changed_on_humanized",
"creator",
"datasource_id",
"datasource_link",
"datasource_url",
"datasource_name_text",
"datasource_type",
"description",
"description_markeddown",
"edit_url",
"icons",
"id",
"modified",
"owners",
"params",
"slice_link",
"slice_name",
"slice_url",
"viz_type",
]
label_columns = {"icons": " ", "slice_link": _("Chart")}

View File

@ -1,285 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import inspect
from flask_babel import lazy_gettext as _
from markupsafe import Markup
from sqlalchemy import MetaData
from superset import app, security_manager
from superset.databases.filters import DatabaseFilter
from superset.databases.utils import make_url_safe
from superset.exceptions import SupersetException
from superset.models.core import Database
from superset.security.analytics_db_safety import check_sqlalchemy_uri
from superset.utils import core as utils
class DatabaseMixin:
    """Shared Flask-AppBuilder configuration and validation hooks for the
    Database model views: column lists, labels, inline help text, and the
    pre-add/pre-update/pre-delete checks that guard database records.
    """

    list_title = _("Databases")
    show_title = _("Show Database")
    add_title = _("Add Database")
    edit_title = _("Edit Database")

    list_columns = [
        "database_name",
        "backend",
        "expose_in_sqllab",
        "allow_run_async",
        "creator",
        "modified",
    ]
    order_columns = [
        "database_name",
        "allow_run_async",
        "allow_dml",
        "modified",
        "allow_file_upload",
        "expose_in_sqllab",
    ]
    add_columns = [
        "database_name",
        "sqlalchemy_uri",
        "cache_timeout",
        "expose_in_sqllab",
        "allow_run_async",
        "allow_file_upload",
        "allow_ctas",
        "allow_cvas",
        "allow_dml",
        "force_ctas_schema",
        "impersonate_user",
        "extra",
        "encrypted_extra",
        "server_cert",
    ]
    # Sensitive and heavyweight columns must never be exposed to search.
    search_exclude_columns = (
        "password",
        "tables",
        "created_by",
        "changed_by",
        "queries",
        "saved_queries",
        "encrypted_extra",
        "server_cert",
    )
    edit_columns = add_columns
    show_columns = [
        "tables",
        "cache_timeout",
        "extra",
        "database_name",
        "sqlalchemy_uri",
        "perm",
        "created_by",
        "created_on",
        "changed_by",
        "changed_on",
    ]
    base_order = ("changed_on", "desc")
    description_columns = {
        "sqlalchemy_uri": utils.markdown(
            "Refer to the "
            "[SqlAlchemy docs]"
            "(https://docs.sqlalchemy.org/en/rel_1_2/core/engines.html#"
            "database-urls) "
            "for more information on how to structure your URI.",
            True,
        ),
        "expose_in_sqllab": _("Expose this DB in SQL Lab"),
        "allow_run_async": _(
            "Operate the database in asynchronous mode, meaning "
            "that the queries are executed on remote workers as opposed "
            "to on the web server itself. "
            "This assumes that you have a Celery worker setup as well "
            "as a results backend. Refer to the installation docs "
            "for more information."
        ),
        "allow_ctas": _("Allow CREATE TABLE AS option in SQL Lab"),
        "allow_cvas": _("Allow CREATE VIEW AS option in SQL Lab"),
        "allow_dml": _(
            "Allow users to run non-SELECT statements "
            "(UPDATE, DELETE, CREATE, ...) "
            "in SQL Lab"
        ),
        "force_ctas_schema": _(
            "When allowing CREATE TABLE AS option in SQL Lab, "
            "this option forces the table to be created in this schema"
        ),
        # NOTE: adjacent string literals concatenate with NO implicit space;
        # several items below previously ran together ("whether ornot",
        # "inSQL Lab.", missing <br/> between items 6/7 and 7/8) — fixed.
        "extra": utils.markdown(
            "JSON string containing extra configuration elements.<br/>"
            "1. The ``engine_params`` object gets unpacked into the "
            "[sqlalchemy.create_engine]"
            "(https://docs.sqlalchemy.org/en/latest/core/engines.html#"
            "sqlalchemy.create_engine) call, while the ``metadata_params`` "
            "gets unpacked into the [sqlalchemy.MetaData]"
            "(https://docs.sqlalchemy.org/en/rel_1_0/core/metadata.html"
            "#sqlalchemy.schema.MetaData) call.<br/>"
            "2. The ``metadata_cache_timeout`` is a cache timeout setting "
            "in seconds for metadata fetch of this database. Specify it as "
            '**"metadata_cache_timeout": {"schema_cache_timeout": 600, '
            '"table_cache_timeout": 600}**. '
            "If unset, cache will not be enabled for the functionality. "
            "A timeout of 0 indicates that the cache never expires.<br/>"
            "3. The ``schemas_allowed_for_file_upload`` is a comma separated list "
            "of schemas that CSVs are allowed to upload to. "
            'Specify it as **"schemas_allowed_for_file_upload": '
            '["public", "csv_upload"]**. '
            "If database flavor does not support schema or any schema is allowed "
            "to be accessed, just leave the list empty<br/>"
            "4. the ``version`` field is a string specifying this db's version. "
            "This should be used with Presto DBs so that the syntax is correct<br/>"
            "5. The ``allows_virtual_table_explore`` field is a boolean specifying "
            "whether or not the Explore button in SQL Lab results is shown<br/>"
            "6. The ``disable_data_preview`` field is a boolean specifying whether "
            "or not data preview queries will be run when fetching table metadata "
            "in SQL Lab.<br/>"
            "7. The ``disable_drill_to_detail`` field is a boolean specifying "
            "whether or not drill to detail is disabled for the database.<br/>"
            "8. The ``allow_multi_catalog`` indicates if the database allows changing "
            "the default catalog when running queries and creating datasets.",
            True,
        ),
        "encrypted_extra": utils.markdown(
            "JSON string containing additional connection configuration.<br/>"
            "This is used to provide connection information for systems like "
            "Hive, Presto, and BigQuery, which do not conform to the username:password "
            "syntax normally used by SQLAlchemy.",
            True,
        ),
        "server_cert": utils.markdown(
            "Optional CA_BUNDLE contents to validate HTTPS requests. Only available "
            "on certain database engines.",
            True,
        ),
        "impersonate_user": _(
            "If Presto, all the queries in SQL Lab are going to be executed as the "
            "currently logged on user who must have permission to run them.<br/>"
            "If Hive and hive.server2.enable.doAs is enabled, will run the queries as "
            "service account, but impersonate the currently logged on user "
            "via hive.server2.proxy.user property."
        ),
        "cache_timeout": _(
            "Duration (in seconds) of the caching timeout for charts of this database. "
            "A timeout of 0 indicates that the cache never expires. "
            "Note this defaults to the global timeout if undefined."
        ),
        "allow_file_upload": _(
            "If selected, please set the schemas allowed for csv upload in Extra."
        ),
    }
    # Restrict listed rows to databases the current user may access.
    base_filters = [["id", DatabaseFilter, lambda: []]]
    label_columns = {
        "expose_in_sqllab": _("Expose in SQL Lab"),
        "allow_ctas": _("Allow CREATE TABLE AS"),
        "allow_cvas": _("Allow CREATE VIEW AS"),
        "allow_dml": _("Allow DDL/DML"),
        "force_ctas_schema": _("CTAS Schema"),
        "database_name": _("Database"),
        "creator": _("Creator"),
        "changed_on_": _("Last Changed"),
        "sqlalchemy_uri": _("SQLAlchemy URI"),
        "cache_timeout": _("Chart Cache Timeout"),
        "extra": _("Extra"),
        "encrypted_extra": _("Secure Extra"),
        "server_cert": _("Root certificate"),
        "allow_run_async": _("Async Execution"),
        "impersonate_user": _("Impersonate the logged on user"),
        "allow_file_upload": _("Allow Csv Upload"),
        "modified": _("Modified"),
        "backend": _("Backend"),
    }

    def _pre_add_update(self, database: Database) -> None:
        """Validate the record and register access permissions.

        Runs URI/extra/certificate validation, persists the (possibly
        rewritten) SQLAlchemy URI, then registers database, catalog, and
        schema permission view menus with the security manager.
        """
        if app.config["PREVENT_UNSAFE_DB_CONNECTIONS"]:
            check_sqlalchemy_uri(make_url_safe(database.sqlalchemy_uri))
        self.check_extra(database)
        self.check_encrypted_extra(database)
        if database.server_cert:
            utils.parse_ssl_cert(database.server_cert)
        database.set_sqlalchemy_uri(database.sqlalchemy_uri)
        security_manager.add_permission_view_menu("database_access", database.perm)

        # add catalog/schema permissions
        if database.db_engine_spec.supports_catalog:
            catalogs = database.get_all_catalog_names()
            for catalog in catalogs:
                security_manager.add_permission_view_menu(
                    "catalog_access",
                    security_manager.get_catalog_perm(database.database_name, catalog),
                )
        else:
            # add a dummy catalog for DBs that don't support them
            catalogs = [None]

        for catalog in catalogs:
            for schema in database.get_all_schema_names(catalog=catalog):
                security_manager.add_permission_view_menu(
                    "schema_access",
                    security_manager.get_schema_perm(
                        database.database_name,
                        catalog,
                        schema,
                    ),
                )

    def pre_add(self, database: Database) -> None:
        """FAB hook: validate and register permissions before insert."""
        self._pre_add_update(database)

    def pre_update(self, database: Database) -> None:
        """FAB hook: validate and register permissions before update."""
        self._pre_add_update(database)

    def pre_delete(self, database: Database) -> None:
        """FAB hook: refuse deletion while datasets still reference the DB."""
        if database.tables:
            raise SupersetException(
                Markup(
                    "Cannot delete a database that has tables attached. "
                    "Here's the list of associated tables: "
                    + ", ".join(f"{table}" for table in database.tables)
                )
            )

    def check_extra(self, database: Database) -> None:
        """Validate the ``extra`` JSON blob and its ``metadata_params``.

        Raises if the field is not valid JSON, or if ``metadata_params``
        contains a key that ``sqlalchemy.MetaData`` does not accept.
        """
        # this will check whether json.loads(extra) can succeed
        try:
            extra = database.get_extra()
        except Exception as ex:
            raise Exception(  # pylint: disable=broad-exception-raised
                _("Extra field cannot be decoded by JSON. %(msg)s", msg=str(ex))
            ) from ex

        # this will check whether 'metadata_params' is configured correctly
        metadata_signature = inspect.signature(MetaData)
        for key in extra.get("metadata_params", {}):
            if key not in metadata_signature.parameters:
                raise Exception(  # pylint: disable=broad-exception-raised
                    _(
                        "The metadata_params in Extra field "
                        "is not configured correctly. The key "
                        # %(key)s is the gettext/printf named-placeholder form;
                        # the previous %{key}s raised ValueError on interpolation.
                        "%(key)s is invalid.",
                        key=key,
                    )
                )

    def check_encrypted_extra(self, database: Database) -> None:
        """Validate that the ``encrypted_extra`` blob decodes as JSON."""
        # this will check whether json.loads(secure_extra) can succeed
        try:
            database.get_encrypted_extra()
        except Exception as ex:
            raise Exception(  # pylint: disable=broad-exception-raised
                _("Extra field cannot be decoded by JSON. %(msg)s", msg=str(ex))
            ) from ex

View File

@ -17,27 +17,12 @@
from typing import TYPE_CHECKING from typing import TYPE_CHECKING
from flask_appbuilder import expose from flask_appbuilder import expose
from flask_appbuilder.models.sqla.interface import SQLAInterface
from flask_appbuilder.security.decorators import has_access from flask_appbuilder.security.decorators import has_access
from flask_babel import lazy_gettext as _
from wtforms.fields import StringField
from wtforms.validators import ValidationError
import superset.models.core as models
from superset import app from superset import app
from superset.constants import MODEL_VIEW_RW_METHOD_PERMISSION_MAP, RouteMethod from superset.constants import MODEL_VIEW_RW_METHOD_PERMISSION_MAP
from superset.exceptions import CertificateException
from superset.superset_typing import FlaskResponse from superset.superset_typing import FlaskResponse
from superset.utils import core as utils from superset.views.base import BaseSupersetView
from superset.views.base import (
DeleteMixin,
DeprecateModelViewMixin,
SupersetModelView,
YamlExportMixin,
)
from .mixins import DatabaseMixin
from .validators import sqlalchemy_uri_validator
if TYPE_CHECKING: if TYPE_CHECKING:
from werkzeug.datastructures import FileStorage from werkzeug.datastructures import FileStorage
@ -46,25 +31,6 @@ config = app.config
stats_logger = config["STATS_LOGGER"] stats_logger = config["STATS_LOGGER"]
def sqlalchemy_uri_form_validator(_: _, field: StringField) -> None:
"""
Check if user has submitted a valid SQLAlchemy URI
"""
sqlalchemy_uri_validator(field.data, exception=ValidationError)
def certificate_form_validator(_: _, field: StringField) -> None:
"""
Check if user has submitted a valid SSL certificate
"""
if field.data:
try:
utils.parse_ssl_cert(field.data)
except CertificateException as ex:
raise ValidationError(ex.message) from ex
def upload_stream_write(form_file_field: "FileStorage", path: str) -> None: def upload_stream_write(form_file_field: "FileStorage", path: str) -> None:
chunk_size = app.config["UPLOAD_CHUNK_SIZE"] chunk_size = app.config["UPLOAD_CHUNK_SIZE"]
with open(path, "bw") as file_description: with open(path, "bw") as file_description:
@ -75,29 +41,10 @@ def upload_stream_write(form_file_field: "FileStorage", path: str) -> None:
file_description.write(chunk) file_description.write(chunk)
class DatabaseView( class DatabaseView(BaseSupersetView):
DeprecateModelViewMixin,
DatabaseMixin,
SupersetModelView,
DeleteMixin,
YamlExportMixin,
): # pylint: disable=too-many-ancestors
datamodel = SQLAInterface(models.Database)
class_permission_name = "Database" class_permission_name = "Database"
method_permission_name = MODEL_VIEW_RW_METHOD_PERMISSION_MAP method_permission_name = MODEL_VIEW_RW_METHOD_PERMISSION_MAP
include_route_methods = RouteMethod.CRUD_SET
add_template = "superset/models/database/add.html"
edit_template = "superset/models/database/edit.html"
validators_columns = {
"sqlalchemy_uri": [sqlalchemy_uri_form_validator],
"server_cert": [certificate_form_validator],
}
yaml_dict_key = "databases"
@expose("/list/") @expose("/list/")
@has_access @has_access
def list(self) -> FlaskResponse: def list(self) -> FlaskResponse:

View File

@ -19,30 +19,25 @@ import logging
from flask import request, Response from flask import request, Response
from flask_appbuilder import expose from flask_appbuilder import expose
from flask_appbuilder.models.sqla.interface import SQLAInterface
from flask_appbuilder.security.decorators import has_access, has_access_api from flask_appbuilder.security.decorators import has_access, has_access_api
from flask_babel import gettext as __ from flask_babel import gettext as __
from sqlalchemy import and_ from sqlalchemy import and_
from superset import db from superset import db
from superset.constants import MODEL_VIEW_RW_METHOD_PERMISSION_MAP, RouteMethod from superset.models.sql_lab import Query, TableSchema, TabState
from superset.models.sql_lab import Query, SavedQuery, TableSchema, TabState
from superset.superset_typing import FlaskResponse from superset.superset_typing import FlaskResponse
from superset.utils import json from superset.utils import json
from superset.utils.core import error_msg_from_exception, get_user_id from superset.utils.core import error_msg_from_exception, get_user_id
from superset.views.base import ( from superset.views.base import (
BaseSupersetView, BaseSupersetView,
DeleteMixin,
DeprecateModelViewMixin,
json_error_response, json_error_response,
json_success, json_success,
SupersetModelView,
) )
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
class SavedQueryView(DeprecateModelViewMixin, BaseSupersetView): class SavedQueryView(BaseSupersetView):
route_base = "/savedqueryview" route_base = "/savedqueryview"
class_permission_name = "SavedQuery" class_permission_name = "SavedQuery"
@ -52,31 +47,6 @@ class SavedQueryView(DeprecateModelViewMixin, BaseSupersetView):
return super().render_app_template() return super().render_app_template()
class SavedQueryViewApi(DeprecateModelViewMixin, SupersetModelView, DeleteMixin): # pylint: disable=too-many-ancestors
datamodel = SQLAInterface(SavedQuery)
include_route_methods = RouteMethod.CRUD_SET
route_base = "/savedqueryviewapi"
class_permission_name = "SavedQuery"
include_route_methods = {
RouteMethod.API_READ,
RouteMethod.API_CREATE,
RouteMethod.API_UPDATE,
RouteMethod.API_GET,
}
method_permission_name = MODEL_VIEW_RW_METHOD_PERMISSION_MAP
add_columns = ["label", "db_id", "schema", "description", "sql", "extra_json"]
edit_columns = add_columns
show_columns = add_columns + ["id"]
@has_access_api
@expose("show/<pk>")
def show(self, pk: int) -> FlaskResponse:
return super().show(pk)
def _get_owner_id(tab_state_id: int) -> int: def _get_owner_id(tab_state_id: int) -> int:
return db.session.query(TabState.user_id).filter_by(id=tab_state_id).scalar() return db.session.query(TabState.user_id).filter_by(id=tab_state_id).scalar()

View File

@ -52,7 +52,6 @@ from superset.sql_parse import Table
from superset.utils import core as utils, json from superset.utils import core as utils, json
from superset.utils.core import backend from superset.utils.core import backend
from superset.utils.database import get_example_database from superset.utils.database import get_example_database
from superset.views.database.views import DatabaseView
from tests.integration_tests.constants import ADMIN_USERNAME, GAMMA_USERNAME from tests.integration_tests.constants import ADMIN_USERNAME, GAMMA_USERNAME
from tests.integration_tests.fixtures.birth_names_dashboard import ( from tests.integration_tests.fixtures.birth_names_dashboard import (
load_birth_names_dashboard_with_slices, # noqa: F401 load_birth_names_dashboard_with_slices, # noqa: F401
@ -270,13 +269,6 @@ class TestCore(SupersetTestCase):
resp = self.client.get(url) resp = self.client.get(url)
assert resp.status_code == 200 assert resp.status_code == 200
def test_get_user_slices(self):
self.login(ADMIN_USERNAME)
userid = security_manager.find_user("admin").id
url = f"/sliceasync/api/read?_flt_0_created_by={userid}"
resp = self.client.get(url)
assert resp.status_code == 200
@pytest.mark.usefixtures("load_energy_table_with_slice") @pytest.mark.usefixtures("load_energy_table_with_slice")
def test_slices_V2(self): # noqa: N802 def test_slices_V2(self): # noqa: N802
# Add explore-v2-beta role to admin user # Add explore-v2-beta role to admin user
@ -328,25 +320,6 @@ class TestCore(SupersetTestCase):
# Disable for password store for later tests # Disable for password store for later tests
models.custom_password_store = None models.custom_password_store = None
def test_databaseview_edit(self):
# validate that sending a password-masked uri does not over-write the decrypted
# uri
self.login(ADMIN_USERNAME)
database = superset.utils.database.get_example_database()
sqlalchemy_uri_decrypted = database.sqlalchemy_uri_decrypted
url = f"databaseview/edit/{database.id}"
data = {k: database.__getattribute__(k) for k in DatabaseView.add_columns}
data["sqlalchemy_uri"] = database.safe_sqlalchemy_uri()
self.client.post(url, data=data)
database = superset.utils.database.get_example_database()
assert sqlalchemy_uri_decrypted == database.sqlalchemy_uri_decrypted
# Need to clean up after ourselves
database.impersonate_user = False
database.allow_dml = False
database.allow_run_async = False
db.session.commit()
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_warm_up_cache_error(self) -> None: def test_warm_up_cache_error(self) -> None:
self.login(ADMIN_USERNAME) self.login(ADMIN_USERNAME)

View File

@ -1818,35 +1818,6 @@ class TestDatabaseApi(SupersetTestCase):
db.session.delete(database) db.session.delete(database)
db.session.commit() db.session.commit()
def mock_empty_csv_function(d, user): # noqa: N805
return []
@mock.patch(
"superset.views.core.app.config",
{**app.config, "ALLOWED_USER_CSV_SCHEMA_FUNC": mock_empty_csv_function},
)
def test_get_allow_file_upload_false_csv(self):
"""
Database API: Test filter for allow file upload checks for schemas.
Both databases have false allow_file_upload
"""
with self.create_app().app_context():
self.login(ADMIN_USERNAME)
arguments = {
"columns": ["allow_file_upload"],
"filters": [
{
"col": "allow_file_upload",
"opr": "upload_is_enabled",
"value": True,
}
],
}
uri = f"api/v1/database/?q={prison.dumps(arguments)}"
rv = self.client.get(uri)
data = json.loads(rv.data.decode("utf-8"))
assert data["count"] == 1
def test_get_allow_file_upload_filter_no_permission(self): def test_get_allow_file_upload_filter_no_permission(self):
""" """
Database API: Test filter for allow file upload checks for schemas Database API: Test filter for allow file upload checks for schemas

View File

@ -27,7 +27,6 @@ from superset import db
from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn
from superset.utils.database import get_example_database from superset.utils.database import get_example_database
from superset.utils.dict_import_export import export_to_dict
from superset.utils import json from superset.utils import json
from .base_tests import SupersetTestCase from .base_tests import SupersetTestCase
@ -248,28 +247,6 @@ class TestDictImportExport(SupersetTestCase):
imported_copy_table.export_to_dict(), imported_table.export_to_dict() imported_copy_table.export_to_dict(), imported_table.export_to_dict()
) )
def test_export_datasource_ui_cli(self):
# TODO(bkyryliuk): find fake db is leaking from
self.delete_fake_db()
cli_export = export_to_dict(
recursive=True,
back_references=False,
include_defaults=False,
)
self.get_resp("/login/", data=dict(username="admin", password="general")) # noqa: S106, C408
resp = self.get_resp(
"/databaseview/action_post", {"action": "yaml_export", "rowid": 1}
)
ui_export = yaml.safe_load(resp)
assert (
ui_export["databases"][0]["database_name"]
== cli_export["databases"][0]["database_name"]
)
assert (
ui_export["databases"][0]["tables"] == cli_export["databases"][0]["tables"]
)
if __name__ == "__main__": if __name__ == "__main__":
unittest.main() unittest.main()

View File

@ -1,16 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

View File

@ -1,16 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

View File

@ -1,65 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from pytest_mock import MockerFixture
from superset.views.database.mixins import DatabaseMixin
def test_pre_add_update_with_catalog(mocker: MockerFixture) -> None:
    """
    Test the `_pre_add_update` method on a DB with catalog support.

    Verifies that database-, catalog-, and schema-level permissions are
    all registered with the security manager.
    """
    from superset.models.core import Database

    # Intercept permission registration so the grants can be inspected.
    grant = mocker.patch(
        "superset.views.database.mixins.security_manager.add_permission_view_menu"
    )

    db_model = Database(
        database_name="my_db",
        id=42,
        sqlalchemy_uri="postgresql://user:password@host:5432/examples",
    )
    mocker.patch.object(
        db_model,
        "get_all_catalog_names",
        return_value=["examples", "other"],
    )
    # One schema listing per catalog, consumed in catalog order.
    mocker.patch.object(
        db_model,
        "get_all_schema_names",
        side_effect=[
            ["public", "information_schema"],
            ["secret"],
        ],
    )

    DatabaseMixin()._pre_add_update(db_model)

    expected_grants = [
        mocker.call("database_access", "[my_db].(id:42)"),
        mocker.call("catalog_access", "[my_db].[examples]"),
        mocker.call("catalog_access", "[my_db].[other]"),
        mocker.call("schema_access", "[my_db].[examples].[public]"),
        mocker.call("schema_access", "[my_db].[examples].[information_schema]"),
        mocker.call("schema_access", "[my_db].[other].[secret]"),
    ]
    grant.assert_has_calls(expected_grants, any_order=True)