diff --git a/superset/views/database/__init__.py b/superset/views/database/__init__.py
index 98227ed95..13a83393a 100644
--- a/superset/views/database/__init__.py
+++ b/superset/views/database/__init__.py
@@ -14,260 +14,3 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
-# pylint: disable=C,R,W
-import inspect
-from typing import Type
-
-from flask import Markup
-from flask_babel import lazy_gettext as _
-from marshmallow import ValidationError
-from sqlalchemy import MetaData
-from sqlalchemy.engine.url import make_url
-from sqlalchemy.exc import ArgumentError
-
-from superset import security_manager
-from superset.exceptions import SupersetException
-from superset.utils import core as utils
-from superset.views.base import SupersetFilter
-
-
-def sqlalchemy_uri_validator(
- uri: str, exception: Type[ValidationError] = ValidationError
-) -> None:
- """
- Check if a user has submitted a valid SQLAlchemy URI
- """
- try:
- make_url(uri.strip())
- except (ArgumentError, AttributeError):
- raise exception(
- _(
- "Invalid connnection string, a valid string follows: "
- " 'DRIVER://USER:PASSWORD@DB-HOST/DATABASE-NAME'"
-                "<p>Example:'postgresql://user:password@your-postgres-db/database'</p>"
- )
- )
-
-
-class DatabaseFilter(SupersetFilter):
- def apply(self, query, func):
- if security_manager.all_database_access():
- return query
- perms = self.get_view_menus("database_access")
- return query.filter(self.model.perm.in_(perms))
-
-
-class DatabaseMixin:
- list_title = _("Databases")
- show_title = _("Show Database")
- add_title = _("Add Database")
- edit_title = _("Edit Database")
-
- list_columns = [
- "database_name",
- "backend",
- "allow_run_async",
- "allow_dml",
- "allow_csv_upload",
- "expose_in_sqllab",
- "creator",
- "modified",
- ]
- order_columns = [
- "database_name",
- "allow_run_async",
- "allow_dml",
- "modified",
- "allow_csv_upload",
- "expose_in_sqllab",
- ]
- add_columns = [
- "database_name",
- "sqlalchemy_uri",
- "cache_timeout",
- "expose_in_sqllab",
- "allow_run_async",
- "allow_csv_upload",
- "allow_ctas",
- "allow_dml",
- "force_ctas_schema",
- "impersonate_user",
- "allow_multi_schema_metadata_fetch",
- "extra",
- "encrypted_extra",
- ]
- search_exclude_columns = (
- "password",
- "tables",
- "created_by",
- "changed_by",
- "queries",
- "saved_queries",
- "encrypted_extra",
- )
- edit_columns = add_columns
- show_columns = [
- "tables",
- "cache_timeout",
- "extra",
- "database_name",
- "sqlalchemy_uri",
- "perm",
- "created_by",
- "created_on",
- "changed_by",
- "changed_on",
- ]
- base_order = ("changed_on", "desc")
- description_columns = {
- "sqlalchemy_uri": utils.markdown(
- "Refer to the "
- "[SqlAlchemy docs]"
- "(https://docs.sqlalchemy.org/en/rel_1_2/core/engines.html#"
- "database-urls) "
- "for more information on how to structure your URI.",
- True,
- ),
- "expose_in_sqllab": _("Expose this DB in SQL Lab"),
- "allow_run_async": _(
- "Operate the database in asynchronous mode, meaning "
- "that the queries are executed on remote workers as opposed "
- "to on the web server itself. "
- "This assumes that you have a Celery worker setup as well "
- "as a results backend. Refer to the installation docs "
- "for more information."
- ),
- "allow_ctas": _("Allow CREATE TABLE AS option in SQL Lab"),
- "allow_dml": _(
- "Allow users to run non-SELECT statements "
- "(UPDATE, DELETE, CREATE, ...) "
- "in SQL Lab"
- ),
- "force_ctas_schema": _(
- "When allowing CREATE TABLE AS option in SQL Lab, "
- "this option forces the table to be created in this schema"
- ),
- "extra": utils.markdown(
-            "JSON string containing extra configuration elements.<br/>"
- "1. The ``engine_params`` object gets unpacked into the "
- "[sqlalchemy.create_engine]"
- "(https://docs.sqlalchemy.org/en/latest/core/engines.html#"
- "sqlalchemy.create_engine) call, while the ``metadata_params`` "
- "gets unpacked into the [sqlalchemy.MetaData]"
- "(https://docs.sqlalchemy.org/en/rel_1_0/core/metadata.html"
-            "#sqlalchemy.schema.MetaData) call.<br/>"
- "2. The ``metadata_cache_timeout`` is a cache timeout setting "
- "in seconds for metadata fetch of this database. Specify it as "
- '**"metadata_cache_timeout": {"schema_cache_timeout": 600, '
- '"table_cache_timeout": 600}**. '
- "If unset, cache will not be enabled for the functionality. "
-            "A timeout of 0 indicates that the cache never expires.<br/>"
- "3. The ``schemas_allowed_for_csv_upload`` is a comma separated list "
- "of schemas that CSVs are allowed to upload to. "
- 'Specify it as **"schemas_allowed_for_csv_upload": '
- '["public", "csv_upload"]**. '
- "If database flavor does not support schema or any schema is allowed "
- "to be accessed, just leave the list empty"
- "4. the ``version`` field is a string specifying the this db's version. "
- "This should be used with Presto DBs so that the syntax is correct",
- True,
- ),
- "encrypted_extra": utils.markdown(
-            "JSON string containing additional connection configuration.<br/>"
- "This is used to provide connection information for systems like "
- "Hive, Presto, and BigQuery, which do not conform to the username:password "
- "syntax normally used by SQLAlchemy.",
- True,
- ),
- "impersonate_user": _(
- "If Presto, all the queries in SQL Lab are going to be executed as the "
-            "currently logged on user who must have permission to run them.<br/>"
- "If Hive and hive.server2.enable.doAs is enabled, will run the queries as "
- "service account, but impersonate the currently logged on user "
- "via hive.server2.proxy.user property."
- ),
- "allow_multi_schema_metadata_fetch": _(
- "Allow SQL Lab to fetch a list of all tables and all views across "
- "all database schemas. For large data warehouse with thousands of "
- "tables, this can be expensive and put strain on the system."
- ),
- "cache_timeout": _(
- "Duration (in seconds) of the caching timeout for charts of this database. "
- "A timeout of 0 indicates that the cache never expires. "
- "Note this defaults to the global timeout if undefined."
- ),
- "allow_csv_upload": _(
- "If selected, please set the schemas allowed for csv upload in Extra."
- ),
- }
- base_filters = [["id", DatabaseFilter, lambda: []]]
- label_columns = {
- "expose_in_sqllab": _("Expose in SQL Lab"),
- "allow_ctas": _("Allow CREATE TABLE AS"),
- "allow_dml": _("Allow DML"),
- "force_ctas_schema": _("CTAS Schema"),
- "database_name": _("Database"),
- "creator": _("Creator"),
- "changed_on_": _("Last Changed"),
- "sqlalchemy_uri": _("SQLAlchemy URI"),
- "cache_timeout": _("Chart Cache Timeout"),
- "extra": _("Extra"),
- "encrypted_extra": _("Secure Extra"),
- "allow_run_async": _("Asynchronous Query Execution"),
- "impersonate_user": _("Impersonate the logged on user"),
- "allow_csv_upload": _("Allow Csv Upload"),
- "modified": _("Modified"),
- "allow_multi_schema_metadata_fetch": _("Allow Multi Schema Metadata Fetch"),
- "backend": _("Backend"),
- }
-
- def _pre_add_update(self, db):
- self.check_extra(db)
- self.check_encrypted_extra(db)
- db.set_sqlalchemy_uri(db.sqlalchemy_uri)
- security_manager.add_permission_view_menu("database_access", db.perm)
- # adding a new database we always want to force refresh schema list
- for schema in db.get_all_schema_names():
- security_manager.add_permission_view_menu(
- "schema_access", security_manager.get_schema_perm(db, schema)
- )
-
- def pre_add(self, db):
- self._pre_add_update(db)
-
- def pre_update(self, db):
- self._pre_add_update(db)
-
- def pre_delete(self, obj):
- if obj.tables:
- raise SupersetException(
- Markup(
- "Cannot delete a database that has tables attached. "
- "Here's the list of associated tables: "
- + ", ".join("{}".format(o) for o in obj.tables)
- )
- )
-
- def check_extra(self, db):
- # this will check whether json.loads(extra) can succeed
- try:
- extra = db.get_extra()
- except Exception as e:
- raise Exception("Extra field cannot be decoded by JSON. {}".format(str(e)))
-
- # this will check whether 'metadata_params' is configured correctly
- metadata_signature = inspect.signature(MetaData)
- for key in extra.get("metadata_params", {}):
- if key not in metadata_signature.parameters:
- raise Exception(
- "The metadata_params in Extra field "
- "is not configured correctly. The key "
- "{} is invalid.".format(key)
- )
-
- def check_encrypted_extra(self, db):
- # this will check whether json.loads(secure_extra) can succeed
- try:
- extra = db.get_encrypted_extra()
- except Exception as e:
- raise Exception(f"Secure Extra field cannot be decoded as JSON. {str(e)}")
diff --git a/superset/views/database/api.py b/superset/views/database/api.py
index 54448b771..e25061559 100644
--- a/superset/views/database/api.py
+++ b/superset/views/database/api.py
@@ -20,7 +20,8 @@ from flask_appbuilder.models.sqla.interface import SQLAInterface
import superset.models.core as models
from superset import appbuilder
-from . import DatabaseFilter, DatabaseMixin, sqlalchemy_uri_validator
+from .mixins import DatabaseFilter, DatabaseMixin
+from .validators import sqlalchemy_uri_validator
class DatabaseRestApi(DatabaseMixin, ModelRestApi):
diff --git a/superset/views/database/forms.py b/superset/views/database/forms.py
index 57144bc1c..4ae1abbde 100644
--- a/superset/views/database/forms.py
+++ b/superset/views/database/forms.py
@@ -14,7 +14,6 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
-# pylint: disable=C,R,W
"""Contains the logic to create cohesive forms on the explore view"""
from flask_appbuilder.fieldwidgets import BS3TextFieldWidget
from flask_appbuilder.forms import DynamicForm
diff --git a/superset/views/database/mixins.py b/superset/views/database/mixins.py
new file mode 100644
index 000000000..ff8dc1e9e
--- /dev/null
+++ b/superset/views/database/mixins.py
@@ -0,0 +1,250 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import inspect
+
+from flask import Markup
+from flask_babel import lazy_gettext as _
+from sqlalchemy import MetaData
+
+from superset import security_manager
+from superset.exceptions import SupersetException
+from superset.utils import core as utils
+from superset.views.base import SupersetFilter
+
+
+class DatabaseFilter(SupersetFilter):
+ def apply(self, query, value):
+ if security_manager.all_database_access():
+ return query
+ perms = self.get_view_menus("database_access")
+ return query.filter(self.model.perm.in_(perms))
+
+
+class DatabaseMixin:
+ list_title = _("Databases")
+ show_title = _("Show Database")
+ add_title = _("Add Database")
+ edit_title = _("Edit Database")
+
+ list_columns = [
+ "database_name",
+ "backend",
+ "allow_run_async",
+ "allow_dml",
+ "allow_csv_upload",
+ "expose_in_sqllab",
+ "creator",
+ "modified",
+ ]
+ order_columns = [
+ "database_name",
+ "allow_run_async",
+ "allow_dml",
+ "modified",
+ "allow_csv_upload",
+ "expose_in_sqllab",
+ ]
+ add_columns = [
+ "database_name",
+ "sqlalchemy_uri",
+ "cache_timeout",
+ "expose_in_sqllab",
+ "allow_run_async",
+ "allow_csv_upload",
+ "allow_ctas",
+ "allow_dml",
+ "force_ctas_schema",
+ "impersonate_user",
+ "allow_multi_schema_metadata_fetch",
+ "extra",
+ "encrypted_extra",
+ ]
+ search_exclude_columns = (
+ "password",
+ "tables",
+ "created_by",
+ "changed_by",
+ "queries",
+ "saved_queries",
+ "encrypted_extra",
+ )
+ edit_columns = add_columns
+ show_columns = [
+ "tables",
+ "cache_timeout",
+ "extra",
+ "database_name",
+ "sqlalchemy_uri",
+ "perm",
+ "created_by",
+ "created_on",
+ "changed_by",
+ "changed_on",
+ ]
+ base_order = ("changed_on", "desc")
+ description_columns = {
+ "sqlalchemy_uri": utils.markdown(
+ "Refer to the "
+ "[SqlAlchemy docs]"
+ "(https://docs.sqlalchemy.org/en/rel_1_2/core/engines.html#"
+ "database-urls) "
+ "for more information on how to structure your URI.",
+ True,
+ ),
+ "expose_in_sqllab": _("Expose this DB in SQL Lab"),
+ "allow_run_async": _(
+ "Operate the database in asynchronous mode, meaning "
+ "that the queries are executed on remote workers as opposed "
+ "to on the web server itself. "
+ "This assumes that you have a Celery worker setup as well "
+ "as a results backend. Refer to the installation docs "
+ "for more information."
+ ),
+ "allow_ctas": _("Allow CREATE TABLE AS option in SQL Lab"),
+ "allow_dml": _(
+ "Allow users to run non-SELECT statements "
+ "(UPDATE, DELETE, CREATE, ...) "
+ "in SQL Lab"
+ ),
+ "force_ctas_schema": _(
+ "When allowing CREATE TABLE AS option in SQL Lab, "
+ "this option forces the table to be created in this schema"
+ ),
+ "extra": utils.markdown(
+            "JSON string containing extra configuration elements.<br/>"
+ "1. The ``engine_params`` object gets unpacked into the "
+ "[sqlalchemy.create_engine]"
+ "(https://docs.sqlalchemy.org/en/latest/core/engines.html#"
+ "sqlalchemy.create_engine) call, while the ``metadata_params`` "
+ "gets unpacked into the [sqlalchemy.MetaData]"
+ "(https://docs.sqlalchemy.org/en/rel_1_0/core/metadata.html"
+            "#sqlalchemy.schema.MetaData) call.<br/>"
+ "2. The ``metadata_cache_timeout`` is a cache timeout setting "
+ "in seconds for metadata fetch of this database. Specify it as "
+ '**"metadata_cache_timeout": {"schema_cache_timeout": 600, '
+ '"table_cache_timeout": 600}**. '
+ "If unset, cache will not be enabled for the functionality. "
+            "A timeout of 0 indicates that the cache never expires.<br/>"
+ "3. The ``schemas_allowed_for_csv_upload`` is a comma separated list "
+ "of schemas that CSVs are allowed to upload to. "
+ 'Specify it as **"schemas_allowed_for_csv_upload": '
+ '["public", "csv_upload"]**. '
+ "If database flavor does not support schema or any schema is allowed "
+            "to be accessed, just leave the list empty<br/>"
+            "4. The ``version`` field is a string specifying this db's version. "
+ "This should be used with Presto DBs so that the syntax is correct",
+ True,
+ ),
+ "encrypted_extra": utils.markdown(
+            "JSON string containing additional connection configuration.<br/>"
+ "This is used to provide connection information for systems like "
+ "Hive, Presto, and BigQuery, which do not conform to the username:password "
+ "syntax normally used by SQLAlchemy.",
+ True,
+ ),
+ "impersonate_user": _(
+ "If Presto, all the queries in SQL Lab are going to be executed as the "
+            "currently logged on user who must have permission to run them.<br/>"
+ "If Hive and hive.server2.enable.doAs is enabled, will run the queries as "
+ "service account, but impersonate the currently logged on user "
+ "via hive.server2.proxy.user property."
+ ),
+ "allow_multi_schema_metadata_fetch": _(
+ "Allow SQL Lab to fetch a list of all tables and all views across "
+ "all database schemas. For large data warehouse with thousands of "
+ "tables, this can be expensive and put strain on the system."
+ ),
+ "cache_timeout": _(
+ "Duration (in seconds) of the caching timeout for charts of this database. "
+ "A timeout of 0 indicates that the cache never expires. "
+ "Note this defaults to the global timeout if undefined."
+ ),
+ "allow_csv_upload": _(
+ "If selected, please set the schemas allowed for csv upload in Extra."
+ ),
+ }
+ base_filters = [["id", DatabaseFilter, lambda: []]]
+ label_columns = {
+ "expose_in_sqllab": _("Expose in SQL Lab"),
+ "allow_ctas": _("Allow CREATE TABLE AS"),
+ "allow_dml": _("Allow DML"),
+ "force_ctas_schema": _("CTAS Schema"),
+ "database_name": _("Database"),
+ "creator": _("Creator"),
+ "changed_on_": _("Last Changed"),
+ "sqlalchemy_uri": _("SQLAlchemy URI"),
+ "cache_timeout": _("Chart Cache Timeout"),
+ "extra": _("Extra"),
+ "encrypted_extra": _("Secure Extra"),
+ "allow_run_async": _("Asynchronous Query Execution"),
+ "impersonate_user": _("Impersonate the logged on user"),
+ "allow_csv_upload": _("Allow Csv Upload"),
+ "modified": _("Modified"),
+ "allow_multi_schema_metadata_fetch": _("Allow Multi Schema Metadata Fetch"),
+ "backend": _("Backend"),
+ }
+
+ def _pre_add_update(self, database):
+ self.check_extra(database)
+ self.check_encrypted_extra(database)
+ database.set_sqlalchemy_uri(database.sqlalchemy_uri)
+ security_manager.add_permission_view_menu("database_access", database.perm)
+ # adding a new database we always want to force refresh schema list
+ for schema in database.get_all_schema_names():
+ security_manager.add_permission_view_menu(
+ "schema_access", security_manager.get_schema_perm(database, schema)
+ )
+
+ def pre_add(self, database):
+ self._pre_add_update(database)
+
+ def pre_update(self, database):
+ self._pre_add_update(database)
+
+ def pre_delete(self, obj): # pylint: disable=no-self-use
+ if obj.tables:
+ raise SupersetException(
+ Markup(
+ "Cannot delete a database that has tables attached. "
+ "Here's the list of associated tables: "
+ + ", ".join("{}".format(o) for o in obj.tables)
+ )
+ )
+
+ def check_extra(self, database): # pylint: disable=no-self-use
+ # this will check whether json.loads(extra) can succeed
+ try:
+ extra = database.get_extra()
+ except Exception as e:
+ raise Exception("Extra field cannot be decoded by JSON. {}".format(str(e)))
+
+ # this will check whether 'metadata_params' is configured correctly
+ metadata_signature = inspect.signature(MetaData)
+ for key in extra.get("metadata_params", {}):
+ if key not in metadata_signature.parameters:
+ raise Exception(
+ "The metadata_params in Extra field "
+ "is not configured correctly. The key "
+ "{} is invalid.".format(key)
+ )
+
+ def check_encrypted_extra(self, database): # pylint: disable=no-self-use
+ # this will check whether json.loads(secure_extra) can succeed
+ try:
+ database.get_encrypted_extra()
+ except Exception as e:
+ raise Exception(f"Secure Extra field cannot be decoded as JSON. {str(e)}")
diff --git a/superset/views/database/validators.py b/superset/views/database/validators.py
new file mode 100644
index 000000000..07ddc2410
--- /dev/null
+++ b/superset/views/database/validators.py
@@ -0,0 +1,55 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from typing import Type
+
+from flask_babel import lazy_gettext as _
+from marshmallow import ValidationError
+from sqlalchemy.engine.url import make_url
+from sqlalchemy.exc import ArgumentError
+
+from superset import security_manager
+
+
+def sqlalchemy_uri_validator(
+ uri: str, exception: Type[ValidationError] = ValidationError
+) -> None:
+ """
+ Check if a user has submitted a valid SQLAlchemy URI
+ """
+ try:
+ make_url(uri.strip())
+ except (ArgumentError, AttributeError):
+ raise exception(
+ _(
+            "Invalid connection string, a valid string follows: "
+ " 'DRIVER://USER:PASSWORD@DB-HOST/DATABASE-NAME'"
+            "<p>Example:'postgresql://user:password@your-postgres-db/database'</p>"
+ )
+ )
+
+
+def schema_allows_csv_upload(database, schema):
+ if not database.allow_csv_upload:
+ return False
+ schemas = database.get_schema_access_for_csv_upload()
+ if schemas:
+ return schema in schemas
+ return (
+ security_manager.database_access(database)
+ or security_manager.all_datasource_access()
+ )
diff --git a/superset/views/database/views.py b/superset/views/database/views.py
index edbc6e62f..9cafe07d3 100644
--- a/superset/views/database/views.py
+++ b/superset/views/database/views.py
@@ -14,40 +14,40 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
-# pylint: disable=C,R,W
import os
from flask import flash, g, redirect
from flask_appbuilder import SimpleFormView
-from flask_appbuilder.forms import DynamicForm
from flask_appbuilder.models.sqla.interface import SQLAInterface
from flask_babel import gettext as __, lazy_gettext as _
-from sqlalchemy.exc import IntegrityError
from werkzeug.utils import secure_filename
from wtforms.fields import StringField
from wtforms.validators import ValidationError
import superset.models.core as models
-from superset import app, appbuilder, db, security_manager
+from superset import app, appbuilder, db
from superset.connectors.sqla.models import SqlaTable
from superset.utils import core as utils
from superset.views.base import DeleteMixin, SupersetModelView, YamlExportMixin
-from . import DatabaseMixin, sqlalchemy_uri_validator
from .forms import CsvToDatabaseForm
+from .mixins import DatabaseMixin
+from .validators import schema_allows_csv_upload, sqlalchemy_uri_validator
config = app.config
stats_logger = config["STATS_LOGGER"]
-def sqlalchemy_uri_form_validator(form: DynamicForm, field: StringField) -> None:
+def sqlalchemy_uri_form_validator(_, field: StringField) -> None:
"""
Check if user has submitted a valid SQLAlchemy URI
"""
sqlalchemy_uri_validator(field.data, exception=ValidationError)
-class DatabaseView(DatabaseMixin, SupersetModelView, DeleteMixin, YamlExportMixin):
+class DatabaseView(
+ DatabaseMixin, SupersetModelView, DeleteMixin, YamlExportMixin
+): # pylint: disable=too-many-ancestors
datamodel = SQLAInterface(models.Database)
add_template = "superset/models/database/add.html"
@@ -102,7 +102,7 @@ class CsvToDatabaseView(SimpleFormView):
database = form.con.data
schema_name = form.schema.data or ""
- if not self.is_schema_allowed(database, schema_name):
+ if not schema_allows_csv_upload(database, schema_name):
message = _(
'Database "%(database_name)s" schema "%(schema_name)s" '
"is not allowed for csv uploads. Please contact your Superset Admin.",
@@ -147,7 +147,7 @@ class CsvToDatabaseView(SimpleFormView):
table.fetch_metadata()
db.session.add(table)
db.session.commit()
- except Exception as e:
+ except Exception as e: # pylint: disable=broad-except
db.session.rollback()
try:
os.remove(path)
@@ -180,29 +180,18 @@ class CsvToDatabaseView(SimpleFormView):
stats_logger.incr("successful_csv_upload")
return redirect("/tablemodelview/list/")
- def is_schema_allowed(self, database, schema):
- if not database.allow_csv_upload:
- return False
- schemas = database.get_schema_access_for_csv_upload()
- if schemas:
- return schema in schemas
- return (
- security_manager.database_access(database)
- or security_manager.all_datasource_access()
- )
-
appbuilder.add_view_no_menu(CsvToDatabaseView)
-class DatabaseTablesAsync(DatabaseView):
+class DatabaseTablesAsync(DatabaseView): # pylint: disable=too-many-ancestors
list_columns = ["id", "all_table_names_in_database", "all_schema_names"]
appbuilder.add_view_no_menu(DatabaseTablesAsync)
-class DatabaseAsync(DatabaseView):
+class DatabaseAsync(DatabaseView): # pylint: disable=too-many-ancestors
list_columns = [
"id",
"database_name",