diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index a476dc3ab..b6b0ad85c 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -19,25 +19,6 @@ repos:
rev: v0.2.2
hooks:
- id: auto-walrus
- - repo: https://github.com/asottile/pyupgrade
- rev: v3.4.0
- hooks:
- - id: pyupgrade
- exclude: scripts/change_detector.py
- args:
- - --py39-plus
- - repo: https://github.com/hadialqattan/pycln
- rev: v2.1.2
- hooks:
- - id: pycln
- args:
- - --disable-all-dunder-policy
- - --exclude=superset/config.py
- - --extend-exclude=tests/integration_tests/superset_test_config.*.py
- - repo: https://github.com/PyCQA/isort
- rev: 5.12.0
- hooks:
- - id: isort
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.3.0
hooks:
@@ -73,11 +54,6 @@ repos:
- id: end-of-file-fixer
- id: trailing-whitespace
args: ["--markdown-linebreak-ext=md"]
- - repo: https://github.com/psf/black
- rev: 23.1.0
- hooks:
- - id: black
- language_version: python3
- repo: https://github.com/pre-commit/mirrors-prettier
rev: v3.1.0 # Use the sha or tag you want to point at
hooks:
@@ -95,3 +71,9 @@ repos:
hooks:
- id: helm-docs
files: helm
+ - repo: https://github.com/astral-sh/ruff-pre-commit
+ rev: v0.4.0
+ hooks:
+ - id: ruff
+ args: [ --fix ]
+ - id: ruff-format
diff --git a/.pylintrc b/.pylintrc
index 1cab7a587..5f51e9fcf 100644
--- a/.pylintrc
+++ b/.pylintrc
@@ -77,6 +77,7 @@ disable=
cyclic-import, # re-enable once this no longer raises false positives
missing-docstring,
duplicate-code,
+ line-too-long,
unspecified-encoding,
too-many-instance-attributes # re-enable once this no longer raises false positives
@@ -171,7 +172,7 @@ max-nested-blocks=5
[FORMAT]
# Maximum number of characters on a single line.
-max-line-length=90
+max-line-length=100
# Regexp for a line that is allowed to be longer than the limit.
 ignore-long-lines=^\s*(# )?<?https?://\S+>?$
diff --git a/RELEASING/changelog.py b/RELEASING/changelog.py
index d1ba06a62..80ff98cba 100644
--- a/RELEASING/changelog.py
+++ b/RELEASING/changelog.py
@@ -94,10 +94,10 @@ class GitChangeLog:
if not pull_request:
pull_request = github_repo.get_pull(pr_number)
self._github_prs[pr_number] = pull_request
- except BadCredentialsException as ex:
+ except BadCredentialsException:
print(
- f"Bad credentials to github provided"
- f" use access_token parameter or set GITHUB_TOKEN"
+ "Bad credentials to github provided"
+ " use access_token parameter or set GITHUB_TOKEN"
)
sys.exit(1)
@@ -167,8 +167,8 @@ class GitChangeLog:
def _get_changelog_version_head(self) -> str:
if not len(self._logs):
print(
- f"No changes found between revisions. "
- f"Make sure your branch is up to date."
+ "No changes found between revisions. "
+ "Make sure your branch is up to date."
)
sys.exit(1)
return f"### {self._version} ({self._logs[0].time})"
diff --git a/pyproject.toml b/pyproject.toml
index fd7dc98c8..19d231906 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -191,6 +191,7 @@ development = [
"pytest-cov",
"pytest-mock",
"python-ldap>=3.4.4",
+ "ruff",
"sqloxide",
"statsd",
"tox",
@@ -267,7 +268,6 @@ usedevelop = true
allowlist_externals =
npm
pkill
- {toxinidir}/superset-frontend/cypress_build.sh
[testenv:cypress]
setenv =
@@ -393,3 +393,81 @@ envlist =
pylint
skipsdist = true
"""
+[tool.ruff]
+# Exclude a variety of commonly ignored directories.
+exclude = [
+ ".bzr",
+ ".direnv",
+ ".eggs",
+ ".git",
+ ".git-rewrite",
+ ".hg",
+ ".ipynb_checkpoints",
+ ".mypy_cache",
+ ".nox",
+ ".pants.d",
+ ".pyenv",
+ ".pytest_cache",
+ ".pytype",
+ ".ruff_cache",
+ ".svn",
+ ".tox",
+ ".venv",
+ ".vscode",
+ "__pypackages__",
+ "_build",
+ "buck-out",
+ "build",
+ "dist",
+ "node_modules",
+ "site-packages",
+ "venv",
+]
+
+# Same as Black.
+line-length = 88
+indent-width = 4
+
+# Assume Python 3.10
+target-version = "py310"
+
+[tool.ruff.lint]
+# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
+# Unlike Flake8, Ruff doesn't enable pycodestyle warnings (`W`) or
+# McCabe complexity (`C901`) by default.
+select = ["E4", "E7", "E9", "F"]
+ignore = []
+
+# Allow fix for all enabled rules (when `--fix`) is provided.
+fixable = ["ALL"]
+unfixable = []
+
+# Allow unused variables when underscore-prefixed.
+dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
+
+[tool.ruff.format]
+# Like Black, use double quotes for strings.
+quote-style = "double"
+
+# Like Black, indent with spaces, rather than tabs.
+indent-style = "space"
+
+# Like Black, respect magic trailing commas.
+skip-magic-trailing-comma = false
+
+# Like Black, automatically detect the appropriate line ending.
+line-ending = "auto"
+
+# Enable auto-formatting of code examples in docstrings. Markdown,
+# reStructuredText code/literal blocks and doctests are all supported.
+#
+# This is currently disabled by default, but it is planned for this
+# to be opt-out in the future.
+docstring-code-format = false
+
+# Set the line length limit used when formatting code snippets in
+# docstrings.
+#
+# This only has an effect when the `docstring-code-format` setting is
+# enabled.
+docstring-code-line-length = "dynamic"
diff --git a/requirements/development.txt b/requirements/development.txt
index d10af9d76..1d4aaea9c 100644
--- a/requirements/development.txt
+++ b/requirements/development.txt
@@ -10,6 +10,8 @@
# via
# -r requirements/base.in
# -r requirements/development.in
+appnope==0.1.4
+ # via ipython
astroid==3.1.0
# via pylint
asttokens==2.2.1
@@ -239,6 +241,8 @@ rfc3339-validator==0.1.4
# via openapi-schema-validator
rfc3986==2.0.0
# via tableschema
+ruff==0.4.0
+ # via apache-superset
s3transfer==0.6.1
# via boto3
sqlalchemy-bigquery==1.10.0
diff --git a/scripts/build_docker.py b/scripts/build_docker.py
index 3351e508d..023b00491 100755
--- a/scripts/build_docker.py
+++ b/scripts/build_docker.py
@@ -82,7 +82,7 @@ def is_latest_release(release: str) -> bool:
return "SKIP_TAG::false" in output
-def make_docker_tag(l: list[str]) -> str:
+def make_docker_tag(l: list[str]) -> str: # noqa: E741
return f"{REPO}:" + "-".join([o for o in l if o])
@@ -140,7 +140,7 @@ def get_docker_command(
build_context_ref: str,
force_latest: bool = False,
) -> str:
- tag = ""
+ tag = "" # noqa: F841
build_target = ""
py_ver = BASE_PY_IMAGE
docker_context = "."
@@ -284,7 +284,7 @@ def main(
script = script + docker_build_command
if verbose:
run_cmd("cat Dockerfile")
- stdout = run_cmd(script)
+ stdout = run_cmd(script) # noqa: F841
else:
print("Dry Run - Docker Build Command:")
print(docker_build_command)
diff --git a/scripts/cancel_github_workflows.py b/scripts/cancel_github_workflows.py
index 70744c295..fcf3bc494 100755
--- a/scripts/cancel_github_workflows.py
+++ b/scripts/cancel_github_workflows.py
@@ -32,6 +32,7 @@ Example:
# cancel all jobs of a PR, including the latest runs
./cancel_github_workflows.py 1024 --include-last
"""
+
import os
from collections.abc import Iterable, Iterator
from typing import Any, Literal, Optional, Union
diff --git a/scripts/change_detector.py b/scripts/change_detector.py
index df2c872a4..4c953555f 100755
--- a/scripts/change_detector.py
+++ b/scripts/change_detector.py
@@ -21,7 +21,6 @@ import os
import re
import subprocess
from typing import List
-from urllib.error import HTTPError, URLError
from urllib.request import Request, urlopen
# Define patterns for each group of files you're interested in
@@ -102,12 +101,12 @@ def main(event_type: str, sha: str, repo: str) -> None:
if event_type == "pull_request":
pr_number = os.getenv("GITHUB_REF", "").split("/")[-2]
files = fetch_changed_files_pr(repo, pr_number)
- print(f"PR files:")
+ print("PR files:")
print_files(files)
elif event_type == "push":
files = fetch_changed_files_push(repo, sha)
- print(f"Files touched since previous commit:")
+ print("Files touched since previous commit:")
print_files(files)
else:
raise ValueError("Unsupported event type")
diff --git a/scripts/erd/erd.py b/scripts/erd/erd.py
index 0622e0d85..d49940feb 100644
--- a/scripts/erd/erd.py
+++ b/scripts/erd/erd.py
@@ -19,6 +19,7 @@ This module contains utilities to auto-generate an
Entity-Relationship Diagram (ERD) from SQLAlchemy
and onto a plantuml file.
"""
+
import json
import os
from collections import defaultdict
@@ -27,9 +28,8 @@ from typing import Any, Optional
import click
import jinja2
-from flask.cli import FlaskGroup, with_appcontext
-from superset import app, db
+from superset import db
GROUPINGS: dict[str, Iterable[str]] = {
"Core": [
diff --git a/superset/__init__.py b/superset/__init__.py
index d96697de9..e68f60ac6 100644
--- a/superset/__init__.py
+++ b/superset/__init__.py
@@ -19,19 +19,19 @@
from flask import current_app, Flask
from werkzeug.local import LocalProxy
-from superset.app import create_app
+from superset.app import create_app # noqa: F401
from superset.extensions import (
- appbuilder,
+ appbuilder, # noqa: F401
cache_manager,
- db,
- event_logger,
+ db, # noqa: F401
+ event_logger, # noqa: F401
feature_flag_manager,
manifest_processor,
results_backend_manager,
- security_manager,
- talisman,
+ security_manager, # noqa: F401
+ talisman, # noqa: F401
)
-from superset.security import SupersetSecurityManager
+from superset.security import SupersetSecurityManager # noqa: F401
# All of the fields located here should be considered legacy. The correct way
# to declare "global" dependencies is to define it in extensions.py,
diff --git a/superset/advanced_data_type/plugins/internet_port.py b/superset/advanced_data_type/plugins/internet_port.py
index 3b2f8da7b..1af8e847a 100644
--- a/superset/advanced_data_type/plugins/internet_port.py
+++ b/superset/advanced_data_type/plugins/internet_port.py
@@ -88,9 +88,9 @@ def port_translation_func(req: AdvancedDataTypeRequest) -> AdvancedDataTypeRespo
else port_conversion_dict[string_value]
)
except (KeyError, ValueError):
- resp[
- "error_message"
- ] = f"'{string_value}' does not appear to be a port name or number"
+ resp["error_message"] = (
+ f"'{string_value}' does not appear to be a port name or number"
+ )
break
else:
resp["display_value"] = ", ".join(
diff --git a/superset/advanced_data_type/schemas.py b/superset/advanced_data_type/schemas.py
index 9c7dd221a..d94fd1987 100644
--- a/superset/advanced_data_type/schemas.py
+++ b/superset/advanced_data_type/schemas.py
@@ -17,6 +17,7 @@
"""
Schemas for advanced data types
"""
+
from marshmallow import fields, Schema
advanced_data_type_convert_schema = {
diff --git a/superset/annotation_layers/filters.py b/superset/annotation_layers/filters.py
index 5fbf13b0a..8f552fe6a 100644
--- a/superset/annotation_layers/filters.py
+++ b/superset/annotation_layers/filters.py
@@ -24,9 +24,7 @@ from superset.models.annotations import AnnotationLayer
from superset.views.base import BaseFilter
-class AnnotationLayerAllTextFilter(
- BaseFilter
-): # pylint: disable=too-few-public-methods
+class AnnotationLayerAllTextFilter(BaseFilter): # pylint: disable=too-few-public-methods
name = _("All Text")
arg_name = "annotation_layer_all_text"
diff --git a/superset/cachekeys/api.py b/superset/cachekeys/api.py
index 40d3830e8..3c90dafb7 100644
--- a/superset/cachekeys/api.py
+++ b/superset/cachekeys/api.py
@@ -109,10 +109,8 @@ class CacheRestApi(BaseSupersetModelRestApi):
)
try:
- delete_stmt = (
- CacheKey.__table__.delete().where( # pylint: disable=no-member
- CacheKey.cache_key.in_(cache_keys)
- )
+ delete_stmt = CacheKey.__table__.delete().where( # pylint: disable=no-member
+ CacheKey.cache_key.in_(cache_keys)
)
db.session.execute(delete_stmt)
db.session.commit()
diff --git a/superset/charts/filters.py b/superset/charts/filters.py
index 194c5cfb8..a7543ba28 100644
--- a/superset/charts/filters.py
+++ b/superset/charts/filters.py
@@ -130,9 +130,7 @@ class ChartCreatedByMeFilter(BaseFilter): # pylint: disable=too-few-public-meth
)
-class ChartOwnedCreatedFavoredByMeFilter(
- BaseFilter
-): # pylint: disable=too-few-public-methods
+class ChartOwnedCreatedFavoredByMeFilter(BaseFilter): # pylint: disable=too-few-public-methods
"""
Custom filter for the GET chart that filters all charts the user
owns, created, changed or favored.
diff --git a/superset/columns/models.py b/superset/columns/models.py
index 1abaa7bf0..142e3d672 100644
--- a/superset/columns/models.py
+++ b/superset/columns/models.py
@@ -23,6 +23,7 @@ tables, metrics, and datasets were also introduced.
These models are not fully implemented, and shouldn't be used yet.
"""
+
import sqlalchemy as sa
from flask_appbuilder import Model
diff --git a/superset/commands/chart/export.py b/superset/commands/chart/export.py
index 0a188aee4..dc24a1454 100644
--- a/superset/commands/chart/export.py
+++ b/superset/commands/chart/export.py
@@ -78,9 +78,10 @@ class ExportChartsCommand(ExportModelsCommand):
def _export(
model: Slice, export_related: bool = True
) -> Iterator[tuple[str, Callable[[], str]]]:
- yield ExportChartsCommand._file_name(
- model
- ), lambda: ExportChartsCommand._file_content(model)
+ yield (
+ ExportChartsCommand._file_name(model),
+ lambda: ExportChartsCommand._file_content(model),
+ )
if model.table and export_related:
yield from ExportDatasetsCommand([model.table.id]).run()
diff --git a/superset/commands/chart/importers/v1/__init__.py b/superset/commands/chart/importers/v1/__init__.py
index 7f2537383..89fe5e7a7 100644
--- a/superset/commands/chart/importers/v1/__init__.py
+++ b/superset/commands/chart/importers/v1/__init__.py
@@ -18,7 +18,7 @@
from typing import Any
from marshmallow import Schema
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import Session # noqa: F401
from superset.charts.schemas import ImportV1ChartSchema
from superset.commands.chart.exceptions import ChartImportError
@@ -33,7 +33,6 @@ from superset.datasets.schemas import ImportV1DatasetSchema
class ImportChartsCommand(ImportModelsCommand):
-
"""Import charts"""
dao = ChartDAO
diff --git a/superset/commands/dashboard/export.py b/superset/commands/dashboard/export.py
index 344738746..d6f736964 100644
--- a/superset/commands/dashboard/export.py
+++ b/superset/commands/dashboard/export.py
@@ -154,9 +154,10 @@ class ExportDashboardsCommand(ExportModelsCommand):
def _export(
model: Dashboard, export_related: bool = True
) -> Iterator[tuple[str, Callable[[], str]]]:
- yield ExportDashboardsCommand._file_name(
- model
- ), lambda: ExportDashboardsCommand._file_content(model)
+ yield (
+ ExportDashboardsCommand._file_name(model),
+ lambda: ExportDashboardsCommand._file_content(model),
+ )
if export_related:
chart_ids = [chart.id for chart in model.slices]
diff --git a/superset/commands/dashboard/importers/v1/__init__.py b/superset/commands/dashboard/importers/v1/__init__.py
index 77d28696c..48b4e93e8 100644
--- a/superset/commands/dashboard/importers/v1/__init__.py
+++ b/superset/commands/dashboard/importers/v1/__init__.py
@@ -18,7 +18,7 @@
from typing import Any
from marshmallow import Schema
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import Session # noqa: F401
from sqlalchemy.sql import select
from superset import db
@@ -43,7 +43,6 @@ from superset.models.dashboard import Dashboard, dashboard_slices
class ImportDashboardsCommand(ImportModelsCommand):
-
"""Import dashboards"""
dao = DashboardDAO
@@ -115,7 +114,6 @@ class ImportDashboardsCommand(ImportModelsCommand):
# update datasource id, type, and name
dataset_dict = dataset_info[config["dataset_uuid"]]
config.update(dataset_dict)
- # pylint: disable=line-too-long
dataset_uid = f"{dataset_dict['datasource_id']}__{dataset_dict['datasource_type']}"
config["params"].update({"datasource": dataset_uid})
if "query_context" in config:
diff --git a/superset/commands/database/create.py b/superset/commands/database/create.py
index 9efb39b75..4903938eb 100644
--- a/superset/commands/database/create.py
+++ b/superset/commands/database/create.py
@@ -19,7 +19,6 @@ from typing import Any, Optional
from flask import current_app
from flask_appbuilder.models.sqla import Model
-from flask_babel import gettext as _
from marshmallow import ValidationError
from superset import is_feature_enabled
diff --git a/superset/commands/database/export.py b/superset/commands/database/export.py
index 555a9c327..fefe86adc 100644
--- a/superset/commands/database/export.py
+++ b/superset/commands/database/export.py
@@ -106,9 +106,10 @@ class ExportDatabasesCommand(ExportModelsCommand):
def _export(
model: Database, export_related: bool = True
) -> Iterator[tuple[str, Callable[[], str]]]:
- yield ExportDatabasesCommand._file_name(
- model
- ), lambda: ExportDatabasesCommand._file_content(model)
+ yield (
+ ExportDatabasesCommand._file_name(model),
+ lambda: ExportDatabasesCommand._file_content(model),
+ )
if export_related:
db_file_name = get_filename(model.database_name, model.id, skip_id=True)
@@ -127,6 +128,9 @@ class ExportDatabasesCommand(ExportModelsCommand):
payload["version"] = EXPORT_VERSION
payload["database_uuid"] = str(model.uuid)
- yield file_path, functools.partial( # type: ignore
- yaml.safe_dump, payload, sort_keys=False
+ yield (
+ file_path,
+ functools.partial( # type: ignore
+ yaml.safe_dump, payload, sort_keys=False
+ ),
)
diff --git a/superset/commands/database/importers/v1/__init__.py b/superset/commands/database/importers/v1/__init__.py
index 203f0e308..c8684bc5e 100644
--- a/superset/commands/database/importers/v1/__init__.py
+++ b/superset/commands/database/importers/v1/__init__.py
@@ -18,7 +18,7 @@
from typing import Any
from marshmallow import Schema
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import Session # noqa: F401
from superset.commands.database.exceptions import DatabaseImportError
from superset.commands.database.importers.v1.utils import import_database
@@ -30,7 +30,6 @@ from superset.datasets.schemas import ImportV1DatasetSchema
class ImportDatabasesCommand(ImportModelsCommand):
-
"""Import databases"""
dao = DatabaseDAO
diff --git a/superset/commands/database/update.py b/superset/commands/database/update.py
index 5575d674a..b057cb300 100644
--- a/superset/commands/database/update.py
+++ b/superset/commands/database/update.py
@@ -18,7 +18,6 @@ import logging
from typing import Any, Optional
from flask_appbuilder.models.sqla import Model
-from flask_babel import gettext as _
from marshmallow import ValidationError
from superset import is_feature_enabled
@@ -69,11 +68,11 @@ class UpdateDatabaseCommand(BaseCommand):
old_database_name = self._model.database_name
# unmask ``encrypted_extra``
- self._properties[
- "encrypted_extra"
- ] = self._model.db_engine_spec.unmask_encrypted_extra(
- self._model.encrypted_extra,
- self._properties.pop("masked_encrypted_extra", "{}"),
+ self._properties["encrypted_extra"] = (
+ self._model.db_engine_spec.unmask_encrypted_extra(
+ self._model.encrypted_extra,
+ self._properties.pop("masked_encrypted_extra", "{}"),
+ )
)
try:
diff --git a/superset/commands/database/uploaders/base.py b/superset/commands/database/uploaders/base.py
index 80e9b135a..a546ad7d0 100644
--- a/superset/commands/database/uploaders/base.py
+++ b/superset/commands/database/uploaders/base.py
@@ -61,8 +61,7 @@ class BaseDataReader:
self._options = options
@abstractmethod
- def file_to_dataframe(self, file: Any) -> pd.DataFrame:
- ...
+ def file_to_dataframe(self, file: Any) -> pd.DataFrame: ...
def read(
self, file: Any, database: Database, table_name: str, schema_name: Optional[str]
diff --git a/superset/commands/dataset/export.py b/superset/commands/dataset/export.py
index 4dd641190..9646bffc6 100644
--- a/superset/commands/dataset/export.py
+++ b/superset/commands/dataset/export.py
@@ -85,9 +85,10 @@ class ExportDatasetsCommand(ExportModelsCommand):
def _export(
model: SqlaTable, export_related: bool = True
) -> Iterator[tuple[str, Callable[[], str]]]:
- yield ExportDatasetsCommand._file_name(
- model
- ), lambda: ExportDatasetsCommand._file_content(model)
+ yield (
+ ExportDatasetsCommand._file_name(model),
+ lambda: ExportDatasetsCommand._file_content(model),
+ )
# include database as well
if export_related:
diff --git a/superset/commands/dataset/importers/v1/__init__.py b/superset/commands/dataset/importers/v1/__init__.py
index 29f850258..c7ecba122 100644
--- a/superset/commands/dataset/importers/v1/__init__.py
+++ b/superset/commands/dataset/importers/v1/__init__.py
@@ -18,7 +18,7 @@
from typing import Any
from marshmallow import Schema
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import Session # noqa: F401
from superset.commands.database.importers.v1.utils import import_database
from superset.commands.dataset.exceptions import DatasetImportError
@@ -30,7 +30,6 @@ from superset.datasets.schemas import ImportV1DatasetSchema
class ImportDatasetsCommand(ImportModelsCommand):
-
"""Import datasets"""
dao = DatasetDAO
diff --git a/superset/commands/explore/get.py b/superset/commands/explore/get.py
index a0ff17610..d0d71c1ee 100644
--- a/superset/commands/explore/get.py
+++ b/superset/commands/explore/get.py
@@ -87,9 +87,9 @@ class GetExploreCommand(BaseCommand, ABC):
"Form data not found in cache, reverting to chart metadata."
)
elif self._datasource_id:
- initial_form_data[
- "datasource"
- ] = f"{self._datasource_id}__{self._datasource_type}"
+ initial_form_data["datasource"] = (
+ f"{self._datasource_id}__{self._datasource_type}"
+ )
if self._form_data_key:
message = _(
"Form data not found in cache, reverting to dataset metadata."
diff --git a/superset/commands/importers/v1/__init__.py b/superset/commands/importers/v1/__init__.py
index 8d90875fd..6c86faabe 100644
--- a/superset/commands/importers/v1/__init__.py
+++ b/superset/commands/importers/v1/__init__.py
@@ -16,9 +16,9 @@
# under the License.
from typing import Any, Optional
-from marshmallow import Schema, validate
+from marshmallow import Schema, validate # noqa: F401
from marshmallow.exceptions import ValidationError
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import Session # noqa: F401
from superset import db
from superset.commands.base import BaseCommand
@@ -26,12 +26,12 @@ from superset.commands.exceptions import CommandException, CommandInvalidError
from superset.commands.importers.v1.utils import (
load_configs,
load_metadata,
- load_yaml,
- METADATA_FILE_NAME,
+ load_yaml, # noqa: F401
+ METADATA_FILE_NAME, # noqa: F401
validate_metadata_type,
)
from superset.daos.base import BaseDAO
-from superset.models.core import Database
+from superset.models.core import Database # noqa: F401
class ImportModelsCommand(BaseCommand):
@@ -81,7 +81,7 @@ class ImportModelsCommand(BaseCommand):
db.session.rollback()
raise self.import_error() from ex
- def validate(self) -> None:
+ def validate(self) -> None: # noqa: F811
exceptions: list[ValidationError] = []
# verify that the metadata file is present and valid
diff --git a/superset/commands/importers/v1/assets.py b/superset/commands/importers/v1/assets.py
index 876ce509a..29a2dec17 100644
--- a/superset/commands/importers/v1/assets.py
+++ b/superset/commands/importers/v1/assets.py
@@ -112,7 +112,6 @@ class ImportAssetsCommand(BaseCommand):
if file_name.startswith("charts/"):
dataset_dict = dataset_info[config["dataset_uuid"]]
config.update(dataset_dict)
- # pylint: disable=line-too-long
dataset_uid = f"{dataset_dict['datasource_id']}__{dataset_dict['datasource_type']}"
config["params"].update({"datasource": dataset_uid})
if "query_context" in config:
diff --git a/superset/commands/importers/v1/examples.py b/superset/commands/importers/v1/examples.py
index ff69aadc4..6525031ce 100644
--- a/superset/commands/importers/v1/examples.py
+++ b/superset/commands/importers/v1/examples.py
@@ -46,7 +46,6 @@ from superset.utils.database import get_example_database
class ImportExamplesCommand(ImportModelsCommand):
-
"""Import examples"""
dao = BaseDAO
diff --git a/superset/commands/importers/v1/utils.py b/superset/commands/importers/v1/utils.py
index 8cb0c1b55..912a4d1be 100644
--- a/superset/commands/importers/v1/utils.py
+++ b/superset/commands/importers/v1/utils.py
@@ -173,16 +173,16 @@ def load_configs(
# populate ssh_tunnel_passwords from the request or from existing DBs
if file_name in ssh_tunnel_priv_key_passwords:
- config["ssh_tunnel"][
- "private_key_password"
- ] = ssh_tunnel_priv_key_passwords[file_name]
+ config["ssh_tunnel"]["private_key_password"] = (
+ ssh_tunnel_priv_key_passwords[file_name]
+ )
elif (
prefix == "databases"
and config["uuid"] in db_ssh_tunnel_priv_key_passws
):
- config["ssh_tunnel"][
- "private_key_password"
- ] = db_ssh_tunnel_priv_key_passws[config["uuid"]]
+ config["ssh_tunnel"]["private_key_password"] = (
+ db_ssh_tunnel_priv_key_passws[config["uuid"]]
+ )
schema.load(config)
configs[file_name] = config
diff --git a/superset/commands/query/export.py b/superset/commands/query/export.py
index 5997f0c18..db3379ab8 100644
--- a/superset/commands/query/export.py
+++ b/superset/commands/query/export.py
@@ -69,9 +69,10 @@ class ExportSavedQueriesCommand(ExportModelsCommand):
def _export(
model: SavedQuery, export_related: bool = True
) -> Iterator[tuple[str, Callable[[], str]]]:
- yield ExportSavedQueriesCommand._file_name(
- model
- ), lambda: ExportSavedQueriesCommand._file_content(model)
+ yield (
+ ExportSavedQueriesCommand._file_name(model),
+ lambda: ExportSavedQueriesCommand._file_content(model),
+ )
if export_related: # TODO: Maybe we can use database export command here?
# include database as well
diff --git a/superset/commands/query/importers/v1/__init__.py b/superset/commands/query/importers/v1/__init__.py
index f251759c3..3dc25d93a 100644
--- a/superset/commands/query/importers/v1/__init__.py
+++ b/superset/commands/query/importers/v1/__init__.py
@@ -18,13 +18,13 @@
from typing import Any
from marshmallow import Schema
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import Session # noqa: F401
from superset.commands.database.importers.v1.utils import import_database
from superset.commands.importers.v1 import ImportModelsCommand
from superset.commands.query.exceptions import SavedQueryImportError
from superset.commands.query.importers.v1.utils import import_saved_query
-from superset.connectors.sqla.models import SqlaTable
+from superset.connectors.sqla.models import SqlaTable # noqa: F401
from superset.daos.query import SavedQueryDAO
from superset.databases.schemas import ImportV1DatabaseSchema
from superset.queries.saved_queries.schemas import ImportV1SavedQuerySchema
diff --git a/superset/commands/temporary_cache/create.py b/superset/commands/temporary_cache/create.py
index e43d48e54..7d61a7807 100644
--- a/superset/commands/temporary_cache/create.py
+++ b/superset/commands/temporary_cache/create.py
@@ -41,5 +41,4 @@ class CreateTemporaryCacheCommand(BaseCommand, ABC):
pass
@abstractmethod
- def create(self, cmd_params: CommandParameters) -> str:
- ...
+ def create(self, cmd_params: CommandParameters) -> str: ...
diff --git a/superset/commands/temporary_cache/delete.py b/superset/commands/temporary_cache/delete.py
index d35b184d8..1cc291dbf 100644
--- a/superset/commands/temporary_cache/delete.py
+++ b/superset/commands/temporary_cache/delete.py
@@ -41,5 +41,4 @@ class DeleteTemporaryCacheCommand(BaseCommand, ABC):
pass
@abstractmethod
- def delete(self, cmd_params: CommandParameters) -> bool:
- ...
+ def delete(self, cmd_params: CommandParameters) -> bool: ...
diff --git a/superset/commands/temporary_cache/get.py b/superset/commands/temporary_cache/get.py
index fa16977a8..fd1814767 100644
--- a/superset/commands/temporary_cache/get.py
+++ b/superset/commands/temporary_cache/get.py
@@ -42,5 +42,4 @@ class GetTemporaryCacheCommand(BaseCommand, ABC):
pass
@abstractmethod
- def get(self, cmd_params: CommandParameters) -> Optional[str]:
- ...
+ def get(self, cmd_params: CommandParameters) -> Optional[str]: ...
diff --git a/superset/commands/temporary_cache/update.py b/superset/commands/temporary_cache/update.py
index 90b1c3d48..8daaae861 100644
--- a/superset/commands/temporary_cache/update.py
+++ b/superset/commands/temporary_cache/update.py
@@ -45,5 +45,4 @@ class UpdateTemporaryCacheCommand(BaseCommand, ABC):
pass
@abstractmethod
- def update(self, cmd_params: CommandParameters) -> Optional[str]:
- ...
+ def update(self, cmd_params: CommandParameters) -> Optional[str]: ...
diff --git a/superset/common/query_object.py b/superset/common/query_object.py
index a16166134..b183532eb 100644
--- a/superset/common/query_object.py
+++ b/superset/common/query_object.py
@@ -190,7 +190,8 @@ class QueryObject: # pylint: disable=too-many-instance-attributes
return isinstance(metric, str) or is_adhoc_metric(metric)
self.metrics = metrics and [
- x if is_str_or_adhoc(x) else x["label"] for x in metrics # type: ignore
+ x if is_str_or_adhoc(x) else x["label"] # type: ignore
+ for x in metrics
]
def _set_post_processing(
diff --git a/superset/config.py b/superset/config.py
index 940a167bd..11084129c 100644
--- a/superset/config.py
+++ b/superset/config.py
@@ -563,9 +563,9 @@ IS_FEATURE_ENABLED_FUNC: Callable[[str, bool | None], bool] | None = None
#
# Takes as a parameter the common bootstrap payload before transformations.
# Returns a dict containing data that should be added or overridden to the payload.
-COMMON_BOOTSTRAP_OVERRIDES_FUNC: Callable[
- [dict[str, Any]], dict[str, Any]
-] = lambda data: {} # default: empty dict
+COMMON_BOOTSTRAP_OVERRIDES_FUNC: Callable[[dict[str, Any]], dict[str, Any]] = ( # noqa: E731
+ lambda data: {}
+) # default: empty dict
# EXTRA_CATEGORICAL_COLOR_SCHEMES is used for adding custom categorical color schemes
# example code for "My custom warm to hot" color scheme
@@ -640,8 +640,8 @@ THUMBNAIL_EXECUTE_AS = [ExecutorType.CURRENT_USER, ExecutorType.SELENIUM]
# `THUMBNAIL_EXECUTE_AS`; the executor is only equal to the currently logged in
# user if the executor type is equal to `ExecutorType.CURRENT_USER`)
# and return the final digest string:
-THUMBNAIL_DASHBOARD_DIGEST_FUNC: None | (
- Callable[[Dashboard, ExecutorType, str], str]
+THUMBNAIL_DASHBOARD_DIGEST_FUNC: (
+ None | (Callable[[Dashboard, ExecutorType, str], str])
) = None
THUMBNAIL_CHART_DIGEST_FUNC: Callable[[Slice, ExecutorType, str], str] | None = None
@@ -1035,8 +1035,8 @@ SQLLAB_CTAS_NO_LIMIT = False
# else:
# return f'tmp_{schema}'
# Function accepts database object, user object, schema name and sql that will be run.
-SQLLAB_CTAS_SCHEMA_NAME_FUNC: None | (
- Callable[[Database, models.User, str, str], str]
+SQLLAB_CTAS_SCHEMA_NAME_FUNC: (
+ None | (Callable[[Database, models.User, str, str], str])
) = None
# If enabled, it can be used to store the results of long-running queries
@@ -1080,7 +1080,7 @@ UPLOADED_CSV_HIVE_NAMESPACE: str | None = None
# db configuration and a result of this function.
# mypy doesn't catch that if case ensures list content being always str
-ALLOWED_USER_CSV_SCHEMA_FUNC: Callable[[Database, models.User], list[str]] = (
+ALLOWED_USER_CSV_SCHEMA_FUNC: Callable[[Database, models.User], list[str]] = ( # noqa: E731
lambda database, user: [UPLOADED_CSV_HIVE_NAMESPACE]
if UPLOADED_CSV_HIVE_NAMESPACE
else []
@@ -1170,7 +1170,7 @@ BLUEPRINTS: list[Blueprint] = []
# lambda url, query: url if is_fresh(query) else None
# )
# pylint: disable-next=unnecessary-lambda-assignment
-TRACKING_URL_TRANSFORMER = lambda url: url
+TRACKING_URL_TRANSFORMER = lambda url: url # noqa: E731
# customize the polling time of each engine
@@ -1572,7 +1572,7 @@ SSL_CERT_PATH: str | None = None
# conventions and such. You can find examples in the tests.
# pylint: disable-next=unnecessary-lambda-assignment
-SQLA_TABLE_MUTATOR = lambda table: table
+SQLA_TABLE_MUTATOR = lambda table: table # noqa: E731
# Global async query config options.
@@ -1593,9 +1593,9 @@ GLOBAL_ASYNC_QUERIES_REDIS_STREAM_LIMIT_FIREHOSE = 1000000
GLOBAL_ASYNC_QUERIES_REGISTER_REQUEST_HANDLERS = True
GLOBAL_ASYNC_QUERIES_JWT_COOKIE_NAME = "async-token"
GLOBAL_ASYNC_QUERIES_JWT_COOKIE_SECURE = False
-GLOBAL_ASYNC_QUERIES_JWT_COOKIE_SAMESITE: None | (
- Literal["None", "Lax", "Strict"]
-) = None
+GLOBAL_ASYNC_QUERIES_JWT_COOKIE_SAMESITE: None | (Literal["None", "Lax", "Strict"]) = (
+ None
+)
GLOBAL_ASYNC_QUERIES_JWT_COOKIE_DOMAIN = None
GLOBAL_ASYNC_QUERIES_JWT_SECRET = "test-secret-change-me"
GLOBAL_ASYNC_QUERIES_TRANSPORT: Literal["polling", "ws"] = "polling"
@@ -1657,9 +1657,9 @@ ADVANCED_DATA_TYPES: dict[str, AdvancedDataType] = {
# "Xyz",
# [{"col": 'created_by', "opr": 'rel_o_m', "value": 10}],
# )
-WELCOME_PAGE_LAST_TAB: (
- Literal["examples", "all"] | tuple[str, list[dict[str, Any]]]
-) = "all"
+WELCOME_PAGE_LAST_TAB: Literal["examples", "all"] | tuple[str, list[dict[str, Any]]] = (
+ "all"
+)
# Max allowed size for a zipped file
ZIPPED_FILE_MAX_SIZE = 100 * 1024 * 1024 # 100MB
diff --git a/superset/connectors/sqla/__init__.py b/superset/connectors/sqla/__init__.py
index ad52fc6d8..c7619537b 100644
--- a/superset/connectors/sqla/__init__.py
+++ b/superset/connectors/sqla/__init__.py
@@ -14,4 +14,4 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
-from . import models, views
+from . import models, views # noqa: F401
diff --git a/superset/connectors/sqla/models.py b/superset/connectors/sqla/models.py
index 208487299..339be9d17 100644
--- a/superset/connectors/sqla/models.py
+++ b/superset/connectors/sqla/models.py
@@ -175,9 +175,7 @@ class DatasourceKind(StrEnum):
PHYSICAL = "physical"
-class BaseDatasource(
- AuditMixinNullable, ImportExportMixin
-): # pylint: disable=too-many-public-methods
+class BaseDatasource(AuditMixinNullable, ImportExportMixin): # pylint: disable=too-many-public-methods
"""A common interface to objects that are queryable
(tables and datasources)"""
@@ -669,7 +667,8 @@ class BaseDatasource(
)
def get_extra_cache_keys(
- self, query_obj: QueryObjectDict # pylint: disable=unused-argument
+ self,
+ query_obj: QueryObjectDict, # pylint: disable=unused-argument
) -> list[Hashable]:
"""If a datasource needs to provide additional keys for calculation of
cache keys, those can be provided via this method
@@ -757,7 +756,6 @@ class AnnotationDatasource(BaseDatasource):
class TableColumn(AuditMixinNullable, ImportExportMixin, CertificationMixin, Model):
-
"""ORM object for table columns, each table can have multiple columns"""
__tablename__ = "table_columns"
@@ -971,7 +969,6 @@ class TableColumn(AuditMixinNullable, ImportExportMixin, CertificationMixin, Mod
class SqlMetric(AuditMixinNullable, ImportExportMixin, CertificationMixin, Model):
-
"""ORM object for metrics, each table can have multiple metrics"""
__tablename__ = "sql_metrics"
@@ -1289,7 +1286,7 @@ class SqlaTable(
@property
def dttm_cols(self) -> list[str]:
- l = [c.column_name for c in self.columns if c.is_dttm]
+ l = [c.column_name for c in self.columns if c.is_dttm] # noqa: E741
if self.main_dttm_col and self.main_dttm_col not in l:
l.append(self.main_dttm_col)
return l
diff --git a/superset/connectors/sqla/views.py b/superset/connectors/sqla/views.py
index 6917667b0..fe2934ec0 100644
--- a/superset/connectors/sqla/views.py
+++ b/superset/connectors/sqla/views.py
@@ -15,6 +15,7 @@
# specific language governing permissions and limitations
# under the License.
"""Views used by the SqlAlchemy connector"""
+
import logging
import re
diff --git a/superset/daos/chart.py b/superset/daos/chart.py
index eb8b3e809..844b36b6b 100644
--- a/superset/daos/chart.py
+++ b/superset/daos/chart.py
@@ -28,7 +28,7 @@ from superset.models.slice import Slice
from superset.utils.core import get_user_id
if TYPE_CHECKING:
- from superset.connectors.sqla.models import BaseDatasource
+ pass
logger = logging.getLogger(__name__)
diff --git a/superset/daos/dataset.py b/superset/daos/dataset.py
index 4647e02ce..23b46e332 100644
--- a/superset/daos/dataset.py
+++ b/superset/daos/dataset.py
@@ -245,7 +245,7 @@ class DatasetDAO(BaseDAO[SqlaTable]):
[
{**properties, "table_id": model.id}
for properties in property_columns
- if not "id" in properties
+ if "id" not in properties
],
)
@@ -297,7 +297,7 @@ class DatasetDAO(BaseDAO[SqlaTable]):
[
{**properties, "table_id": model.id}
for properties in property_metrics
- if not "id" in properties
+ if "id" not in properties
],
)
diff --git a/superset/dashboards/api.py b/superset/dashboards/api.py
index ee8f1f73a..375c38466 100644
--- a/superset/dashboards/api.py
+++ b/superset/dashboards/api.py
@@ -102,7 +102,7 @@ logger = logging.getLogger(__name__)
def with_dashboard(
- f: Callable[[BaseSupersetModelRestApi, Dashboard], Response]
+ f: Callable[[BaseSupersetModelRestApi, Dashboard], Response],
) -> Callable[[BaseSupersetModelRestApi, str], Response]:
"""
A decorator that looks up the dashboard by id or slug and passes it to the api.
@@ -1261,7 +1261,9 @@ class DashboardRestApi(BaseSupersetModelRestApi):
@permission_name("set_embedded")
@statsd_metrics
@event_logger.log_this_with_context(
- action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.delete_embedded",
+ action=lambda self,
+ *args,
+ **kwargs: f"{self.__class__.__name__}.delete_embedded",
log_to_statsd=False,
)
@with_dashboard
diff --git a/superset/databases/api.py b/superset/databases/api.py
index 0e8e5be39..635a2da79 100644
--- a/superset/databases/api.py
+++ b/superset/databases/api.py
@@ -1692,9 +1692,9 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
and getattr(engine_spec, "default_driver") in drivers
):
payload["parameters"] = engine_spec.parameters_json_schema()
- payload[
- "sqlalchemy_uri_placeholder"
- ] = engine_spec.sqlalchemy_uri_placeholder
+ payload["sqlalchemy_uri_placeholder"] = (
+ engine_spec.sqlalchemy_uri_placeholder
+ )
available_databases.append(payload)
diff --git a/superset/dataframe.py b/superset/dataframe.py
index 808399329..6ce919042 100644
--- a/superset/dataframe.py
+++ b/superset/dataframe.py
@@ -14,8 +14,8 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
-""" Superset utilities for pandas.DataFrame.
-"""
+"""Superset utilities for pandas.DataFrame."""
+
import logging
from typing import Any
diff --git a/superset/datasets/models.py b/superset/datasets/models.py
index aa5d0fe72..76e2156e6 100644
--- a/superset/datasets/models.py
+++ b/superset/datasets/models.py
@@ -24,7 +24,6 @@ dataset, new models for columns, metrics, and tables were also introduced.
These models are not fully implemented, and shouldn't be used yet.
"""
-
import sqlalchemy as sa
from flask_appbuilder import Model
from sqlalchemy.orm import backref, relationship
diff --git a/superset/db_engine_specs/__init__.py b/superset/db_engine_specs/__init__.py
index 9ec0f0416..c40cf1554 100644
--- a/superset/db_engine_specs/__init__.py
+++ b/superset/db_engine_specs/__init__.py
@@ -27,6 +27,7 @@ at all. The classes here will use a common interface to specify all this.
The general idea is to use static classes and an inheritance scheme.
"""
+
import inspect
import logging
import pkgutil
diff --git a/superset/db_engine_specs/base.py b/superset/db_engine_specs/base.py
index 0a5671023..1fc6a40a3 100644
--- a/superset/db_engine_specs/base.py
+++ b/superset/db_engine_specs/base.py
@@ -130,9 +130,7 @@ builtin_time_grains: dict[str | None, str] = {
}
-class TimestampExpression(
- ColumnClause
-): # pylint: disable=abstract-method, too-many-ancestors
+class TimestampExpression(ColumnClause): # pylint: disable=abstract-method, too-many-ancestors
def __init__(self, expr: str, col: ColumnClause, **kwargs: Any) -> None:
"""Sqlalchemy class that can be used to render native column elements respecting
engine-specific quoting rules as part of a string-based expression.
@@ -575,7 +573,8 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
@classmethod
def get_allows_alias_in_select(
- cls, database: Database # pylint: disable=unused-argument
+ cls,
+ database: Database, # pylint: disable=unused-argument
) -> bool:
"""
Method for dynamic `allows_alias_in_select`.
@@ -1035,7 +1034,7 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
return indexes
@classmethod
- def get_extra_table_metadata( # pylint: disable=unused-argument
+ def get_extra_table_metadata(
cls,
database: Database,
table: Table,
diff --git a/superset/db_engine_specs/bigquery.py b/superset/db_engine_specs/bigquery.py
index 20eae4f93..78d845450 100644
--- a/superset/db_engine_specs/bigquery.py
+++ b/superset/db_engine_specs/bigquery.py
@@ -389,9 +389,9 @@ class BigQueryEngineSpec(BaseEngineSpec): # pylint: disable=too-many-public-met
# Add credentials if they are set on the SQLAlchemy dialect.
if creds := engine.dialect.credentials_info:
- to_gbq_kwargs[
- "credentials"
- ] = service_account.Credentials.from_service_account_info(creds)
+ to_gbq_kwargs["credentials"] = (
+ service_account.Credentials.from_service_account_info(creds)
+ )
# Only pass through supported kwargs.
supported_kwarg_keys = {"if_exists"}
diff --git a/superset/db_engine_specs/duckdb.py b/superset/db_engine_specs/duckdb.py
index 56539d665..89c45fb57 100644
--- a/superset/db_engine_specs/duckdb.py
+++ b/superset/db_engine_specs/duckdb.py
@@ -23,7 +23,7 @@ from typing import Any, TYPE_CHECKING, TypedDict
from apispec import APISpec
from apispec.ext.marshmallow import MarshmallowPlugin
-from flask_babel import gettext as __, lazy_gettext as _
+from flask_babel import gettext as __
from marshmallow import fields, Schema
from sqlalchemy import types
from sqlalchemy.engine.reflection import Inspector
diff --git a/superset/db_engine_specs/hive.py b/superset/db_engine_specs/hive.py
index 2655ed6c9..a10f5f66b 100644
--- a/superset/db_engine_specs/hive.py
+++ b/superset/db_engine_specs/hive.py
@@ -50,8 +50,6 @@ from superset.superset_typing import ResultSetColumnType
if TYPE_CHECKING:
# prevent circular imports
- from pyhive.hive import Cursor
- from TCLIService.ttypes import TFetchOrientation
from superset.models.core import Database
@@ -392,7 +390,7 @@ class HiveEngineSpec(PrestoEngineSpec):
# Wait for job id before logging things out
# this allows for prefixing all log lines and becoming
# searchable in something like Kibana
- for l in log_lines[last_log_line:]:
+ for l in log_lines[last_log_line:]: # noqa: E741
logger.info("Query %s: [%s] %s", str(query_id), str(job_id), l)
last_log_line = len(log_lines)
if needs_commit:
diff --git a/superset/db_engine_specs/ocient.py b/superset/db_engine_specs/ocient.py
index 77c906fe7..02d19add8 100644
--- a/superset/db_engine_specs/ocient.py
+++ b/superset/db_engine_specs/ocient.py
@@ -19,7 +19,7 @@ import contextlib
import re
import threading
from re import Pattern
-from typing import Any, Callable, List, NamedTuple, Optional
+from typing import Any, Callable, NamedTuple, Optional
from flask_babel import gettext as __
from sqlalchemy.engine.reflection import Inspector
@@ -98,7 +98,7 @@ def _wkt_to_geo_json(geo_as_wkt: str) -> Any:
def _point_list_to_wkt(
- points, # type: List[pyocient._STPoint]
+ points, # type: list[pyocient._STPoint]
) -> str:
"""
Converts the list of pyocient._STPoint elements to a WKT LineString.
@@ -204,7 +204,7 @@ try:
TypeCodes.ST_LINESTRING: _linestring_to_geo_json,
TypeCodes.ST_POLYGON: _polygon_to_geo_json,
}
-except ImportError as e:
+except ImportError:
_sanitized_ocient_type_codes = {}
diff --git a/superset/db_engine_specs/presto.py b/superset/db_engine_specs/presto.py
index a8143c87e..8a803d3f1 100644
--- a/superset/db_engine_specs/presto.py
+++ b/superset/db_engine_specs/presto.py
@@ -447,14 +447,14 @@ class PrestoBaseEngineSpec(BaseEngineSpec, metaclass=ABCMeta):
limit_clause = f"LIMIT {limit}" if limit else ""
order_by_clause = ""
if order_by:
- l = []
+ l = [] # noqa: E741
for field, desc in order_by:
l.append(field + " DESC" if desc else "")
order_by_clause = "ORDER BY " + ", ".join(l)
where_clause = ""
if filters:
- l = []
+ l = [] # noqa: E741
for field, value in filters.items():
l.append(f"{field} = '{value}'")
where_clause = "WHERE " + " AND ".join(l)
diff --git a/superset/examples/countries.py b/superset/examples/countries.py
index 2ea12baae..b40468cef 100644
--- a/superset/examples/countries.py
+++ b/superset/examples/countries.py
@@ -15,6 +15,7 @@
# specific language governing permissions and limitations
# under the License.
"""This module contains data related to countries and is used for geo mapping"""
+
# pylint: disable=too-many-lines
from typing import Any, Optional
diff --git a/superset/examples/data_loading.py b/superset/examples/data_loading.py
index 7b79adfcf..9cffa06e8 100644
--- a/superset/examples/data_loading.py
+++ b/superset/examples/data_loading.py
@@ -14,7 +14,6 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
-# pylint: disable=unused-import
from .bart_lines import load_bart_lines
from .big_data import load_big_data
from .birth_names import load_birth_names
@@ -33,3 +32,24 @@ from .supported_charts_dashboard import load_supported_charts_dashboard
from .tabbed_dashboard import load_tabbed_dashboard
from .utils import load_examples_from_configs
from .world_bank import load_world_bank_health_n_pop
+
+__all__ = [
+ "load_bart_lines",
+ "load_big_data",
+ "load_birth_names",
+ "load_country_map_data",
+ "load_css_templates",
+ "load_deck_dash",
+ "load_energy",
+ "load_flights",
+ "load_long_lat_data",
+ "load_misc_dashboard",
+ "load_multiformat_time_series",
+ "load_paris_iris_geojson",
+ "load_random_time_series_data",
+ "load_sf_population_polygons",
+ "load_supported_charts_dashboard",
+ "load_tabbed_dashboard",
+ "load_examples_from_configs",
+ "load_world_bank_health_n_pop",
+]
diff --git a/superset/examples/energy.py b/superset/examples/energy.py
index 1f11c0f3f..16d4eea37 100644
--- a/superset/examples/energy.py
+++ b/superset/examples/energy.py
@@ -15,6 +15,7 @@
# specific language governing permissions and limitations
# under the License.
"""Loads datasets, dashboards and slices in a new superset instance"""
+
import textwrap
import pandas as pd
diff --git a/superset/examples/flights.py b/superset/examples/flights.py
index a42df2023..1e22fed46 100644
--- a/superset/examples/flights.py
+++ b/superset/examples/flights.py
@@ -42,9 +42,7 @@ def load_flights(only_metadata: bool = False, force: bool = False) -> None:
pdf[ # pylint: disable=unsupported-assignment-operation,useless-suppression
"ds"
- ] = (
- pdf.YEAR.map(str) + "-0" + pdf.MONTH.map(str) + "-0" + pdf.DAY.map(str)
- )
+ ] = pdf.YEAR.map(str) + "-0" + pdf.MONTH.map(str) + "-0" + pdf.DAY.map(str)
pdf.ds = pd.to_datetime(pdf.ds)
pdf.drop(columns=["DAY", "MONTH", "YEAR"])
pdf = pdf.join(airports, on="ORIGIN_AIRPORT", rsuffix="_ORIG")
diff --git a/superset/examples/helpers.py b/superset/examples/helpers.py
index 9f893f1cc..76310b404 100644
--- a/superset/examples/helpers.py
+++ b/superset/examples/helpers.py
@@ -15,6 +15,7 @@
# specific language governing permissions and limitations
# under the License.
"""Loads datasets, dashboards and slices in a new superset instance"""
+
import json
import os
from typing import Any
diff --git a/superset/examples/tabbed_dashboard.py b/superset/examples/tabbed_dashboard.py
index b05726334..cc735ec4e 100644
--- a/superset/examples/tabbed_dashboard.py
+++ b/superset/examples/tabbed_dashboard.py
@@ -15,6 +15,7 @@
# specific language governing permissions and limitations
# under the License.
"""Loads datasets, dashboards and slices in a new superset instance"""
+
import json
import textwrap
diff --git a/superset/examples/world_bank.py b/superset/examples/world_bank.py
index 74ea2c43a..c98c1fc11 100644
--- a/superset/examples/world_bank.py
+++ b/superset/examples/world_bank.py
@@ -15,6 +15,7 @@
# specific language governing permissions and limitations
# under the License.
"""Loads datasets, dashboards and slices in a new superset instance"""
+
import json
import os
diff --git a/superset/extensions/metadb.py b/superset/extensions/metadb.py
index ea6ce118c..0d33ac97e 100644
--- a/superset/extensions/metadb.py
+++ b/superset/extensions/metadb.py
@@ -72,7 +72,6 @@ from superset import db, feature_flag_manager, security_manager, sql_parse
# pylint: disable=abstract-method
class SupersetAPSWDialect(APSWDialect):
-
"""
A SQLAlchemy dialect for an internal Superset engine.
@@ -187,7 +186,6 @@ class FallbackField(Field[Any, str]):
# pylint: disable=too-many-instance-attributes
class SupersetShillelaghAdapter(Adapter):
-
"""
A Shillelagh adapter for Superset tables.
diff --git a/superset/forms.py b/superset/forms.py
index f1e220ba9..a808da74b 100644
--- a/superset/forms.py
+++ b/superset/forms.py
@@ -15,6 +15,7 @@
# specific language governing permissions and limitations
# under the License.
"""Contains the logic to create cohesive forms on the explore view"""
+
import json
from typing import Any, Optional
diff --git a/superset/jinja_context.py b/superset/jinja_context.py
index 23949cca1..53325b7a4 100644
--- a/superset/jinja_context.py
+++ b/superset/jinja_context.py
@@ -15,6 +15,7 @@
# specific language governing permissions and limitations
# under the License.
"""Defines the templating context for SQL Lab"""
+
import json
import re
from datetime import datetime
diff --git a/superset/key_value/types.py b/superset/key_value/types.py
index f5c846799..7b0130c0e 100644
--- a/superset/key_value/types.py
+++ b/superset/key_value/types.py
@@ -59,12 +59,10 @@ class SharedKey(StrEnum):
class KeyValueCodec(ABC):
@abstractmethod
- def encode(self, value: Any) -> bytes:
- ...
+ def encode(self, value: Any) -> bytes: ...
@abstractmethod
- def decode(self, value: bytes) -> Any:
- ...
+ def decode(self, value: bytes) -> Any: ...
class JsonKeyValueCodec(KeyValueCodec):
diff --git a/superset/legacy.py b/superset/legacy.py
index 1a4cd10d7..06fc52758 100644
--- a/superset/legacy.py
+++ b/superset/legacy.py
@@ -15,6 +15,7 @@
# specific language governing permissions and limitations
# under the License.
"""Code related with dealing with legacy / change management"""
+
from typing import Any
diff --git a/superset/migrations/migration_utils.py b/superset/migrations/migration_utils.py
index c754669a1..99229a102 100644
--- a/superset/migrations/migration_utils.py
+++ b/superset/migrations/migration_utils.py
@@ -15,7 +15,7 @@
# specific language governing permissions and limitations
# under the License.
-from alembic.operations import BatchOperations, Operations
+from alembic.operations import Operations
naming_convention = {
"fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
diff --git a/superset/migrations/shared/migrate_viz/__init__.py b/superset/migrations/shared/migrate_viz/__init__.py
index aaa860e73..166cae398 100644
--- a/superset/migrations/shared/migrate_viz/__init__.py
+++ b/superset/migrations/shared/migrate_viz/__init__.py
@@ -14,4 +14,4 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
-from .processors import *
+from .processors import * # noqa: F403
diff --git a/superset/migrations/shared/migrate_viz/base.py b/superset/migrations/shared/migrate_viz/base.py
index 51699b9d0..83d27cdd4 100644
--- a/superset/migrations/shared/migrate_viz/base.py
+++ b/superset/migrations/shared/migrate_viz/base.py
@@ -24,7 +24,7 @@ from sqlalchemy import and_, Column, Integer, String, Text
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session
-from superset import conf, is_feature_enabled
+from superset import conf
from superset.constants import TimeGrain
from superset.migrations.shared.utils import paginated_update, try_load_json
diff --git a/superset/migrations/shared/utils.py b/superset/migrations/shared/utils.py
index 2ae0dfeac..44d4c3924 100644
--- a/superset/migrations/shared/utils.py
+++ b/superset/migrations/shared/utils.py
@@ -23,10 +23,9 @@ from typing import Any, Callable, Optional, Union
from uuid import uuid4
from alembic import op
-from sqlalchemy import engine_from_config, inspect
+from sqlalchemy import inspect
from sqlalchemy.dialects.mysql.base import MySQLDialect
from sqlalchemy.dialects.postgresql.base import PGDialect
-from sqlalchemy.engine import reflection
from sqlalchemy.exc import NoSuchTableError
from sqlalchemy.orm import Query, Session
@@ -106,7 +105,7 @@ def paginated_update(
result = session.execute(query)
if print_page_progress is None or print_page_progress is True:
- print_page_progress = lambda processed, total: print(
+ print_page_progress = lambda processed, total: print( # noqa: E731
f" {processed}/{total}", end="\r"
)
diff --git a/superset/migrations/versions/2015-09-21_17-30_4e6a06bad7a8_init.py b/superset/migrations/versions/2015-09-21_17-30_4e6a06bad7a8_init.py
index 31d46b879..cd0f74eaf 100644
--- a/superset/migrations/versions/2015-09-21_17-30_4e6a06bad7a8_init.py
+++ b/superset/migrations/versions/2015-09-21_17-30_4e6a06bad7a8_init.py
@@ -26,8 +26,8 @@ Create Date: 2015-09-21 17:30:38.442998
revision = "4e6a06bad7a8"
down_revision = None
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2015-10-05_10-32_5a7bad26f2a7_.py b/superset/migrations/versions/2015-10-05_10-32_5a7bad26f2a7_.py
index 36bb567b1..260eec998 100644
--- a/superset/migrations/versions/2015-10-05_10-32_5a7bad26f2a7_.py
+++ b/superset/migrations/versions/2015-10-05_10-32_5a7bad26f2a7_.py
@@ -26,8 +26,8 @@ Create Date: 2015-10-05 10:32:15.850753
revision = "5a7bad26f2a7"
down_revision = "4e6a06bad7a8"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2015-10-05_22-11_1e2841a4128_.py b/superset/migrations/versions/2015-10-05_22-11_1e2841a4128_.py
index 5bc8f8126..356c5ee35 100644
--- a/superset/migrations/versions/2015-10-05_22-11_1e2841a4128_.py
+++ b/superset/migrations/versions/2015-10-05_22-11_1e2841a4128_.py
@@ -26,8 +26,8 @@ Create Date: 2015-10-05 22:11:00.537054
revision = "1e2841a4128"
down_revision = "5a7bad26f2a7"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2015-10-19_20-54_2929af7925ed_tz_offsets_in_data_sources.py b/superset/migrations/versions/2015-10-19_20-54_2929af7925ed_tz_offsets_in_data_sources.py
index 8ddd23da4..839d6dbd8 100644
--- a/superset/migrations/versions/2015-10-19_20-54_2929af7925ed_tz_offsets_in_data_sources.py
+++ b/superset/migrations/versions/2015-10-19_20-54_2929af7925ed_tz_offsets_in_data_sources.py
@@ -26,8 +26,8 @@ Create Date: 2015-10-19 20:54:00.565633
revision = "2929af7925ed"
down_revision = "1e2841a4128"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2015-12-04_09-42_1a48a5411020_adding_slug_to_dash.py b/superset/migrations/versions/2015-12-04_09-42_1a48a5411020_adding_slug_to_dash.py
index d17a7e62c..1bee2ed14 100644
--- a/superset/migrations/versions/2015-12-04_09-42_1a48a5411020_adding_slug_to_dash.py
+++ b/superset/migrations/versions/2015-12-04_09-42_1a48a5411020_adding_slug_to_dash.py
@@ -26,15 +26,15 @@ Create Date: 2015-12-04 09:42:16.973264
revision = "1a48a5411020"
down_revision = "289ce07647b"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
op.add_column("dashboards", sa.Column("slug", sa.String(length=255), nullable=True))
try:
op.create_unique_constraint("idx_unique_slug", "dashboards", ["slug"])
- except:
+ except: # noqa: E722
pass
diff --git a/superset/migrations/versions/2015-12-04_11-16_315b3f4da9b0_adding_log_model.py b/superset/migrations/versions/2015-12-04_11-16_315b3f4da9b0_adding_log_model.py
index d19c243ac..5f1282139 100644
--- a/superset/migrations/versions/2015-12-04_11-16_315b3f4da9b0_adding_log_model.py
+++ b/superset/migrations/versions/2015-12-04_11-16_315b3f4da9b0_adding_log_model.py
@@ -26,8 +26,8 @@ Create Date: 2015-12-04 11:16:58.226984
revision = "315b3f4da9b0"
down_revision = "1a48a5411020"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2015-12-13_08-38_55179c7f25c7_sqla_descr.py b/superset/migrations/versions/2015-12-13_08-38_55179c7f25c7_sqla_descr.py
index 01e8950bd..d281f0350 100644
--- a/superset/migrations/versions/2015-12-13_08-38_55179c7f25c7_sqla_descr.py
+++ b/superset/migrations/versions/2015-12-13_08-38_55179c7f25c7_sqla_descr.py
@@ -26,8 +26,8 @@ Create Date: 2015-12-13 08:38:43.704145
revision = "55179c7f25c7"
down_revision = "315b3f4da9b0"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2015-12-14_13-37_12d55656cbca_is_featured.py b/superset/migrations/versions/2015-12-14_13-37_12d55656cbca_is_featured.py
index 229e918cb..c59c90bd6 100644
--- a/superset/migrations/versions/2015-12-14_13-37_12d55656cbca_is_featured.py
+++ b/superset/migrations/versions/2015-12-14_13-37_12d55656cbca_is_featured.py
@@ -26,8 +26,8 @@ Create Date: 2015-12-14 13:37:17.374852
revision = "12d55656cbca"
down_revision = "55179c7f25c7"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2015-12-15_17-02_2591d77e9831_user_id.py b/superset/migrations/versions/2015-12-15_17-02_2591d77e9831_user_id.py
index e22b50559..3bd4c0ab4 100644
--- a/superset/migrations/versions/2015-12-15_17-02_2591d77e9831_user_id.py
+++ b/superset/migrations/versions/2015-12-15_17-02_2591d77e9831_user_id.py
@@ -26,8 +26,8 @@ Create Date: 2015-12-15 17:02:45.128709
revision = "2591d77e9831"
down_revision = "12d55656cbca"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2016-01-13_20-24_8e80a26a31db_.py b/superset/migrations/versions/2016-01-13_20-24_8e80a26a31db_.py
index 2f3db103f..e0f85c3a5 100644
--- a/superset/migrations/versions/2016-01-13_20-24_8e80a26a31db_.py
+++ b/superset/migrations/versions/2016-01-13_20-24_8e80a26a31db_.py
@@ -21,12 +21,13 @@ Revises: 2591d77e9831
Create Date: 2016-01-13 20:24:45.256437
"""
+
# revision identifiers, used by Alembic.
revision = "8e80a26a31db"
down_revision = "2591d77e9831"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2016-01-17_22-00_7dbf98566af7_slice_description.py b/superset/migrations/versions/2016-01-17_22-00_7dbf98566af7_slice_description.py
index eb926ccd7..96907c314 100644
--- a/superset/migrations/versions/2016-01-17_22-00_7dbf98566af7_slice_description.py
+++ b/superset/migrations/versions/2016-01-17_22-00_7dbf98566af7_slice_description.py
@@ -26,8 +26,8 @@ Create Date: 2016-01-17 22:00:23.640788
revision = "7dbf98566af7"
down_revision = "8e80a26a31db"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2016-01-18_23-43_43df8de3a5f4_dash_json.py b/superset/migrations/versions/2016-01-18_23-43_43df8de3a5f4_dash_json.py
index 9bbfde91a..7781de9d3 100644
--- a/superset/migrations/versions/2016-01-18_23-43_43df8de3a5f4_dash_json.py
+++ b/superset/migrations/versions/2016-01-18_23-43_43df8de3a5f4_dash_json.py
@@ -26,8 +26,8 @@ Create Date: 2016-01-18 23:43:16.073483
revision = "43df8de3a5f4"
down_revision = "7dbf98566af7"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2016-02-03_17-41_d827694c7555_css_templates.py b/superset/migrations/versions/2016-02-03_17-41_d827694c7555_css_templates.py
index 57a287eb7..f65582563 100644
--- a/superset/migrations/versions/2016-02-03_17-41_d827694c7555_css_templates.py
+++ b/superset/migrations/versions/2016-02-03_17-41_d827694c7555_css_templates.py
@@ -26,8 +26,8 @@ Create Date: 2016-02-03 17:41:10.944019
revision = "d827694c7555"
down_revision = "43df8de3a5f4"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2016-02-10_08-47_430039611635_log_more.py b/superset/migrations/versions/2016-02-10_08-47_430039611635_log_more.py
index 50a60dc91..e7ca98a23 100644
--- a/superset/migrations/versions/2016-02-10_08-47_430039611635_log_more.py
+++ b/superset/migrations/versions/2016-02-10_08-47_430039611635_log_more.py
@@ -21,6 +21,7 @@ Revises: d827694c7555
Create Date: 2016-02-10 08:47:28.950891
"""
+
import sqlalchemy as sa
from alembic import op
diff --git a/superset/migrations/versions/2016-03-13_09-56_a2d606a761d9_adding_favstar_model.py b/superset/migrations/versions/2016-03-13_09-56_a2d606a761d9_adding_favstar_model.py
index d9253636b..b2ba06749 100644
--- a/superset/migrations/versions/2016-03-13_09-56_a2d606a761d9_adding_favstar_model.py
+++ b/superset/migrations/versions/2016-03-13_09-56_a2d606a761d9_adding_favstar_model.py
@@ -26,8 +26,8 @@ Create Date: 2016-03-13 09:56:58.329512
revision = "a2d606a761d9"
down_revision = "18e88e1cc004"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2016-03-13_21-30_18e88e1cc004_making_audit_nullable.py b/superset/migrations/versions/2016-03-13_21-30_18e88e1cc004_making_audit_nullable.py
index a39b6bc82..4cd834b88 100644
--- a/superset/migrations/versions/2016-03-13_21-30_18e88e1cc004_making_audit_nullable.py
+++ b/superset/migrations/versions/2016-03-13_21-30_18e88e1cc004_making_audit_nullable.py
@@ -21,6 +21,7 @@ Revises: 430039611635
Create Date: 2016-03-13 21:30:24.833107
"""
+
import sqlalchemy as sa
from alembic import op
diff --git a/superset/migrations/versions/2016-03-17_08-40_836c0bf75904_cache_timeouts.py b/superset/migrations/versions/2016-03-17_08-40_836c0bf75904_cache_timeouts.py
index e4b07af37..7211cba1c 100644
--- a/superset/migrations/versions/2016-03-17_08-40_836c0bf75904_cache_timeouts.py
+++ b/superset/migrations/versions/2016-03-17_08-40_836c0bf75904_cache_timeouts.py
@@ -21,12 +21,13 @@ Revises: 18e88e1cc004
Create Date: 2016-03-17 08:40:03.186534
"""
+
# revision identifiers, used by Alembic.
revision = "836c0bf75904"
down_revision = "18e88e1cc004"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2016-03-24_14-13_763d4b211ec9_fixing_audit_fk.py b/superset/migrations/versions/2016-03-24_14-13_763d4b211ec9_fixing_audit_fk.py
index fdb7b1407..94521d556 100644
--- a/superset/migrations/versions/2016-03-24_14-13_763d4b211ec9_fixing_audit_fk.py
+++ b/superset/migrations/versions/2016-03-24_14-13_763d4b211ec9_fixing_audit_fk.py
@@ -26,8 +26,8 @@ Create Date: 2016-03-24 14:13:44.817723
revision = "763d4b211ec9"
down_revision = "d2424a248d63"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
@@ -96,7 +96,7 @@ def upgrade():
op.alter_column("url", "created_on", existing_type=sa.DATETIME(), nullable=True)
op.create_foreign_key(None, "metrics", "ab_user", ["changed_by_fk"], ["id"])
op.create_foreign_key(None, "metrics", "ab_user", ["created_by_fk"], ["id"])
- except:
+ except: # noqa: E722
pass
@@ -174,5 +174,5 @@ def downgrade():
op.alter_column(
"columns", "changed_on", existing_type=sa.DATETIME(), nullable=False
)
- except:
+ except: # noqa: E722
pass
diff --git a/superset/migrations/versions/2016-03-25_14-35_1d2ddd543133_log_dt.py b/superset/migrations/versions/2016-03-25_14-35_1d2ddd543133_log_dt.py
index b6d8492a1..1a1cb4403 100644
--- a/superset/migrations/versions/2016-03-25_14-35_1d2ddd543133_log_dt.py
+++ b/superset/migrations/versions/2016-03-25_14-35_1d2ddd543133_log_dt.py
@@ -21,12 +21,13 @@ Revises: d2424a248d63
Create Date: 2016-03-25 14:35:44.642576
"""
+
# revision identifiers, used by Alembic.
revision = "1d2ddd543133"
down_revision = "d2424a248d63"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2016-04-03_15-23_867bf4f117f9_adding_extra_field_to_database_model.py b/superset/migrations/versions/2016-04-03_15-23_867bf4f117f9_adding_extra_field_to_database_model.py
index c4497ce28..ab6de980d 100644
--- a/superset/migrations/versions/2016-04-03_15-23_867bf4f117f9_adding_extra_field_to_database_model.py
+++ b/superset/migrations/versions/2016-04-03_15-23_867bf4f117f9_adding_extra_field_to_database_model.py
@@ -21,12 +21,13 @@ Revises: fee7b758c130
Create Date: 2016-04-03 15:23:20.280841
"""
+
# revision identifiers, used by Alembic.
revision = "867bf4f117f9"
down_revision = "fee7b758c130"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2016-04-11_22-41_bb51420eaf83_add_schema_to_table_model.py b/superset/migrations/versions/2016-04-11_22-41_bb51420eaf83_add_schema_to_table_model.py
index 7598cd64e..37dbdeae1 100644
--- a/superset/migrations/versions/2016-04-11_22-41_bb51420eaf83_add_schema_to_table_model.py
+++ b/superset/migrations/versions/2016-04-11_22-41_bb51420eaf83_add_schema_to_table_model.py
@@ -26,8 +26,8 @@ Create Date: 2016-04-11 22:41:06.185955
revision = "bb51420eaf83"
down_revision = "867bf4f117f9"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2016-04-15_08-31_b4456560d4f3_change_table_unique_constraint.py b/superset/migrations/versions/2016-04-15_08-31_b4456560d4f3_change_table_unique_constraint.py
index 5ce049f8e..429c44757 100644
--- a/superset/migrations/versions/2016-04-15_08-31_b4456560d4f3_change_table_unique_constraint.py
+++ b/superset/migrations/versions/2016-04-15_08-31_b4456560d4f3_change_table_unique_constraint.py
@@ -21,6 +21,7 @@ Revises: bb51420eaf83
Create Date: 2016-04-15 08:31:26.249591
"""
+
from alembic import op
# revision identifiers, used by Alembic.
diff --git a/superset/migrations/versions/2016-04-15_17-58_4fa88fe24e94_owners_many_to_many.py b/superset/migrations/versions/2016-04-15_17-58_4fa88fe24e94_owners_many_to_many.py
index b04cfefa6..bbb015775 100644
--- a/superset/migrations/versions/2016-04-15_17-58_4fa88fe24e94_owners_many_to_many.py
+++ b/superset/migrations/versions/2016-04-15_17-58_4fa88fe24e94_owners_many_to_many.py
@@ -21,12 +21,13 @@ Revises: b4456560d4f3
Create Date: 2016-04-15 17:58:33.842012
"""
+
# revision identifiers, used by Alembic.
revision = "4fa88fe24e94"
down_revision = "b4456560d4f3"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2016-04-25_08-54_c3a8f8611885_materializing_permission.py b/superset/migrations/versions/2016-04-25_08-54_c3a8f8611885_materializing_permission.py
index c3d04e875..0231fb1e2 100644
--- a/superset/migrations/versions/2016-04-25_08-54_c3a8f8611885_materializing_permission.py
+++ b/superset/migrations/versions/2016-04-25_08-54_c3a8f8611885_materializing_permission.py
@@ -21,6 +21,7 @@ Revises: 4fa88fe24e94
Create Date: 2016-04-25 08:54:04.303859
"""
+
import sqlalchemy as sa
from alembic import op
from sqlalchemy import Column, ForeignKey, Integer, String
diff --git a/superset/migrations/versions/2016-05-01_12-21_f0fbf6129e13_adding_verbose_name_to_tablecolumn.py b/superset/migrations/versions/2016-05-01_12-21_f0fbf6129e13_adding_verbose_name_to_tablecolumn.py
index 346b0af09..17cb0d1c1 100644
--- a/superset/migrations/versions/2016-05-01_12-21_f0fbf6129e13_adding_verbose_name_to_tablecolumn.py
+++ b/superset/migrations/versions/2016-05-01_12-21_f0fbf6129e13_adding_verbose_name_to_tablecolumn.py
@@ -26,8 +26,8 @@ Create Date: 2016-05-01 12:21:18.331191
revision = "f0fbf6129e13"
down_revision = "c3a8f8611885"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2016-05-11_17-28_956a063c52b3_adjusting_key_length.py b/superset/migrations/versions/2016-05-11_17-28_956a063c52b3_adjusting_key_length.py
index 4e945ef0b..41dfdb212 100644
--- a/superset/migrations/versions/2016-05-11_17-28_956a063c52b3_adjusting_key_length.py
+++ b/superset/migrations/versions/2016-05-11_17-28_956a063c52b3_adjusting_key_length.py
@@ -21,6 +21,7 @@ Revises: f0fbf6129e13
Create Date: 2016-05-11 17:28:32.407340
"""
+
import sqlalchemy as sa
from alembic import op
diff --git a/superset/migrations/versions/2016-05-27_15-03_1226819ee0e3_fix_wrong_constraint_on_table_columns.py b/superset/migrations/versions/2016-05-27_15-03_1226819ee0e3_fix_wrong_constraint_on_table_columns.py
index 9f09c8991..9eda0bf89 100644
--- a/superset/migrations/versions/2016-05-27_15-03_1226819ee0e3_fix_wrong_constraint_on_table_columns.py
+++ b/superset/migrations/versions/2016-05-27_15-03_1226819ee0e3_fix_wrong_constraint_on_table_columns.py
@@ -21,6 +21,7 @@ Revises: 956a063c52b3
Create Date: 2016-05-27 15:03:32.980343
"""
+
import logging
from alembic import op
@@ -59,7 +60,7 @@ def upgrade():
["datasource_name"],
["datasource_name"],
)
- except:
+ except: # noqa: E722
logging.warning("Could not find or drop constraint on `columns`")
diff --git a/superset/migrations/versions/2016-06-07_12-33_d8bc074f7aad_add_new_field_is_restricted_to_.py b/superset/migrations/versions/2016-06-07_12-33_d8bc074f7aad_add_new_field_is_restricted_to_.py
index d75655602..def84c9a9 100644
--- a/superset/migrations/versions/2016-06-07_12-33_d8bc074f7aad_add_new_field_is_restricted_to_.py
+++ b/superset/migrations/versions/2016-06-07_12-33_d8bc074f7aad_add_new_field_is_restricted_to_.py
@@ -26,12 +26,12 @@ Create Date: 2016-06-07 12:33:25.756640
revision = "d8bc074f7aad"
down_revision = "1226819ee0e3"
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy import Boolean, Column, Integer
-from sqlalchemy.ext.declarative import declarative_base
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
+from sqlalchemy import Boolean, Column, Integer # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
+from superset import db # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2016-06-16_14-15_960c69cb1f5b_.py b/superset/migrations/versions/2016-06-16_14-15_960c69cb1f5b_.py
index 6ff1fc9f3..f1acb5e74 100644
--- a/superset/migrations/versions/2016-06-16_14-15_960c69cb1f5b_.py
+++ b/superset/migrations/versions/2016-06-16_14-15_960c69cb1f5b_.py
@@ -26,8 +26,8 @@ Create Date: 2016-06-16 14:15:19.573183
revision = "960c69cb1f5b"
down_revision = "27ae655e4247"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2016-06-27_08-43_27ae655e4247_make_creator_owners.py b/superset/migrations/versions/2016-06-27_08-43_27ae655e4247_make_creator_owners.py
index 1bf5e2eca..76a823041 100644
--- a/superset/migrations/versions/2016-06-27_08-43_27ae655e4247_make_creator_owners.py
+++ b/superset/migrations/versions/2016-06-27_08-43_27ae655e4247_make_creator_owners.py
@@ -26,15 +26,13 @@ Create Date: 2016-06-27 08:43:52.592242
revision = "27ae655e4247"
down_revision = "d8bc074f7aad"
-from alembic import op
-from flask import g
-from flask_appbuilder import Model
-from sqlalchemy import Column, ForeignKey, Integer, Table
-from sqlalchemy.ext.declarative import declarative_base, declared_attr
-from sqlalchemy.orm import relationship
+from alembic import op # noqa: E402
+from sqlalchemy import Column, ForeignKey, Integer, Table # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base, declared_attr # noqa: E402
+from sqlalchemy.orm import relationship # noqa: E402
-from superset import db
-from superset.utils.core import get_user_id
+from superset import db # noqa: E402
+from superset.utils.core import get_user_id # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2016-07-06_22-04_f162a1dea4c4_d3format_by_metric.py b/superset/migrations/versions/2016-07-06_22-04_f162a1dea4c4_d3format_by_metric.py
index a3760b1e9..fdece59e0 100644
--- a/superset/migrations/versions/2016-07-06_22-04_f162a1dea4c4_d3format_by_metric.py
+++ b/superset/migrations/versions/2016-07-06_22-04_f162a1dea4c4_d3format_by_metric.py
@@ -26,8 +26,8 @@ Create Date: 2016-07-06 22:04:28.685100
revision = "f162a1dea4c4"
down_revision = "960c69cb1f5b"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2016-07-25_17-48_ad82a75afd82_add_query_model.py b/superset/migrations/versions/2016-07-25_17-48_ad82a75afd82_add_query_model.py
index 26d190a87..c62ac0c41 100644
--- a/superset/migrations/versions/2016-07-25_17-48_ad82a75afd82_add_query_model.py
+++ b/superset/migrations/versions/2016-07-25_17-48_ad82a75afd82_add_query_model.py
@@ -26,8 +26,8 @@ Create Date: 2016-07-25 17:48:12.771103
revision = "ad82a75afd82"
down_revision = "f162a1dea4c4"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2016-08-18_14-06_3c3ffe173e4f_add_sql_string_to_table.py b/superset/migrations/versions/2016-08-18_14-06_3c3ffe173e4f_add_sql_string_to_table.py
index e94de6147..e8274c6d3 100644
--- a/superset/migrations/versions/2016-08-18_14-06_3c3ffe173e4f_add_sql_string_to_table.py
+++ b/superset/migrations/versions/2016-08-18_14-06_3c3ffe173e4f_add_sql_string_to_table.py
@@ -26,8 +26,8 @@ Create Date: 2016-08-18 14:06:28.784699
revision = "3c3ffe173e4f"
down_revision = "ad82a75afd82"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2016-08-31_10-26_41f6a59a61f2_database_options_for_sql_lab.py b/superset/migrations/versions/2016-08-31_10-26_41f6a59a61f2_database_options_for_sql_lab.py
index 9d47815e9..c76bc84d7 100644
--- a/superset/migrations/versions/2016-08-31_10-26_41f6a59a61f2_database_options_for_sql_lab.py
+++ b/superset/migrations/versions/2016-08-31_10-26_41f6a59a61f2_database_options_for_sql_lab.py
@@ -21,6 +21,7 @@ Revises: 3c3ffe173e4f
Create Date: 2016-08-31 10:26:37.969107
"""
+
import sqlalchemy as sa
from alembic import op
diff --git a/superset/migrations/versions/2016-09-07_23-50_33d996bcc382_update_slice_model.py b/superset/migrations/versions/2016-09-07_23-50_33d996bcc382_update_slice_model.py
index 8f4542cb3..01907e958 100644
--- a/superset/migrations/versions/2016-09-07_23-50_33d996bcc382_update_slice_model.py
+++ b/superset/migrations/versions/2016-09-07_23-50_33d996bcc382_update_slice_model.py
@@ -21,6 +21,7 @@ Revises: 41f6a59a61f2
Create Date: 2016-09-07 23:50:59.366779
"""
+
import sqlalchemy as sa
from alembic import op
from sqlalchemy import Column, Integer, String
diff --git a/superset/migrations/versions/2016-09-09_17-39_5e4a03ef0bf0_add_request_access_model.py b/superset/migrations/versions/2016-09-09_17-39_5e4a03ef0bf0_add_request_access_model.py
index 11d77f51a..cfba54e8b 100644
--- a/superset/migrations/versions/2016-09-09_17-39_5e4a03ef0bf0_add_request_access_model.py
+++ b/superset/migrations/versions/2016-09-09_17-39_5e4a03ef0bf0_add_request_access_model.py
@@ -21,6 +21,7 @@ Revises: 41f6a59a61f2
Create Date: 2016-09-09 17:39:57.846309
"""
+
import sqlalchemy as sa
from alembic import op
diff --git a/superset/migrations/versions/2016-09-12_23-33_4500485bde7d_allow_run_sync_async.py b/superset/migrations/versions/2016-09-12_23-33_4500485bde7d_allow_run_sync_async.py
index 338750255..a1e71a554 100644
--- a/superset/migrations/versions/2016-09-12_23-33_4500485bde7d_allow_run_sync_async.py
+++ b/superset/migrations/versions/2016-09-12_23-33_4500485bde7d_allow_run_sync_async.py
@@ -26,8 +26,8 @@ Create Date: 2016-09-12 23:33:14.789632
revision = "4500485bde7d"
down_revision = "41f6a59a61f2"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2016-09-19_17-22_b347b202819b_.py b/superset/migrations/versions/2016-09-19_17-22_b347b202819b_.py
index 78cf56fc6..5948e45c8 100644
--- a/superset/migrations/versions/2016-09-19_17-22_b347b202819b_.py
+++ b/superset/migrations/versions/2016-09-19_17-22_b347b202819b_.py
@@ -21,6 +21,7 @@ Revises: ('33d996bcc382', '65903709c321')
Create Date: 2016-09-19 17:22:40.138601
"""
+
# revision identifiers, used by Alembic.
revision = "b347b202819b"
down_revision = ("33d996bcc382", "65903709c321")
diff --git a/superset/migrations/versions/2016-09-22_10-21_3b626e2a6783_sync_db_with_models.py b/superset/migrations/versions/2016-09-22_10-21_3b626e2a6783_sync_db_with_models.py
index a652439cc..8ba611007 100644
--- a/superset/migrations/versions/2016-09-22_10-21_3b626e2a6783_sync_db_with_models.py
+++ b/superset/migrations/versions/2016-09-22_10-21_3b626e2a6783_sync_db_with_models.py
@@ -24,6 +24,7 @@ Revises: 5e4a03ef0bf0
Create Date: 2016-09-22 10:21:33.618976
"""
+
import logging
import sqlalchemy as sa
diff --git a/superset/migrations/versions/2016-09-22_11-31_eca4694defa7_sqllab_setting_defaults.py b/superset/migrations/versions/2016-09-22_11-31_eca4694defa7_sqllab_setting_defaults.py
index 17f2f9148..b63ebef5d 100644
--- a/superset/migrations/versions/2016-09-22_11-31_eca4694defa7_sqllab_setting_defaults.py
+++ b/superset/migrations/versions/2016-09-22_11-31_eca4694defa7_sqllab_setting_defaults.py
@@ -21,6 +21,7 @@ Revises: 5e4a03ef0bf0
Create Date: 2016-09-22 11:31:50.543820
"""
+
from alembic import op
from sqlalchemy import Boolean, Column, Integer
from sqlalchemy.ext.declarative import declarative_base
@@ -35,7 +36,6 @@ Base = declarative_base()
class Database(Base):
-
"""An ORM object that stores Database related information"""
__tablename__ = "dbs"
diff --git a/superset/migrations/versions/2016-09-30_18-01_ab3d66c4246e_add_cache_timeout_to_druid_cluster.py b/superset/migrations/versions/2016-09-30_18-01_ab3d66c4246e_add_cache_timeout_to_druid_cluster.py
index b2dcc402d..00b9675bf 100644
--- a/superset/migrations/versions/2016-09-30_18-01_ab3d66c4246e_add_cache_timeout_to_druid_cluster.py
+++ b/superset/migrations/versions/2016-09-30_18-01_ab3d66c4246e_add_cache_timeout_to_druid_cluster.py
@@ -26,8 +26,8 @@ Create Date: 2016-09-30 18:01:30.579760
revision = "ab3d66c4246e"
down_revision = "eca4694defa7"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2016-10-02_10-35_ef8843b41dac_.py b/superset/migrations/versions/2016-10-02_10-35_ef8843b41dac_.py
index 5293ef3ac..482e254d1 100644
--- a/superset/migrations/versions/2016-10-02_10-35_ef8843b41dac_.py
+++ b/superset/migrations/versions/2016-10-02_10-35_ef8843b41dac_.py
@@ -21,6 +21,7 @@ Revises: ('3b626e2a6783', 'ab3d66c4246e')
Create Date: 2016-10-02 10:35:38.825231
"""
+
# revision identifiers, used by Alembic.
revision = "ef8843b41dac"
down_revision = ("3b626e2a6783", "ab3d66c4246e")
diff --git a/superset/migrations/versions/2016-10-05_11-30_b46fa1b0b39e_add_params_to_tables.py b/superset/migrations/versions/2016-10-05_11-30_b46fa1b0b39e_add_params_to_tables.py
index 97e58b173..7bd949218 100644
--- a/superset/migrations/versions/2016-10-05_11-30_b46fa1b0b39e_add_params_to_tables.py
+++ b/superset/migrations/versions/2016-10-05_11-30_b46fa1b0b39e_add_params_to_tables.py
@@ -26,10 +26,10 @@ Create Date: 2016-10-05 11:30:31.748238
revision = "b46fa1b0b39e"
down_revision = "ef8843b41dac"
-import logging
+import logging # noqa: E402
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2016-10-14_11-17_7e3ddad2a00b_results_key_to_query.py b/superset/migrations/versions/2016-10-14_11-17_7e3ddad2a00b_results_key_to_query.py
index cc59aafb5..21dc31dc7 100644
--- a/superset/migrations/versions/2016-10-14_11-17_7e3ddad2a00b_results_key_to_query.py
+++ b/superset/migrations/versions/2016-10-14_11-17_7e3ddad2a00b_results_key_to_query.py
@@ -26,8 +26,8 @@ Create Date: 2016-10-14 11:17:54.995156
revision = "7e3ddad2a00b"
down_revision = "b46fa1b0b39e"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2016-10-25_10-16_ad4d656d92bc_add_avg_metric.py b/superset/migrations/versions/2016-10-25_10-16_ad4d656d92bc_add_avg_metric.py
index 2e1a49aaa..47991c6fe 100644
--- a/superset/migrations/versions/2016-10-25_10-16_ad4d656d92bc_add_avg_metric.py
+++ b/superset/migrations/versions/2016-10-25_10-16_ad4d656d92bc_add_avg_metric.py
@@ -26,8 +26,8 @@ Create Date: 2016-10-25 10:16:39.871078
revision = "ad4d656d92bc"
down_revision = "7e3ddad2a00b"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2016-11-02_17-36_c611f2b591b8_dim_spec.py b/superset/migrations/versions/2016-11-02_17-36_c611f2b591b8_dim_spec.py
index faecb4274..8d026ad2c 100644
--- a/superset/migrations/versions/2016-11-02_17-36_c611f2b591b8_dim_spec.py
+++ b/superset/migrations/versions/2016-11-02_17-36_c611f2b591b8_dim_spec.py
@@ -26,8 +26,8 @@ Create Date: 2016-11-02 17:36:04.970448
revision = "c611f2b591b8"
down_revision = "ad4d656d92bc"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2016-11-14_15-23_e46f2d27a08e_materialize_perms.py b/superset/migrations/versions/2016-11-14_15-23_e46f2d27a08e_materialize_perms.py
index 99774966a..628f2ffcd 100644
--- a/superset/migrations/versions/2016-11-14_15-23_e46f2d27a08e_materialize_perms.py
+++ b/superset/migrations/versions/2016-11-14_15-23_e46f2d27a08e_materialize_perms.py
@@ -21,12 +21,13 @@ Revises: c611f2b591b8
Create Date: 2016-11-14 15:23:32.594898
"""
+
# revision identifiers, used by Alembic.
revision = "e46f2d27a08e"
down_revision = "c611f2b591b8"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2016-11-23_10-27_f1f2d4af5b90_.py b/superset/migrations/versions/2016-11-23_10-27_f1f2d4af5b90_.py
index f84d87273..1393b3b03 100644
--- a/superset/migrations/versions/2016-11-23_10-27_f1f2d4af5b90_.py
+++ b/superset/migrations/versions/2016-11-23_10-27_f1f2d4af5b90_.py
@@ -26,8 +26,8 @@ Create Date: 2016-11-23 10:27:18.517919
revision = "f1f2d4af5b90"
down_revision = "e46f2d27a08e"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2016-12-06_17-40_1296d28ec131_druid_exports.py b/superset/migrations/versions/2016-12-06_17-40_1296d28ec131_druid_exports.py
index 1aa34f42f..436605896 100644
--- a/superset/migrations/versions/2016-12-06_17-40_1296d28ec131_druid_exports.py
+++ b/superset/migrations/versions/2016-12-06_17-40_1296d28ec131_druid_exports.py
@@ -26,8 +26,8 @@ Create Date: 2016-12-06 17:40:40.389652
revision = "1296d28ec131"
down_revision = "6414e83d82b7"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2016-12-13_16-19_525c854f0005_log_this_plus.py b/superset/migrations/versions/2016-12-13_16-19_525c854f0005_log_this_plus.py
index 354765072..d0d5bd7d7 100644
--- a/superset/migrations/versions/2016-12-13_16-19_525c854f0005_log_this_plus.py
+++ b/superset/migrations/versions/2016-12-13_16-19_525c854f0005_log_this_plus.py
@@ -26,8 +26,8 @@ Create Date: 2016-12-13 16:19:02.239322
revision = "525c854f0005"
down_revision = "e46f2d27a08e"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2016-12-19_09-57_6414e83d82b7_.py b/superset/migrations/versions/2016-12-19_09-57_6414e83d82b7_.py
index 22688add4..6830c1853 100644
--- a/superset/migrations/versions/2016-12-19_09-57_6414e83d82b7_.py
+++ b/superset/migrations/versions/2016-12-19_09-57_6414e83d82b7_.py
@@ -21,13 +21,11 @@ Revises: ('525c854f0005', 'f1f2d4af5b90')
Create Date: 2016-12-19 09:57:05.814013
"""
+
# revision identifiers, used by Alembic.
revision = "6414e83d82b7"
down_revision = ("525c854f0005", "f1f2d4af5b90")
-import sqlalchemy as sa
-from alembic import op
-
def upgrade():
pass
diff --git a/superset/migrations/versions/2017-01-10_11-47_bcf3126872fc_add_keyvalue.py b/superset/migrations/versions/2017-01-10_11-47_bcf3126872fc_add_keyvalue.py
index 52e62659e..e40973890 100644
--- a/superset/migrations/versions/2017-01-10_11-47_bcf3126872fc_add_keyvalue.py
+++ b/superset/migrations/versions/2017-01-10_11-47_bcf3126872fc_add_keyvalue.py
@@ -26,8 +26,8 @@ Create Date: 2017-01-10 11:47:56.306938
revision = "bcf3126872fc"
down_revision = "f18570e03440"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2017-01-24_12-31_db0c65b146bd_update_slice_model_json.py b/superset/migrations/versions/2017-01-24_12-31_db0c65b146bd_update_slice_model_json.py
index 0bae8cd9a..4eff93991 100644
--- a/superset/migrations/versions/2017-01-24_12-31_db0c65b146bd_update_slice_model_json.py
+++ b/superset/migrations/versions/2017-01-24_12-31_db0c65b146bd_update_slice_model_json.py
@@ -26,13 +26,13 @@ Create Date: 2017-01-24 12:31:06.541746
revision = "db0c65b146bd"
down_revision = "f18570e03440"
-import json
+import json # noqa: E402
-from alembic import op
-from sqlalchemy import Column, Integer, String, Text
-from sqlalchemy.ext.declarative import declarative_base
+from alembic import op # noqa: E402
+from sqlalchemy import Column, Integer, String, Text # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
+from superset import db # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2017-01-24_12-40_f18570e03440_add_query_result_key_index.py b/superset/migrations/versions/2017-01-24_12-40_f18570e03440_add_query_result_key_index.py
index 9889c752e..98bad0bf6 100644
--- a/superset/migrations/versions/2017-01-24_12-40_f18570e03440_add_query_result_key_index.py
+++ b/superset/migrations/versions/2017-01-24_12-40_f18570e03440_add_query_result_key_index.py
@@ -21,6 +21,7 @@ Revises: 1296d28ec131
Create Date: 2017-01-24 12:40:42.494787
"""
+
from alembic import op
# revision identifiers, used by Alembic.
diff --git a/superset/migrations/versions/2017-02-08_14-16_a99f2f7c195a_rewriting_url_from_shortner_with_new_.py b/superset/migrations/versions/2017-02-08_14-16_a99f2f7c195a_rewriting_url_from_shortner_with_new_.py
index 8dafb77be..5df9a6273 100644
--- a/superset/migrations/versions/2017-02-08_14-16_a99f2f7c195a_rewriting_url_from_shortner_with_new_.py
+++ b/superset/migrations/versions/2017-02-08_14-16_a99f2f7c195a_rewriting_url_from_shortner_with_new_.py
@@ -26,14 +26,14 @@ Create Date: 2017-02-08 14:16:34.948793
revision = "a99f2f7c195a"
down_revision = "db0c65b146bd"
-import json
-from urllib import parse
+import json # noqa: E402
+from urllib import parse # noqa: E402
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy.ext.declarative import declarative_base
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
+from superset import db # noqa: E402
Base = declarative_base()
@@ -41,7 +41,7 @@ Base = declarative_base()
def parse_querystring(qs):
d = {}
for k, v in parse.parse_qsl(qs):
- if not k in d:
+ if k not in d:
d[k] = v
else:
if isinstance(d[k], list):
@@ -52,7 +52,6 @@ def parse_querystring(qs):
class Url(Base):
-
"""Used for the short url feature"""
__tablename__ = "url"
diff --git a/superset/migrations/versions/2017-02-10_17-58_d6db5a5cdb5d_.py b/superset/migrations/versions/2017-02-10_17-58_d6db5a5cdb5d_.py
index e69f03896..62c1a439e 100644
--- a/superset/migrations/versions/2017-02-10_17-58_d6db5a5cdb5d_.py
+++ b/superset/migrations/versions/2017-02-10_17-58_d6db5a5cdb5d_.py
@@ -21,13 +21,11 @@ Revises: ('a99f2f7c195a', 'bcf3126872fc')
Create Date: 2017-02-10 17:58:20.149960
"""
+
# revision identifiers, used by Alembic.
revision = "d6db5a5cdb5d"
down_revision = ("a99f2f7c195a", "bcf3126872fc")
-import sqlalchemy as sa
-from alembic import op
-
def upgrade():
pass
diff --git a/superset/migrations/versions/2017-03-03_09-15_732f1c06bcbf_add_fetch_values_predicate.py b/superset/migrations/versions/2017-03-03_09-15_732f1c06bcbf_add_fetch_values_predicate.py
index 8723d1490..5049809d3 100644
--- a/superset/migrations/versions/2017-03-03_09-15_732f1c06bcbf_add_fetch_values_predicate.py
+++ b/superset/migrations/versions/2017-03-03_09-15_732f1c06bcbf_add_fetch_values_predicate.py
@@ -26,8 +26,8 @@ Create Date: 2017-03-03 09:15:56.800930
revision = "732f1c06bcbf"
down_revision = "d6db5a5cdb5d"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2017-03-08_11-48_b318dfe5fb6c_adding_verbose_name_to_druid_column.py b/superset/migrations/versions/2017-03-08_11-48_b318dfe5fb6c_adding_verbose_name_to_druid_column.py
index 3cd00ae9e..b61ea22d8 100644
--- a/superset/migrations/versions/2017-03-08_11-48_b318dfe5fb6c_adding_verbose_name_to_druid_column.py
+++ b/superset/migrations/versions/2017-03-08_11-48_b318dfe5fb6c_adding_verbose_name_to_druid_column.py
@@ -26,8 +26,8 @@ Create Date: 2017-03-08 11:48:10.835741
revision = "b318dfe5fb6c"
down_revision = "d6db5a5cdb5d"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2017-03-16_14-55_ea033256294a_.py b/superset/migrations/versions/2017-03-16_14-55_ea033256294a_.py
index 9031b8b90..213f3ccf4 100644
--- a/superset/migrations/versions/2017-03-16_14-55_ea033256294a_.py
+++ b/superset/migrations/versions/2017-03-16_14-55_ea033256294a_.py
@@ -26,9 +26,6 @@ Create Date: 2017-03-16 14:55:59.431283
revision = "ea033256294a"
down_revision = ("732f1c06bcbf", "b318dfe5fb6c")
-import sqlalchemy as sa
-from alembic import op
-
def upgrade():
pass
diff --git a/superset/migrations/versions/2017-03-16_18-10_db527d8c4c78_add_db_verbose_name.py b/superset/migrations/versions/2017-03-16_18-10_db527d8c4c78_add_db_verbose_name.py
index 0cb9c94b5..5df45650a 100644
--- a/superset/migrations/versions/2017-03-16_18-10_db527d8c4c78_add_db_verbose_name.py
+++ b/superset/migrations/versions/2017-03-16_18-10_db527d8c4c78_add_db_verbose_name.py
@@ -26,10 +26,10 @@ Create Date: 2017-03-16 18:10:57.193035
revision = "db527d8c4c78"
down_revision = "b318dfe5fb6c"
-import logging
+import logging # noqa: E402
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2017-03-21_15-41_979c03af3341_.py b/superset/migrations/versions/2017-03-21_15-41_979c03af3341_.py
index d97ba8dcb..93efa8f1a 100644
--- a/superset/migrations/versions/2017-03-21_15-41_979c03af3341_.py
+++ b/superset/migrations/versions/2017-03-21_15-41_979c03af3341_.py
@@ -26,9 +26,6 @@ Create Date: 2017-03-21 15:41:34.383808
revision = "979c03af3341"
down_revision = ("db527d8c4c78", "ea033256294a")
-import sqlalchemy as sa
-from alembic import op
-
def upgrade():
pass
diff --git a/superset/migrations/versions/2017-03-28_11-28_a6c18f869a4e_query_start_running_time.py b/superset/migrations/versions/2017-03-28_11-28_a6c18f869a4e_query_start_running_time.py
index 661fed8fa..d0f163ab5 100644
--- a/superset/migrations/versions/2017-03-28_11-28_a6c18f869a4e_query_start_running_time.py
+++ b/superset/migrations/versions/2017-03-28_11-28_a6c18f869a4e_query_start_running_time.py
@@ -21,6 +21,7 @@ Revises: 979c03af3341
Create Date: 2017-03-28 11:28:41.387182
"""
+
import sqlalchemy as sa
from alembic import op
diff --git a/superset/migrations/versions/2017-03-29_15-04_2fcdcb35e487_saved_queries.py b/superset/migrations/versions/2017-03-29_15-04_2fcdcb35e487_saved_queries.py
index 581a69dc5..da6f48b7a 100644
--- a/superset/migrations/versions/2017-03-29_15-04_2fcdcb35e487_saved_queries.py
+++ b/superset/migrations/versions/2017-03-29_15-04_2fcdcb35e487_saved_queries.py
@@ -21,6 +21,7 @@ Revises: a6c18f869a4e
Create Date: 2017-03-29 15:04:35.734190
"""
+
import sqlalchemy as sa
from alembic import op
diff --git a/superset/migrations/versions/2017-04-25_10-00_a65458420354_add_result_backend_time_logging.py b/superset/migrations/versions/2017-04-25_10-00_a65458420354_add_result_backend_time_logging.py
index 27872751c..f7b515268 100644
--- a/superset/migrations/versions/2017-04-25_10-00_a65458420354_add_result_backend_time_logging.py
+++ b/superset/migrations/versions/2017-04-25_10-00_a65458420354_add_result_backend_time_logging.py
@@ -21,6 +21,7 @@ Revises: 2fcdcb35e487
Create Date: 2017-04-25 10:00:58.053120
"""
+
import sqlalchemy as sa
from alembic import op
diff --git a/superset/migrations/versions/2017-07-26_20-09_ca69c70ec99b_tracking_url.py b/superset/migrations/versions/2017-07-26_20-09_ca69c70ec99b_tracking_url.py
index f3efb60c5..de261b81e 100644
--- a/superset/migrations/versions/2017-07-26_20-09_ca69c70ec99b_tracking_url.py
+++ b/superset/migrations/versions/2017-07-26_20-09_ca69c70ec99b_tracking_url.py
@@ -26,9 +26,8 @@ Create Date: 2017-07-26 20:09:52.606416
revision = "ca69c70ec99b"
down_revision = "a65458420354"
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy.dialects import mysql
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2017-08-31_17-35_a9c47e2c1547_add_impersonate_user_to_dbs.py b/superset/migrations/versions/2017-08-31_17-35_a9c47e2c1547_add_impersonate_user_to_dbs.py
index f2fc304a0..e0ce55770 100644
--- a/superset/migrations/versions/2017-08-31_17-35_a9c47e2c1547_add_impersonate_user_to_dbs.py
+++ b/superset/migrations/versions/2017-08-31_17-35_a9c47e2c1547_add_impersonate_user_to_dbs.py
@@ -21,12 +21,13 @@ Revises: ca69c70ec99b
Create Date: 2017-08-31 17:35:58.230723
"""
+
# revision identifiers, used by Alembic.
revision = "a9c47e2c1547"
down_revision = "ca69c70ec99b"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2017-09-13_16-36_ddd6ebdd853b_annotations.py b/superset/migrations/versions/2017-09-13_16-36_ddd6ebdd853b_annotations.py
index 3f6ae3f0a..eb0671233 100644
--- a/superset/migrations/versions/2017-09-13_16-36_ddd6ebdd853b_annotations.py
+++ b/superset/migrations/versions/2017-09-13_16-36_ddd6ebdd853b_annotations.py
@@ -21,6 +21,7 @@ Revises: ca69c70ec99b
Create Date: 2017-09-13 16:36:39.144489
"""
+
import sqlalchemy as sa
from alembic import op
diff --git a/superset/migrations/versions/2017-09-15_15-09_19a814813610_adding_metric_warning_text.py b/superset/migrations/versions/2017-09-15_15-09_19a814813610_adding_metric_warning_text.py
index f62db5f5f..826f9f2d3 100644
--- a/superset/migrations/versions/2017-09-15_15-09_19a814813610_adding_metric_warning_text.py
+++ b/superset/migrations/versions/2017-09-15_15-09_19a814813610_adding_metric_warning_text.py
@@ -26,8 +26,8 @@ Create Date: 2017-09-15 15:09:40.495345
revision = "19a814813610"
down_revision = "ca69c70ec99b"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2017-09-19_15-09_d39b1e37131d_.py b/superset/migrations/versions/2017-09-19_15-09_d39b1e37131d_.py
index ee8bba406..b07e51ba5 100644
--- a/superset/migrations/versions/2017-09-19_15-09_d39b1e37131d_.py
+++ b/superset/migrations/versions/2017-09-19_15-09_d39b1e37131d_.py
@@ -26,9 +26,6 @@ Create Date: 2017-09-19 15:09:14.292633
revision = "d39b1e37131d"
down_revision = ("a9c47e2c1547", "ddd6ebdd853b")
-import sqlalchemy as sa
-from alembic import op
-
def upgrade():
pass
diff --git a/superset/migrations/versions/2017-09-21_18-37_472d2f73dfd4_.py b/superset/migrations/versions/2017-09-21_18-37_472d2f73dfd4_.py
index 0bf7ba8bf..d2f888266 100644
--- a/superset/migrations/versions/2017-09-21_18-37_472d2f73dfd4_.py
+++ b/superset/migrations/versions/2017-09-21_18-37_472d2f73dfd4_.py
@@ -26,9 +26,6 @@ Create Date: 2017-09-21 18:37:30.844196
revision = "472d2f73dfd4"
down_revision = ("19a814813610", "a9c47e2c1547")
-import sqlalchemy as sa
-from alembic import op
-
def upgrade():
pass
diff --git a/superset/migrations/versions/2017-09-24_20-18_f959a6652acd_.py b/superset/migrations/versions/2017-09-24_20-18_f959a6652acd_.py
index 12eff47f5..1fddb2a83 100644
--- a/superset/migrations/versions/2017-09-24_20-18_f959a6652acd_.py
+++ b/superset/migrations/versions/2017-09-24_20-18_f959a6652acd_.py
@@ -26,9 +26,6 @@ Create Date: 2017-09-24 20:18:35.791707
revision = "f959a6652acd"
down_revision = ("472d2f73dfd4", "d39b1e37131d")
-import sqlalchemy as sa
-from alembic import op
-
def upgrade():
pass
diff --git a/superset/migrations/versions/2017-12-08_08-19_67a6ac9b727b_update_spatial_params.py b/superset/migrations/versions/2017-12-08_08-19_67a6ac9b727b_update_spatial_params.py
index 81bbb4791..df5c11537 100644
--- a/superset/migrations/versions/2017-12-08_08-19_67a6ac9b727b_update_spatial_params.py
+++ b/superset/migrations/versions/2017-12-08_08-19_67a6ac9b727b_update_spatial_params.py
@@ -21,6 +21,7 @@ Revises: 4736ec66ce19
Create Date: 2017-12-08 08:19:21.148775
"""
+
import json
from alembic import op
diff --git a/superset/migrations/versions/2017-12-17_11-06_21e88bc06c02_annotation_migration.py b/superset/migrations/versions/2017-12-17_11-06_21e88bc06c02_annotation_migration.py
index 785e28239..9c28d6cc2 100644
--- a/superset/migrations/versions/2017-12-17_11-06_21e88bc06c02_annotation_migration.py
+++ b/superset/migrations/versions/2017-12-17_11-06_21e88bc06c02_annotation_migration.py
@@ -21,6 +21,7 @@ Revises: 67a6ac9b727b
Create Date: 2017-12-17 11:06:30.180267
"""
+
import json
from alembic import op
diff --git a/superset/migrations/versions/2018-02-13_08-07_e866bd2d4976_smaller_grid.py b/superset/migrations/versions/2018-02-13_08-07_e866bd2d4976_smaller_grid.py
index 6241ab2a3..3a4a7634e 100644
--- a/superset/migrations/versions/2018-02-13_08-07_e866bd2d4976_smaller_grid.py
+++ b/superset/migrations/versions/2018-02-13_08-07_e866bd2d4976_smaller_grid.py
@@ -20,11 +20,11 @@ Revision ID: e866bd2d4976
Revises: 21e88bc06c02
Create Date: 2018-02-13 08:07:40.766277
"""
+
import json
import sqlalchemy as sa
from alembic import op
-from flask_appbuilder.models.mixins import AuditMixin
from sqlalchemy.ext.declarative import declarative_base
from superset import db
diff --git a/superset/migrations/versions/2018-03-06_12-24_e68c4473c581_allow_multi_schema_metadata_fetch.py b/superset/migrations/versions/2018-03-06_12-24_e68c4473c581_allow_multi_schema_metadata_fetch.py
index 73eb4f95e..cd0eed8b3 100644
--- a/superset/migrations/versions/2018-03-06_12-24_e68c4473c581_allow_multi_schema_metadata_fetch.py
+++ b/superset/migrations/versions/2018-03-06_12-24_e68c4473c581_allow_multi_schema_metadata_fetch.py
@@ -21,6 +21,7 @@ Revises: e866bd2d4976
Create Date: 2018-03-06 12:24:30.896293
"""
+
import sqlalchemy as sa
from alembic import op
diff --git a/superset/migrations/versions/2018-03-20_19-47_f231d82b9b26_.py b/superset/migrations/versions/2018-03-20_19-47_f231d82b9b26_.py
index c73399fb9..a0d283859 100644
--- a/superset/migrations/versions/2018-03-20_19-47_f231d82b9b26_.py
+++ b/superset/migrations/versions/2018-03-20_19-47_f231d82b9b26_.py
@@ -21,6 +21,7 @@ Revises: e68c4473c581
Create Date: 2018-03-20 19:47:54.991259
"""
+
import sqlalchemy as sa
from alembic import op
diff --git a/superset/migrations/versions/2018-03-30_14-00_d6ffdf31bdd4_add_published_column_to_dashboards.py b/superset/migrations/versions/2018-03-30_14-00_d6ffdf31bdd4_add_published_column_to_dashboards.py
index bf6b7e460..2113c6c13 100644
--- a/superset/migrations/versions/2018-03-30_14-00_d6ffdf31bdd4_add_published_column_to_dashboards.py
+++ b/superset/migrations/versions/2018-03-30_14-00_d6ffdf31bdd4_add_published_column_to_dashboards.py
@@ -26,8 +26,8 @@ Create Date: 2018-03-30 14:00:44.929483
revision = "d6ffdf31bdd4"
down_revision = "b4a38aa87893"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2018-04-03_08-19_130915240929_is_sqllab_viz_flow.py b/superset/migrations/versions/2018-04-03_08-19_130915240929_is_sqllab_viz_flow.py
index f615294f3..63e0d1ff2 100644
--- a/superset/migrations/versions/2018-04-03_08-19_130915240929_is_sqllab_viz_flow.py
+++ b/superset/migrations/versions/2018-04-03_08-19_130915240929_is_sqllab_viz_flow.py
@@ -21,6 +21,7 @@ Revises: f231d82b9b26
Create Date: 2018-04-03 08:19:34.098789
"""
+
import sqlalchemy as sa
from alembic import op
from sqlalchemy.ext.declarative import declarative_base
diff --git a/superset/migrations/versions/2018-04-08_07-34_30bb17c0dc76_.py b/superset/migrations/versions/2018-04-08_07-34_30bb17c0dc76_.py
index 6a6426701..811330bf4 100644
--- a/superset/migrations/versions/2018-04-08_07-34_30bb17c0dc76_.py
+++ b/superset/migrations/versions/2018-04-08_07-34_30bb17c0dc76_.py
@@ -26,10 +26,10 @@ Create Date: 2018-04-08 07:34:12.149910
revision = "30bb17c0dc76"
down_revision = "f231d82b9b26"
-from datetime import date
+from datetime import date # noqa: E402
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2018-04-10_11-19_bf706ae5eb46_cal_heatmap_metric_to_metrics.py b/superset/migrations/versions/2018-04-10_11-19_bf706ae5eb46_cal_heatmap_metric_to_metrics.py
index 2aa703cfe..9c660b651 100644
--- a/superset/migrations/versions/2018-04-10_11-19_bf706ae5eb46_cal_heatmap_metric_to_metrics.py
+++ b/superset/migrations/versions/2018-04-10_11-19_bf706ae5eb46_cal_heatmap_metric_to_metrics.py
@@ -21,6 +21,7 @@ Revises: f231d82b9b26
Create Date: 2018-04-10 11:19:47.621878
"""
+
import json
from alembic import op
diff --git a/superset/migrations/versions/2018-04-10_20-46_c9495751e314_.py b/superset/migrations/versions/2018-04-10_20-46_c9495751e314_.py
index 6ae29a510..e43ce3d31 100644
--- a/superset/migrations/versions/2018-04-10_20-46_c9495751e314_.py
+++ b/superset/migrations/versions/2018-04-10_20-46_c9495751e314_.py
@@ -21,13 +21,11 @@ Revises: ('30bb17c0dc76', 'bf706ae5eb46')
Create Date: 2018-04-10 20:46:57.890773
"""
+
# revision identifiers, used by Alembic.
revision = "c9495751e314"
down_revision = ("30bb17c0dc76", "bf706ae5eb46")
-import sqlalchemy as sa
-from alembic import op
-
def upgrade():
pass
diff --git a/superset/migrations/versions/2018-04-12_16-00_5ccf602336a0_.py b/superset/migrations/versions/2018-04-12_16-00_5ccf602336a0_.py
index 464648219..ea300da42 100644
--- a/superset/migrations/versions/2018-04-12_16-00_5ccf602336a0_.py
+++ b/superset/migrations/versions/2018-04-12_16-00_5ccf602336a0_.py
@@ -21,13 +21,11 @@ Revises: ('130915240929', 'c9495751e314')
Create Date: 2018-04-12 16:00:47.639218
"""
+
# revision identifiers, used by Alembic.
revision = "5ccf602336a0"
down_revision = ("130915240929", "c9495751e314")
-import sqlalchemy as sa
-from alembic import op
-
def upgrade():
pass
diff --git a/superset/migrations/versions/2018-05-09_23-45_e502db2af7be_add_template_params_to_tables.py b/superset/migrations/versions/2018-05-09_23-45_e502db2af7be_add_template_params_to_tables.py
index b76ea6238..27f0f137d 100644
--- a/superset/migrations/versions/2018-05-09_23-45_e502db2af7be_add_template_params_to_tables.py
+++ b/superset/migrations/versions/2018-05-09_23-45_e502db2af7be_add_template_params_to_tables.py
@@ -26,8 +26,8 @@ Create Date: 2018-05-09 23:45:14.296283
revision = "e502db2af7be"
down_revision = "5ccf602336a0"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
@@ -38,4 +38,4 @@ def downgrade():
try:
op.drop_column("tables", "template_params")
except Exception as ex:
- logging.warning(str(ex))
+ logging.warning(str(ex)) # noqa: F821
diff --git a/superset/migrations/versions/2018-05-15_20-28_6c7537a6004a_models_for_email_reports.py b/superset/migrations/versions/2018-05-15_20-28_6c7537a6004a_models_for_email_reports.py
index b4229b161..3b53fc599 100644
--- a/superset/migrations/versions/2018-05-15_20-28_6c7537a6004a_models_for_email_reports.py
+++ b/superset/migrations/versions/2018-05-15_20-28_6c7537a6004a_models_for_email_reports.py
@@ -26,8 +26,8 @@ Create Date: 2018-05-15 20:28:51.977572
revision = "6c7537a6004a"
down_revision = "a61b40f9f57f"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2018-06-04_11-12_c5756bec8b47_time_grain_sqla.py b/superset/migrations/versions/2018-06-04_11-12_c5756bec8b47_time_grain_sqla.py
index a092bd6ef..b0f00e219 100644
--- a/superset/migrations/versions/2018-06-04_11-12_c5756bec8b47_time_grain_sqla.py
+++ b/superset/migrations/versions/2018-06-04_11-12_c5756bec8b47_time_grain_sqla.py
@@ -26,13 +26,13 @@ Create Date: 2018-06-04 11:12:59.878742
revision = "c5756bec8b47"
down_revision = "e502db2af7be"
-import json
+import json # noqa: E402
-from alembic import op
-from sqlalchemy import Column, Integer, Text
-from sqlalchemy.ext.declarative import declarative_base
+from alembic import op # noqa: E402
+from sqlalchemy import Column, Integer, Text # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
+from superset import db # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2018-06-07_09-52_afb7730f6a9c_remove_empty_filters.py b/superset/migrations/versions/2018-06-07_09-52_afb7730f6a9c_remove_empty_filters.py
index e4cc3a64a..ab11a2cf6 100644
--- a/superset/migrations/versions/2018-06-07_09-52_afb7730f6a9c_remove_empty_filters.py
+++ b/superset/migrations/versions/2018-06-07_09-52_afb7730f6a9c_remove_empty_filters.py
@@ -26,13 +26,13 @@ Create Date: 2018-06-07 09:52:54.535961
revision = "afb7730f6a9c"
down_revision = "c5756bec8b47"
-import json
+import json # noqa: E402
-from alembic import op
-from sqlalchemy import Column, Integer, Text
-from sqlalchemy.ext.declarative import declarative_base
+from alembic import op # noqa: E402
+from sqlalchemy import Column, Integer, Text # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
+from superset import db # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2018-06-13_10-20_4451805bbaa1_remove_double_percents.py b/superset/migrations/versions/2018-06-13_10-20_4451805bbaa1_remove_double_percents.py
index bf7a74559..80612747a 100644
--- a/superset/migrations/versions/2018-06-13_10-20_4451805bbaa1_remove_double_percents.py
+++ b/superset/migrations/versions/2018-06-13_10-20_4451805bbaa1_remove_double_percents.py
@@ -27,13 +27,13 @@ revision = "4451805bbaa1"
down_revision = "bddc498dd179"
-import json
+import json # noqa: E402
-from alembic import op
-from sqlalchemy import Column, create_engine, ForeignKey, Integer, String, Text
-from sqlalchemy.ext.declarative import declarative_base
+from alembic import op # noqa: E402
+from sqlalchemy import Column, create_engine, ForeignKey, Integer, String, Text # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
+from superset import db # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2018-06-13_14-54_bddc498dd179_adhoc_filters.py b/superset/migrations/versions/2018-06-13_14-54_bddc498dd179_adhoc_filters.py
index 6292e2860..e3b891d29 100644
--- a/superset/migrations/versions/2018-06-13_14-54_bddc498dd179_adhoc_filters.py
+++ b/superset/migrations/versions/2018-06-13_14-54_bddc498dd179_adhoc_filters.py
@@ -27,14 +27,14 @@ revision = "bddc498dd179"
down_revision = "80a67c5192fa"
-import json
+import json # noqa: E402
-from alembic import op
-from sqlalchemy import Column, Integer, Text
-from sqlalchemy.ext.declarative import declarative_base
+from alembic import op # noqa: E402
+from sqlalchemy import Column, Integer, Text # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
-from superset.utils.core import (
+from superset import db # noqa: E402
+from superset.utils.core import ( # noqa: E402
convert_legacy_filters_into_adhoc,
split_adhoc_filters_into_base_filters,
)
diff --git a/superset/migrations/versions/2018-06-14_14-31_80a67c5192fa_single_pie_chart_metric.py b/superset/migrations/versions/2018-06-14_14-31_80a67c5192fa_single_pie_chart_metric.py
index a24247a0b..d25a37cba 100644
--- a/superset/migrations/versions/2018-06-14_14-31_80a67c5192fa_single_pie_chart_metric.py
+++ b/superset/migrations/versions/2018-06-14_14-31_80a67c5192fa_single_pie_chart_metric.py
@@ -27,13 +27,13 @@ revision = "80a67c5192fa"
down_revision = "afb7730f6a9c"
-import json
+import json # noqa: E402
-from alembic import op
-from sqlalchemy import Column, Integer, String, Text
-from sqlalchemy.ext.declarative import declarative_base
+from alembic import op # noqa: E402
+from sqlalchemy import Column, Integer, String, Text # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
+from superset import db # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2018-07-16_18-04_1d9e835a84f9_.py b/superset/migrations/versions/2018-07-16_18-04_1d9e835a84f9_.py
index 0929dfff3..d0f0ed2aa 100644
--- a/superset/migrations/versions/2018-07-16_18-04_1d9e835a84f9_.py
+++ b/superset/migrations/versions/2018-07-16_18-04_1d9e835a84f9_.py
@@ -21,6 +21,7 @@ Revises: 3dda56f1c4c6
Create Date: 2018-07-16 18:04:07.764659
"""
+
import sqlalchemy as sa
from alembic import op
from sqlalchemy.sql import expression
diff --git a/superset/migrations/versions/2018-07-19_23-41_c617da68de7d_form_nullable.py b/superset/migrations/versions/2018-07-19_23-41_c617da68de7d_form_nullable.py
index c5ffd7213..8521845db 100644
--- a/superset/migrations/versions/2018-07-19_23-41_c617da68de7d_form_nullable.py
+++ b/superset/migrations/versions/2018-07-19_23-41_c617da68de7d_form_nullable.py
@@ -26,12 +26,12 @@ Create Date: 2018-07-19 23:41:32.631556
revision = "c617da68de7d"
down_revision = "18dc26817ad2"
-from alembic import op
-from sqlalchemy import Column, Integer, String, Text
-from sqlalchemy.ext.declarative import declarative_base
+from alembic import op # noqa: E402
+from sqlalchemy import Column, Integer, String, Text # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
-from superset.utils.core import MediumText
+from superset import db # noqa: E402
+from superset.utils.core import MediumText # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2018-07-20_15-31_7f2635b51f5d_update_base_columns.py b/superset/migrations/versions/2018-07-20_15-31_7f2635b51f5d_update_base_columns.py
index b5724d651..4f9fdec10 100644
--- a/superset/migrations/versions/2018-07-20_15-31_7f2635b51f5d_update_base_columns.py
+++ b/superset/migrations/versions/2018-07-20_15-31_7f2635b51f5d_update_base_columns.py
@@ -29,12 +29,12 @@ Create Date: 2018-07-20 15:31:05.058050
revision = "7f2635b51f5d"
down_revision = "937d04c16b64"
-from alembic import op
-from sqlalchemy import Column, engine, Integer, String
-from sqlalchemy.ext.declarative import declarative_base
+from alembic import op # noqa: E402
+from sqlalchemy import Column, engine, Integer, String # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
-from superset.utils.core import generic_find_uq_constraint_name
+from superset import db # noqa: E402
+from superset.utils.core import generic_find_uq_constraint_name # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2018-07-20_15-57_e9df189e5c7e_update_base_metrics.py b/superset/migrations/versions/2018-07-20_15-57_e9df189e5c7e_update_base_metrics.py
index 81dc9cfb7..1291fb13d 100644
--- a/superset/migrations/versions/2018-07-20_15-57_e9df189e5c7e_update_base_metrics.py
+++ b/superset/migrations/versions/2018-07-20_15-57_e9df189e5c7e_update_base_metrics.py
@@ -29,12 +29,12 @@ Create Date: 2018-07-20 15:57:48.118304
revision = "e9df189e5c7e"
down_revision = "7f2635b51f5d"
-from alembic import op
-from sqlalchemy import Column, engine, Integer, String, Text
-from sqlalchemy.ext.declarative import declarative_base
+from alembic import op # noqa: E402
+from sqlalchemy import Column, engine, Integer, String, Text # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
-from superset.utils.core import generic_find_uq_constraint_name
+from superset import db # noqa: E402
+from superset.utils.core import generic_find_uq_constraint_name # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2018-07-20_16-08_937d04c16b64_update_datasources.py b/superset/migrations/versions/2018-07-20_16-08_937d04c16b64_update_datasources.py
index 98c14f457..ef2a989e9 100644
--- a/superset/migrations/versions/2018-07-20_16-08_937d04c16b64_update_datasources.py
+++ b/superset/migrations/versions/2018-07-20_16-08_937d04c16b64_update_datasources.py
@@ -26,8 +26,8 @@ Create Date: 2018-07-20 16:08:10.195843
revision = "937d04c16b64"
down_revision = "d94d33dbe938"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2018-07-22_08-49_c829ff0b37d0_.py b/superset/migrations/versions/2018-07-22_08-49_c829ff0b37d0_.py
index 3dc624e31..06be95c86 100644
--- a/superset/migrations/versions/2018-07-22_08-49_c829ff0b37d0_.py
+++ b/superset/migrations/versions/2018-07-22_08-49_c829ff0b37d0_.py
@@ -26,9 +26,6 @@ Create Date: 2018-07-22 08:49:48.936117
revision = "c829ff0b37d0"
down_revision = ("4451805bbaa1", "1d9e835a84f9")
-import sqlalchemy as sa
-from alembic import op
-
def upgrade():
pass
diff --git a/superset/migrations/versions/2018-07-22_08-50_7467e77870e4_remove_aggs.py b/superset/migrations/versions/2018-07-22_08-50_7467e77870e4_remove_aggs.py
index 7158105e9..4b4bdbaa3 100644
--- a/superset/migrations/versions/2018-07-22_08-50_7467e77870e4_remove_aggs.py
+++ b/superset/migrations/versions/2018-07-22_08-50_7467e77870e4_remove_aggs.py
@@ -21,6 +21,7 @@ Revises: c829ff0b37d0
Create Date: 2018-07-22 08:50:01.078218
"""
+
import sqlalchemy as sa
from alembic import op
diff --git a/superset/migrations/versions/2018-07-22_09-32_e3970889f38e_.py b/superset/migrations/versions/2018-07-22_09-32_e3970889f38e_.py
index ef45bed10..9d13ab3e1 100644
--- a/superset/migrations/versions/2018-07-22_09-32_e3970889f38e_.py
+++ b/superset/migrations/versions/2018-07-22_09-32_e3970889f38e_.py
@@ -26,9 +26,6 @@ Create Date: 2018-07-22 09:32:36.986561
revision = "e3970889f38e"
down_revision = ("4451805bbaa1", "1d9e835a84f9")
-import sqlalchemy as sa
-from alembic import op
-
def upgrade():
pass
diff --git a/superset/migrations/versions/2018-07-22_11-50_fc480c87706c_.py b/superset/migrations/versions/2018-07-22_11-50_fc480c87706c_.py
index 33b746c76..804847da6 100644
--- a/superset/migrations/versions/2018-07-22_11-50_fc480c87706c_.py
+++ b/superset/migrations/versions/2018-07-22_11-50_fc480c87706c_.py
@@ -26,9 +26,6 @@ Create Date: 2018-07-22 11:50:54.174443
revision = "fc480c87706c"
down_revision = ("4451805bbaa1", "1d9e835a84f9")
-import sqlalchemy as sa
-from alembic import op
-
def upgrade():
pass
diff --git a/superset/migrations/versions/2018-07-22_11-59_bebcf3fed1fe_convert_dashboard_v1_positions.py b/superset/migrations/versions/2018-07-22_11-59_bebcf3fed1fe_convert_dashboard_v1_positions.py
index 3dc0bcc45..f5af630a8 100644
--- a/superset/migrations/versions/2018-07-22_11-59_bebcf3fed1fe_convert_dashboard_v1_positions.py
+++ b/superset/migrations/versions/2018-07-22_11-59_bebcf3fed1fe_convert_dashboard_v1_positions.py
@@ -469,9 +469,9 @@ def convert_to_layout(positions):
root[childId], root
)
else:
- root[childId]["meta"][
- "width"
- ] = reduce_component_width(root[childId])
+ root[childId]["meta"]["width"] = (
+ reduce_component_width(root[childId])
+ )
root[current_column]["meta"]["width"] = get_children_max(
root[current_column]["children"], "width", root
diff --git a/superset/migrations/versions/2018-07-22_21-51_705732c70154_.py b/superset/migrations/versions/2018-07-22_21-51_705732c70154_.py
index 36003a0bc..cd74387b2 100644
--- a/superset/migrations/versions/2018-07-22_21-51_705732c70154_.py
+++ b/superset/migrations/versions/2018-07-22_21-51_705732c70154_.py
@@ -21,13 +21,11 @@ Revises: ('4451805bbaa1', '1d9e835a84f9')
Create Date: 2018-07-22 21:51:19.235558
"""
+
# revision identifiers, used by Alembic.
revision = "705732c70154"
down_revision = ("4451805bbaa1", "1d9e835a84f9")
-import sqlalchemy as sa
-from alembic import op
-
def upgrade():
pass
diff --git a/superset/migrations/versions/2018-07-23_11-20_46ba6aaaac97_.py b/superset/migrations/versions/2018-07-23_11-20_46ba6aaaac97_.py
index 17f9b3d73..632f53abf 100644
--- a/superset/migrations/versions/2018-07-23_11-20_46ba6aaaac97_.py
+++ b/superset/migrations/versions/2018-07-23_11-20_46ba6aaaac97_.py
@@ -26,9 +26,6 @@ Create Date: 2018-07-23 11:20:54.929246
revision = "46ba6aaaac97"
down_revision = ("705732c70154", "e3970889f38e")
-import sqlalchemy as sa
-from alembic import op
-
def upgrade():
pass
diff --git a/superset/migrations/versions/2018-07-26_11-10_c82ee8a39623_add_implicit_tags.py b/superset/migrations/versions/2018-07-26_11-10_c82ee8a39623_add_implicit_tags.py
index 75151d340..42b52cfe6 100644
--- a/superset/migrations/versions/2018-07-26_11-10_c82ee8a39623_add_implicit_tags.py
+++ b/superset/migrations/versions/2018-07-26_11-10_c82ee8a39623_add_implicit_tags.py
@@ -26,15 +26,15 @@ Create Date: 2018-07-26 11:10:23.653524
revision = "c82ee8a39623"
down_revision = "c617da68de7d"
-from datetime import datetime
+from datetime import datetime # noqa: E402
-from alembic import op
-from flask_appbuilder.models.mixins import AuditMixin
-from sqlalchemy import Column, DateTime, Enum, ForeignKey, Integer, String
-from sqlalchemy.ext.declarative import declarative_base, declared_attr
+from alembic import op # noqa: E402
+from flask_appbuilder.models.mixins import AuditMixin # noqa: E402
+from sqlalchemy import Column, DateTime, Enum, ForeignKey, Integer, String # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base, declared_attr # noqa: E402
-from superset.tags.models import ObjectType, TagType
-from superset.utils.core import get_user_id
+from superset.tags.models import ObjectType, TagType # noqa: E402
+from superset.utils.core import get_user_id # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2018-08-06_14-38_0c5070e96b57_add_user_attributes_table.py b/superset/migrations/versions/2018-08-06_14-38_0c5070e96b57_add_user_attributes_table.py
index a72bf04aa..676359623 100644
--- a/superset/migrations/versions/2018-08-06_14-38_0c5070e96b57_add_user_attributes_table.py
+++ b/superset/migrations/versions/2018-08-06_14-38_0c5070e96b57_add_user_attributes_table.py
@@ -26,8 +26,8 @@ Create Date: 2018-08-06 14:38:18.965248
revision = "0c5070e96b57"
down_revision = "7fcdcde0761c"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2018-08-13_11-30_1a1d627ebd8e_position_json.py b/superset/migrations/versions/2018-08-13_11-30_1a1d627ebd8e_position_json.py
index f6b79db43..468dc0876 100644
--- a/superset/migrations/versions/2018-08-13_11-30_1a1d627ebd8e_position_json.py
+++ b/superset/migrations/versions/2018-08-13_11-30_1a1d627ebd8e_position_json.py
@@ -22,7 +22,6 @@ Create Date: 2018-08-13 11:30:07.101702
"""
-
import sqlalchemy as sa
from alembic import op
diff --git a/superset/migrations/versions/2018-08-29_14-35_55e910a74826_add_metadata_column_to_annotation_model_.py b/superset/migrations/versions/2018-08-29_14-35_55e910a74826_add_metadata_column_to_annotation_model_.py
index b819dbfd2..c13cf20d6 100644
--- a/superset/migrations/versions/2018-08-29_14-35_55e910a74826_add_metadata_column_to_annotation_model_.py
+++ b/superset/migrations/versions/2018-08-29_14-35_55e910a74826_add_metadata_column_to_annotation_model_.py
@@ -26,8 +26,8 @@ Create Date: 2018-08-29 14:35:20.407743
revision = "55e910a74826"
down_revision = "1a1d627ebd8e"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2018-11-05_08-42_0b1f1ab473c0_add_extra_column_to_query.py b/superset/migrations/versions/2018-11-05_08-42_0b1f1ab473c0_add_extra_column_to_query.py
index f131c8454..27e569210 100644
--- a/superset/migrations/versions/2018-11-05_08-42_0b1f1ab473c0_add_extra_column_to_query.py
+++ b/superset/migrations/versions/2018-11-05_08-42_0b1f1ab473c0_add_extra_column_to_query.py
@@ -21,6 +21,7 @@ Revises: 55e910a74826
Create Date: 2018-11-05 08:42:56.181012
"""
+
import sqlalchemy as sa
from alembic import op
diff --git a/superset/migrations/versions/2018-11-12_13-31_4ce8df208545_migrate_time_range_for_default_filters.py b/superset/migrations/versions/2018-11-12_13-31_4ce8df208545_migrate_time_range_for_default_filters.py
index 3c6979f96..74b276e27 100644
--- a/superset/migrations/versions/2018-11-12_13-31_4ce8df208545_migrate_time_range_for_default_filters.py
+++ b/superset/migrations/versions/2018-11-12_13-31_4ce8df208545_migrate_time_range_for_default_filters.py
@@ -100,9 +100,9 @@ def upgrade():
# just abandon __from and __to
if "__time_range" not in val:
val.append("__time_range")
- json_metadata[
- "filter_immune_slice_fields"
- ] = filter_immune_slice_fields
+ json_metadata["filter_immune_slice_fields"] = (
+ filter_immune_slice_fields
+ )
has_update = True
if has_update:
diff --git a/superset/migrations/versions/2018-11-26_00-01_46f444d8b9b7_remove_coordinator_from_druid_cluster_.py b/superset/migrations/versions/2018-11-26_00-01_46f444d8b9b7_remove_coordinator_from_druid_cluster_.py
index edeee4069..2e0d806f4 100644
--- a/superset/migrations/versions/2018-11-26_00-01_46f444d8b9b7_remove_coordinator_from_druid_cluster_.py
+++ b/superset/migrations/versions/2018-11-26_00-01_46f444d8b9b7_remove_coordinator_from_druid_cluster_.py
@@ -21,6 +21,7 @@ Revises: 4ce8df208545
Create Date: 2018-11-26 00:01:04.781119
"""
+
import sqlalchemy as sa
from alembic import op
diff --git a/superset/migrations/versions/2018-11-27_11-53_a61b40f9f57f_remove_allow_run_sync.py b/superset/migrations/versions/2018-11-27_11-53_a61b40f9f57f_remove_allow_run_sync.py
index 52ec967fb..fad12b6f1 100644
--- a/superset/migrations/versions/2018-11-27_11-53_a61b40f9f57f_remove_allow_run_sync.py
+++ b/superset/migrations/versions/2018-11-27_11-53_a61b40f9f57f_remove_allow_run_sync.py
@@ -21,6 +21,7 @@ Revises: 46f444d8b9b7
Create Date: 2018-11-27 11:53:17.512627
"""
+
import sqlalchemy as sa
from alembic import op
diff --git a/superset/migrations/versions/2018-12-11_22-03_fb13d49b72f9_better_filters.py b/superset/migrations/versions/2018-12-11_22-03_fb13d49b72f9_better_filters.py
index 2a58fdf21..7b6676d55 100644
--- a/superset/migrations/versions/2018-12-11_22-03_fb13d49b72f9_better_filters.py
+++ b/superset/migrations/versions/2018-12-11_22-03_fb13d49b72f9_better_filters.py
@@ -21,6 +21,7 @@ Revises: 6c7537a6004a
Create Date: 2018-12-11 22:03:21.612516
"""
+
import json
import logging
@@ -78,8 +79,8 @@ def upgrade():
for slc in filter_box_slices.all():
try:
upgrade_slice(slc)
- except Exception as ex:
- logging.exception(e)
+ except Exception:
+ logging.exception(e) # noqa: F821
session.commit()
session.close()
diff --git a/superset/migrations/versions/2018-12-13_15-38_cefabc8f7d38_increase_size_of_name_column_in_ab_view_.py b/superset/migrations/versions/2018-12-13_15-38_cefabc8f7d38_increase_size_of_name_column_in_ab_view_.py
index 042a4f844..63ba7c44a 100644
--- a/superset/migrations/versions/2018-12-13_15-38_cefabc8f7d38_increase_size_of_name_column_in_ab_view_.py
+++ b/superset/migrations/versions/2018-12-13_15-38_cefabc8f7d38_increase_size_of_name_column_in_ab_view_.py
@@ -26,8 +26,8 @@ Create Date: 2018-12-13 15:38:36.772750
revision = "cefabc8f7d38"
down_revision = "6c7537a6004a"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2018-12-18_22-45_de021a1ca60d_.py b/superset/migrations/versions/2018-12-18_22-45_de021a1ca60d_.py
index 3f9e79881..24299c3b8 100644
--- a/superset/migrations/versions/2018-12-18_22-45_de021a1ca60d_.py
+++ b/superset/migrations/versions/2018-12-18_22-45_de021a1ca60d_.py
@@ -21,6 +21,7 @@ Revises: ('0b1f1ab473c0', 'cefabc8f7d38')
Create Date: 2018-12-18 22:45:55.783083
"""
+
# revision identifiers, used by Alembic.
revision = "de021a1ca60d"
down_revision = ("0b1f1ab473c0", "cefabc8f7d38", "3e1b21cd94a4")
diff --git a/superset/migrations/versions/2018-12-22_17-26_fbd55e0f83eb_.py b/superset/migrations/versions/2018-12-22_17-26_fbd55e0f83eb_.py
index 99525fd1e..4f5d06224 100644
--- a/superset/migrations/versions/2018-12-22_17-26_fbd55e0f83eb_.py
+++ b/superset/migrations/versions/2018-12-22_17-26_fbd55e0f83eb_.py
@@ -26,9 +26,6 @@ Create Date: 2018-12-22 17:26:16.113317
revision = "fbd55e0f83eb"
down_revision = ("7467e77870e4", "de021a1ca60d")
-import sqlalchemy as sa
-from alembic import op
-
def upgrade():
pass
diff --git a/superset/migrations/versions/2019-01-14_16-00_a33a03f16c4a_add_extra_column_to_savedquery.py b/superset/migrations/versions/2019-01-14_16-00_a33a03f16c4a_add_extra_column_to_savedquery.py
index b7f272256..21a1a0bf8 100644
--- a/superset/migrations/versions/2019-01-14_16-00_a33a03f16c4a_add_extra_column_to_savedquery.py
+++ b/superset/migrations/versions/2019-01-14_16-00_a33a03f16c4a_add_extra_column_to_savedquery.py
@@ -41,8 +41,8 @@ Create Date: 2019-01-14 16:00:26.344439
revision = "a33a03f16c4a"
down_revision = "fb13d49b72f9"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2019-01-17_08-31_8b70aa3d0f87_.py b/superset/migrations/versions/2019-01-17_08-31_8b70aa3d0f87_.py
index 484f2d0a0..43a718aaf 100644
--- a/superset/migrations/versions/2019-01-17_08-31_8b70aa3d0f87_.py
+++ b/superset/migrations/versions/2019-01-17_08-31_8b70aa3d0f87_.py
@@ -26,9 +26,6 @@ Create Date: 2019-01-17 08:31:55.781032
revision = "8b70aa3d0f87"
down_revision = ("fbd55e0f83eb", "fb13d49b72f9")
-import sqlalchemy as sa
-from alembic import op
-
def upgrade():
pass
diff --git a/superset/migrations/versions/2019-01-18_14-56_18dc26817ad2_.py b/superset/migrations/versions/2019-01-18_14-56_18dc26817ad2_.py
index 2c5ffdfdc..7722669db 100644
--- a/superset/migrations/versions/2019-01-18_14-56_18dc26817ad2_.py
+++ b/superset/migrations/versions/2019-01-18_14-56_18dc26817ad2_.py
@@ -26,9 +26,6 @@ Create Date: 2019-01-18 14:56:26.307684
revision = "18dc26817ad2"
down_revision = ("8b70aa3d0f87", "a33a03f16c4a")
-import sqlalchemy as sa
-from alembic import op
-
def upgrade():
pass
diff --git a/superset/migrations/versions/2019-02-01_16-07_e553e78e90c5_add_druid_auth_py_py.py b/superset/migrations/versions/2019-02-01_16-07_e553e78e90c5_add_druid_auth_py_py.py
index dcdbe15ee..326b58611 100644
--- a/superset/migrations/versions/2019-02-01_16-07_e553e78e90c5_add_druid_auth_py_py.py
+++ b/superset/migrations/versions/2019-02-01_16-07_e553e78e90c5_add_druid_auth_py_py.py
@@ -26,9 +26,8 @@ Create Date: 2019-02-01 16:07:04.268023
revision = "e553e78e90c5"
down_revision = "18dc26817ad2"
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy_utils import EncryptedType
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2019-02-16_17-44_45e7da7cfeba_.py b/superset/migrations/versions/2019-02-16_17-44_45e7da7cfeba_.py
index 888483de0..e0dc0cb86 100644
--- a/superset/migrations/versions/2019-02-16_17-44_45e7da7cfeba_.py
+++ b/superset/migrations/versions/2019-02-16_17-44_45e7da7cfeba_.py
@@ -26,9 +26,6 @@ Create Date: 2019-02-16 17:44:44.493427
revision = "45e7da7cfeba"
down_revision = ("e553e78e90c5", "c82ee8a39623")
-import sqlalchemy as sa
-from alembic import op
-
def upgrade():
pass
diff --git a/superset/migrations/versions/2019-03-21_10-22_d94d33dbe938_form_strip.py b/superset/migrations/versions/2019-03-21_10-22_d94d33dbe938_form_strip.py
index a899fd5ab..7796ad107 100644
--- a/superset/migrations/versions/2019-03-21_10-22_d94d33dbe938_form_strip.py
+++ b/superset/migrations/versions/2019-03-21_10-22_d94d33dbe938_form_strip.py
@@ -26,12 +26,12 @@ Create Date: 2019-03-21 10:22:01.610217
revision = "d94d33dbe938"
down_revision = "80aa3f04bc82"
-from alembic import op
-from sqlalchemy import Column, Integer, String, Text
-from sqlalchemy.ext.declarative import declarative_base
+from alembic import op # noqa: E402
+from sqlalchemy import Column, Integer, String, Text # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
-from superset.utils.core import MediumText
+from superset import db # noqa: E402
+from superset.utils.core import MediumText # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2019-04-09_16-27_80aa3f04bc82_add_parent_ids_in_dashboard_layout.py b/superset/migrations/versions/2019-04-09_16-27_80aa3f04bc82_add_parent_ids_in_dashboard_layout.py
index 47c8a6cbc..61cb3c62f 100644
--- a/superset/migrations/versions/2019-04-09_16-27_80aa3f04bc82_add_parent_ids_in_dashboard_layout.py
+++ b/superset/migrations/versions/2019-04-09_16-27_80aa3f04bc82_add_parent_ids_in_dashboard_layout.py
@@ -21,14 +21,13 @@ Revises: 45e7da7cfeba
Create Date: 2019-04-09 16:27:03.392872
"""
+
import json
import logging
-import sqlalchemy as sa
from alembic import op
from sqlalchemy import Column, Integer, Text
from sqlalchemy.ext.declarative import declarative_base
-from sqlalchemy.orm import relationship
from superset import db
diff --git a/superset/migrations/versions/2019-05-06_14-30_afc69274c25a_alter_sql_column_data_type_in_query_mysql_table.py b/superset/migrations/versions/2019-05-06_14-30_afc69274c25a_alter_sql_column_data_type_in_query_mysql_table.py
index 9fb9acae7..d7a907dbd 100644
--- a/superset/migrations/versions/2019-05-06_14-30_afc69274c25a_alter_sql_column_data_type_in_query_mysql_table.py
+++ b/superset/migrations/versions/2019-05-06_14-30_afc69274c25a_alter_sql_column_data_type_in_query_mysql_table.py
@@ -22,6 +22,7 @@ Revises: e9df189e5c7e
Create Date: 2019-05-06 14:30:26.181449
"""
+
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import mysql
diff --git a/superset/migrations/versions/2019-06-04_10-12_d7c1a0d6f2da_remove_limit_used_from_query_model.py b/superset/migrations/versions/2019-06-04_10-12_d7c1a0d6f2da_remove_limit_used_from_query_model.py
index 20841c1a6..f2c2a756c 100644
--- a/superset/migrations/versions/2019-06-04_10-12_d7c1a0d6f2da_remove_limit_used_from_query_model.py
+++ b/superset/migrations/versions/2019-06-04_10-12_d7c1a0d6f2da_remove_limit_used_from_query_model.py
@@ -26,8 +26,8 @@ Create Date: 2019-06-04 10:12:36.675369
revision = "d7c1a0d6f2da"
down_revision = "afc69274c25a"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2019-06-05_11-35_b4a38aa87893_deprecate_database_expression.py b/superset/migrations/versions/2019-06-05_11-35_b4a38aa87893_deprecate_database_expression.py
index f8696727e..967c44e7b 100644
--- a/superset/migrations/versions/2019-06-05_11-35_b4a38aa87893_deprecate_database_expression.py
+++ b/superset/migrations/versions/2019-06-05_11-35_b4a38aa87893_deprecate_database_expression.py
@@ -26,8 +26,8 @@ Create Date: 2019-06-05 11:35:16.222519
revision = "b4a38aa87893"
down_revision = "ab8c66efdd01"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2019-06-28_13-17_ab8c66efdd01_resample.py b/superset/migrations/versions/2019-06-28_13-17_ab8c66efdd01_resample.py
index 928636040..ea45104ee 100644
--- a/superset/migrations/versions/2019-06-28_13-17_ab8c66efdd01_resample.py
+++ b/superset/migrations/versions/2019-06-28_13-17_ab8c66efdd01_resample.py
@@ -26,14 +26,14 @@ Create Date: 2019-06-28 13:17:59.517089
revision = "ab8c66efdd01"
down_revision = "d7c1a0d6f2da"
-import json
-import logging
+import json # noqa: E402
+import logging # noqa: E402
-from alembic import op
-from sqlalchemy import Column, Integer, Text
-from sqlalchemy.ext.declarative import declarative_base
+from alembic import op # noqa: E402
+from sqlalchemy import Column, Integer, Text # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
+from superset import db # noqa: E402
Base = declarative_base()
@@ -76,7 +76,7 @@ def upgrade():
params["resample_method"] = fill_method
# Ensure that the resample logic is fully defined.
- if not "resample_method" in params:
+ if "resample_method" not in params:
del params["resample_rule"]
else:
del params["resample_rule"]
diff --git a/superset/migrations/versions/2019-07-11_19-02_def97f26fdfb_add_index_to_tagged_object.py b/superset/migrations/versions/2019-07-11_19-02_def97f26fdfb_add_index_to_tagged_object.py
index b3af2edba..a5707c583 100644
--- a/superset/migrations/versions/2019-07-11_19-02_def97f26fdfb_add_index_to_tagged_object.py
+++ b/superset/migrations/versions/2019-07-11_19-02_def97f26fdfb_add_index_to_tagged_object.py
@@ -26,7 +26,7 @@ Create Date: 2019-07-11 19:02:38.768324
revision = "def97f26fdfb"
down_revision = "190188938582"
-from alembic import op
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2019-07-15_12-00_190188938582_adding_unique_constraint_on_dashboard_slices_tbl.py b/superset/migrations/versions/2019-07-15_12-00_190188938582_adding_unique_constraint_on_dashboard_slices_tbl.py
index 3cea2e032..011a72902 100644
--- a/superset/migrations/versions/2019-07-15_12-00_190188938582_adding_unique_constraint_on_dashboard_slices_tbl.py
+++ b/superset/migrations/versions/2019-07-15_12-00_190188938582_adding_unique_constraint_on_dashboard_slices_tbl.py
@@ -21,10 +21,11 @@ Revises: d6ffdf31bdd4
Create Date: 2019-07-15 12:00:32.267507
"""
+
import logging
from alembic import op
-from sqlalchemy import and_, Column, ForeignKey, Integer, Table
+from sqlalchemy import and_, Column, ForeignKey, Integer
from sqlalchemy.ext.declarative import declarative_base
from superset import db
diff --git a/superset/migrations/versions/2019-09-08_21-50_11c737c17cc6_deprecate_restricted_metrics.py b/superset/migrations/versions/2019-09-08_21-50_11c737c17cc6_deprecate_restricted_metrics.py
index 8bb1c18c9..1f803c794 100644
--- a/superset/migrations/versions/2019-09-08_21-50_11c737c17cc6_deprecate_restricted_metrics.py
+++ b/superset/migrations/versions/2019-09-08_21-50_11c737c17cc6_deprecate_restricted_metrics.py
@@ -21,6 +21,7 @@ Revises: def97f26fdfb
Create Date: 2019-09-08 21:50:58.200229
"""
+
import sqlalchemy as sa
from alembic import op
diff --git a/superset/migrations/versions/2019-09-11_21-49_5afa9079866a_serialize_schema_permissions_py.py b/superset/migrations/versions/2019-09-11_21-49_5afa9079866a_serialize_schema_permissions_py.py
index 5788102b0..1cf60f105 100644
--- a/superset/migrations/versions/2019-09-11_21-49_5afa9079866a_serialize_schema_permissions_py.py
+++ b/superset/migrations/versions/2019-09-11_21-49_5afa9079866a_serialize_schema_permissions_py.py
@@ -22,7 +22,6 @@ Create Date: 2019-09-11 21:49:00.608346
"""
-
# revision identifiers, used by Alembic.
from alembic import op
from sqlalchemy import Column, ForeignKey, Integer, String
diff --git a/superset/migrations/versions/2019-09-19_13-40_258b5280a45e_form_strip_leading_and_trailing_whitespace.py b/superset/migrations/versions/2019-09-19_13-40_258b5280a45e_form_strip_leading_and_trailing_whitespace.py
index 0aa924dac..b5d80877a 100644
--- a/superset/migrations/versions/2019-09-19_13-40_258b5280a45e_form_strip_leading_and_trailing_whitespace.py
+++ b/superset/migrations/versions/2019-09-19_13-40_258b5280a45e_form_strip_leading_and_trailing_whitespace.py
@@ -21,6 +21,7 @@ Revises: 11c737c17cc6
Create Date: 2019-09-19 13:40:25.293907
"""
+
import re
from alembic import op
diff --git a/superset/migrations/versions/2019-10-02_00-29_b6fa807eac07_make_names_non_nullable.py b/superset/migrations/versions/2019-10-02_00-29_b6fa807eac07_make_names_non_nullable.py
index b0b952908..815f58611 100644
--- a/superset/migrations/versions/2019-10-02_00-29_b6fa807eac07_make_names_non_nullable.py
+++ b/superset/migrations/versions/2019-10-02_00-29_b6fa807eac07_make_names_non_nullable.py
@@ -21,6 +21,7 @@ Revises: 1495eb914ad3
Create Date: 2019-10-02 00:29:16.679272
"""
+
import sqlalchemy as sa
from alembic import op
diff --git a/superset/migrations/versions/2019-10-09_15-05_cca2f5d568c8_add_encrypted_extra_to_dbs.py b/superset/migrations/versions/2019-10-09_15-05_cca2f5d568c8_add_encrypted_extra_to_dbs.py
index 5f99fb910..5f369f2c0 100644
--- a/superset/migrations/versions/2019-10-09_15-05_cca2f5d568c8_add_encrypted_extra_to_dbs.py
+++ b/superset/migrations/versions/2019-10-09_15-05_cca2f5d568c8_add_encrypted_extra_to_dbs.py
@@ -26,8 +26,8 @@ Create Date: 2019-10-09 15:05:06.965042
revision = "cca2f5d568c8"
down_revision = "b6fa807eac07"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2019-10-10_13-52_1495eb914ad3_time_range.py b/superset/migrations/versions/2019-10-10_13-52_1495eb914ad3_time_range.py
index 374dcc672..40f7bf8f3 100644
--- a/superset/migrations/versions/2019-10-10_13-52_1495eb914ad3_time_range.py
+++ b/superset/migrations/versions/2019-10-10_13-52_1495eb914ad3_time_range.py
@@ -21,6 +21,7 @@ Revises: 258b5280a45e
Create Date: 2019-10-10 13:52:54.544475
"""
+
import json
import logging
diff --git a/superset/migrations/versions/2019-11-01_09-18_c2acd2cf3df2_alter_type_of_dbs_encrypted_extra.py b/superset/migrations/versions/2019-11-01_09-18_c2acd2cf3df2_alter_type_of_dbs_encrypted_extra.py
index 4a9b345c5..5fae00b33 100644
--- a/superset/migrations/versions/2019-11-01_09-18_c2acd2cf3df2_alter_type_of_dbs_encrypted_extra.py
+++ b/superset/migrations/versions/2019-11-01_09-18_c2acd2cf3df2_alter_type_of_dbs_encrypted_extra.py
@@ -25,7 +25,6 @@ Create Date: 2019-11-01 09:18:36.953603
import sqlalchemy as sa
from alembic import op
-from sqlalchemy_utils import EncryptedType
# revision identifiers, used by Alembic.
revision = "c2acd2cf3df2"
diff --git a/superset/migrations/versions/2019-11-06_15-23_78ee127d0d1d_reconvert_legacy_filters_into_adhoc.py b/superset/migrations/versions/2019-11-06_15-23_78ee127d0d1d_reconvert_legacy_filters_into_adhoc.py
index 073bfdc47..a9f013f40 100644
--- a/superset/migrations/versions/2019-11-06_15-23_78ee127d0d1d_reconvert_legacy_filters_into_adhoc.py
+++ b/superset/migrations/versions/2019-11-06_15-23_78ee127d0d1d_reconvert_legacy_filters_into_adhoc.py
@@ -26,18 +26,17 @@ Create Date: 2019-11-06 15:23:26.497876
revision = "78ee127d0d1d"
down_revision = "c2acd2cf3df2"
-import copy
-import json
-import logging
+import copy # noqa: E402
+import json # noqa: E402
+import logging # noqa: E402
-from alembic import op
-from sqlalchemy import Column, Integer, Text
-from sqlalchemy.ext.declarative import declarative_base
+from alembic import op # noqa: E402
+from sqlalchemy import Column, Integer, Text # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
-from superset.utils.core import (
+from superset import db # noqa: E402
+from superset.utils.core import ( # noqa: E402
convert_legacy_filters_into_adhoc,
- split_adhoc_filters_into_base_filters,
)
Base = declarative_base()
diff --git a/superset/migrations/versions/2019-11-13_11-05_db4b49eb0782_add_tables_for_sql_lab_state.py b/superset/migrations/versions/2019-11-13_11-05_db4b49eb0782_add_tables_for_sql_lab_state.py
index 53afac7d6..44efe9c8a 100644
--- a/superset/migrations/versions/2019-11-13_11-05_db4b49eb0782_add_tables_for_sql_lab_state.py
+++ b/superset/migrations/versions/2019-11-13_11-05_db4b49eb0782_add_tables_for_sql_lab_state.py
@@ -26,9 +26,8 @@ Create Date: 2019-11-13 11:05:30.122167
revision = "db4b49eb0782"
down_revision = "78ee127d0d1d"
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy.dialects import mysql
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2019-12-03_10-24_817e1c9b09d0_add_not_null_to_dbs_sqlalchemy_url.py b/superset/migrations/versions/2019-12-03_10-24_817e1c9b09d0_add_not_null_to_dbs_sqlalchemy_url.py
index fc1a5aab6..042e551b5 100644
--- a/superset/migrations/versions/2019-12-03_10-24_817e1c9b09d0_add_not_null_to_dbs_sqlalchemy_url.py
+++ b/superset/migrations/versions/2019-12-03_10-24_817e1c9b09d0_add_not_null_to_dbs_sqlalchemy_url.py
@@ -21,6 +21,7 @@ Revises: db4b49eb0782
Create Date: 2019-12-03 10:24:16.201580
"""
+
import sqlalchemy as sa
from alembic import op
diff --git a/superset/migrations/versions/2019-12-03_13-50_89115a40e8ea_change_table_schema_description_to_long_.py b/superset/migrations/versions/2019-12-03_13-50_89115a40e8ea_change_table_schema_description_to_long_.py
index 7679baa5a..cf5f155cf 100644
--- a/superset/migrations/versions/2019-12-03_13-50_89115a40e8ea_change_table_schema_description_to_long_.py
+++ b/superset/migrations/versions/2019-12-03_13-50_89115a40e8ea_change_table_schema_description_to_long_.py
@@ -26,10 +26,10 @@ Create Date: 2019-12-03 13:50:24.746867
revision = "89115a40e8ea"
down_revision = "5afa9079866a"
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy.dialects import mysql
-from sqlalchemy.dialects.mysql.base import MySQLDialect
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
+from sqlalchemy.dialects import mysql # noqa: E402
+from sqlalchemy.dialects.mysql.base import MySQLDialect # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2019-12-04_17-07_0a6f12f60c73_add_role_level_security.py b/superset/migrations/versions/2019-12-04_17-07_0a6f12f60c73_add_role_level_security.py
index b202b8708..d24857870 100644
--- a/superset/migrations/versions/2019-12-04_17-07_0a6f12f60c73_add_role_level_security.py
+++ b/superset/migrations/versions/2019-12-04_17-07_0a6f12f60c73_add_role_level_security.py
@@ -26,8 +26,8 @@ Create Date: 2019-12-04 17:07:54.390805
revision = "0a6f12f60c73"
down_revision = "3325d4caccc8"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2020-01-08_01-17_e96dbf2cfef0_datasource_cluster_fk.py b/superset/migrations/versions/2020-01-08_01-17_e96dbf2cfef0_datasource_cluster_fk.py
index e94505ba3..9682ee6c5 100644
--- a/superset/migrations/versions/2020-01-08_01-17_e96dbf2cfef0_datasource_cluster_fk.py
+++ b/superset/migrations/versions/2020-01-08_01-17_e96dbf2cfef0_datasource_cluster_fk.py
@@ -21,10 +21,10 @@ Revises: 817e1c9b09d0
Create Date: 2020-01-08 01:17:40.127610
"""
+
import sqlalchemy as sa
from alembic import op
-from superset import db
from superset.utils.core import (
generic_find_fk_constraint_name,
generic_find_uq_constraint_name,
diff --git a/superset/migrations/versions/2020-02-07_14-13_3325d4caccc8_dashboard_scoped_filters.py b/superset/migrations/versions/2020-02-07_14-13_3325d4caccc8_dashboard_scoped_filters.py
index ec02a8ca8..f925b54b2 100644
--- a/superset/migrations/versions/2020-02-07_14-13_3325d4caccc8_dashboard_scoped_filters.py
+++ b/superset/migrations/versions/2020-02-07_14-13_3325d4caccc8_dashboard_scoped_filters.py
@@ -27,7 +27,7 @@ import json
import logging
from alembic import op
-from sqlalchemy import and_, Column, ForeignKey, Integer, String, Table, Text
+from sqlalchemy import Column, ForeignKey, Integer, String, Table, Text
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
diff --git a/superset/migrations/versions/2020-02-20_08-52_72428d1ea401_add_tmp_schema_name_to_the_query_object.py b/superset/migrations/versions/2020-02-20_08-52_72428d1ea401_add_tmp_schema_name_to_the_query_object.py
index d50db62b5..bb2198291 100644
--- a/superset/migrations/versions/2020-02-20_08-52_72428d1ea401_add_tmp_schema_name_to_the_query_object.py
+++ b/superset/migrations/versions/2020-02-20_08-52_72428d1ea401_add_tmp_schema_name_to_the_query_object.py
@@ -26,8 +26,8 @@ Create Date: 2020-02-20 08:52:22.877902
revision = "72428d1ea401"
down_revision = "0a6f12f60c73"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2020-03-25_10-42_f9a30386bd74_cleanup_time_grainularity.py b/superset/migrations/versions/2020-03-25_10-42_f9a30386bd74_cleanup_time_grainularity.py
index b45e7f9ad..fd1469a6d 100644
--- a/superset/migrations/versions/2020-03-25_10-42_f9a30386bd74_cleanup_time_grainularity.py
+++ b/superset/migrations/versions/2020-03-25_10-42_f9a30386bd74_cleanup_time_grainularity.py
@@ -26,13 +26,13 @@ Create Date: 2020-03-25 10:42:11.047328
revision = "f9a30386bd74"
down_revision = "b5998378c225"
-import json
+import json # noqa: E402
-from alembic import op
-from sqlalchemy import Column, Integer, String, Text
-from sqlalchemy.ext.declarative import declarative_base
+from alembic import op # noqa: E402
+from sqlalchemy import Column, Integer, String, Text # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
+from superset import db # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2020-03-25_10-49_b5998378c225_add_certificate_to_dbs.py b/superset/migrations/versions/2020-03-25_10-49_b5998378c225_add_certificate_to_dbs.py
index 3b7c3951c..78f53466c 100644
--- a/superset/migrations/versions/2020-03-25_10-49_b5998378c225_add_certificate_to_dbs.py
+++ b/superset/migrations/versions/2020-03-25_10-49_b5998378c225_add_certificate_to_dbs.py
@@ -27,13 +27,13 @@ revision = "b5998378c225"
down_revision = "72428d1ea401"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
kwargs: dict[str, str] = {}
- bind = op.get_bind()
+ bind = op.get_bind() # noqa: F841
op.add_column(
"dbs",
sa.Column("server_cert", sa.LargeBinary(), nullable=True, **kwargs),
diff --git a/superset/migrations/versions/2020-04-24_10-46_e557699a813e_add_tables_relation_to_row_level_.py b/superset/migrations/versions/2020-04-24_10-46_e557699a813e_add_tables_relation_to_row_level_.py
index 1ed33379b..1efa321b6 100644
--- a/superset/migrations/versions/2020-04-24_10-46_e557699a813e_add_tables_relation_to_row_level_.py
+++ b/superset/migrations/versions/2020-04-24_10-46_e557699a813e_add_tables_relation_to_row_level_.py
@@ -26,10 +26,10 @@ Create Date: 2020-04-24 10:46:24.119363
revision = "e557699a813e"
down_revision = "743a117f0d98"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
-from superset.utils.core import generic_find_fk_constraint_name
+from superset.utils.core import generic_find_fk_constraint_name # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2020-04-29_09-24_620241d1153f_update_time_grain_sqla.py b/superset/migrations/versions/2020-04-29_09-24_620241d1153f_update_time_grain_sqla.py
index 29735facb..3492d77dd 100644
--- a/superset/migrations/versions/2020-04-29_09-24_620241d1153f_update_time_grain_sqla.py
+++ b/superset/migrations/versions/2020-04-29_09-24_620241d1153f_update_time_grain_sqla.py
@@ -26,14 +26,14 @@ Create Date: 2020-04-29 09:24:04.952368
revision = "620241d1153f"
down_revision = "f9a30386bd74"
-import json
+import json # noqa: E402
-from alembic import op
-from sqlalchemy import Column, ForeignKey, Integer, Text
-from sqlalchemy.ext.declarative import declarative_base
+from alembic import op # noqa: E402
+from sqlalchemy import Column, ForeignKey, Integer, Text # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db, db_engine_specs
-from superset.databases.utils import make_url_safe
+from superset import db, db_engine_specs # noqa: E402
+from superset.databases.utils import make_url_safe # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2020-05-12_12-59_ea396d202291_ctas_method_in_query.py b/superset/migrations/versions/2020-05-12_12-59_ea396d202291_ctas_method_in_query.py
index 6dd0b24cf..38e26845b 100644
--- a/superset/migrations/versions/2020-05-12_12-59_ea396d202291_ctas_method_in_query.py
+++ b/superset/migrations/versions/2020-05-12_12-59_ea396d202291_ctas_method_in_query.py
@@ -26,8 +26,8 @@ Create Date: 2020-05-12 12:59:26.583276
revision = "ea396d202291"
down_revision = "e557699a813e"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2020-05-13_21-01_743a117f0d98_add_slack_to_the_schedule.py b/superset/migrations/versions/2020-05-13_21-01_743a117f0d98_add_slack_to_the_schedule.py
index 2e72a01cb..3179cd57f 100644
--- a/superset/migrations/versions/2020-05-13_21-01_743a117f0d98_add_slack_to_the_schedule.py
+++ b/superset/migrations/versions/2020-05-13_21-01_743a117f0d98_add_slack_to_the_schedule.py
@@ -26,8 +26,8 @@ Create Date: 2020-05-13 21:01:26.163478
revision = "743a117f0d98"
down_revision = "620241d1153f"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2020-05-26_23-21_2f1d15e8a6af_add_alerts.py b/superset/migrations/versions/2020-05-26_23-21_2f1d15e8a6af_add_alerts.py
index bb85d5198..c0243717f 100644
--- a/superset/migrations/versions/2020-05-26_23-21_2f1d15e8a6af_add_alerts.py
+++ b/superset/migrations/versions/2020-05-26_23-21_2f1d15e8a6af_add_alerts.py
@@ -26,9 +26,8 @@ Create Date: 2020-05-26 23:21:50.059635
revision = "2f1d15e8a6af"
down_revision = "a72cb0ebeb22"
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy.dialects import mysql
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2020-07-09_17-12_73fd22e742ab_add_dynamic_plugins_py.py b/superset/migrations/versions/2020-07-09_17-12_73fd22e742ab_add_dynamic_plugins_py.py
index e2bbedcd3..b6a20148c 100644
--- a/superset/migrations/versions/2020-07-09_17-12_73fd22e742ab_add_dynamic_plugins_py.py
+++ b/superset/migrations/versions/2020-07-09_17-12_73fd22e742ab_add_dynamic_plugins_py.py
@@ -26,8 +26,8 @@ Create Date: 2020-07-09 17:12:00.686702
revision = "73fd22e742ab"
down_revision = "ab104a954a8f"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2020-08-08_18-10_f2672aa8350a_add_slack_to_alerts.py b/superset/migrations/versions/2020-08-08_18-10_f2672aa8350a_add_slack_to_alerts.py
index c7c8c4c2d..e87591fe8 100644
--- a/superset/migrations/versions/2020-08-08_18-10_f2672aa8350a_add_slack_to_alerts.py
+++ b/superset/migrations/versions/2020-08-08_18-10_f2672aa8350a_add_slack_to_alerts.py
@@ -26,8 +26,8 @@ Create Date: 2020-08-08 18:10:51.973551
revision = "f2672aa8350a"
down_revision = "2f1d15e8a6af"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2020-08-12_00-24_978245563a02_migrate_iframe_to_dash_markdown.py b/superset/migrations/versions/2020-08-12_00-24_978245563a02_migrate_iframe_to_dash_markdown.py
index 70f1fcc07..c59abc8d4 100644
--- a/superset/migrations/versions/2020-08-12_00-24_978245563a02_migrate_iframe_to_dash_markdown.py
+++ b/superset/migrations/versions/2020-08-12_00-24_978245563a02_migrate_iframe_to_dash_markdown.py
@@ -21,13 +21,14 @@ Revises: f2672aa8350a
Create Date: 2020-08-12 00:24:39.617899
"""
+
import json
import logging
import uuid
from collections import defaultdict
from alembic import op
-from sqlalchemy import and_, Column, ForeignKey, Integer, String, Table, Text
+from sqlalchemy import Column, ForeignKey, Integer, String, Table, Text
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
diff --git a/superset/migrations/versions/2020-08-12_10-01_f120347acb39_add_extra_column_to_tables_and_metrics.py b/superset/migrations/versions/2020-08-12_10-01_f120347acb39_add_extra_column_to_tables_and_metrics.py
index face9ce73..2bee1ee14 100644
--- a/superset/migrations/versions/2020-08-12_10-01_f120347acb39_add_extra_column_to_tables_and_metrics.py
+++ b/superset/migrations/versions/2020-08-12_10-01_f120347acb39_add_extra_column_to_tables_and_metrics.py
@@ -26,8 +26,8 @@ Create Date: 2020-08-12 10:01:43.531845
revision = "f120347acb39"
down_revision = "f2672aa8350a"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2020-08-12_15-47_f80a3b88324b_.py b/superset/migrations/versions/2020-08-12_15-47_f80a3b88324b_.py
index 7defdc5e2..22dc3ca50 100644
--- a/superset/migrations/versions/2020-08-12_15-47_f80a3b88324b_.py
+++ b/superset/migrations/versions/2020-08-12_15-47_f80a3b88324b_.py
@@ -26,9 +26,6 @@ Create Date: 2020-08-12 15:47:56.580191
revision = "f80a3b88324b"
down_revision = ("978245563a02", "f120347acb39")
-import sqlalchemy as sa
-from alembic import op
-
def upgrade():
pass
diff --git a/superset/migrations/versions/2020-08-28_17-16_175ea3592453_cache_lookup.py b/superset/migrations/versions/2020-08-28_17-16_175ea3592453_cache_lookup.py
index 9057b169d..da3cd807d 100644
--- a/superset/migrations/versions/2020-08-28_17-16_175ea3592453_cache_lookup.py
+++ b/superset/migrations/versions/2020-08-28_17-16_175ea3592453_cache_lookup.py
@@ -26,8 +26,8 @@ Create Date: 2020-08-28 17:16:57.379425
revision = "175ea3592453"
down_revision = "f80a3b88324b"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2020-08-31_20-30_2e5a0ee25ed4_refractor_alerting.py b/superset/migrations/versions/2020-08-31_20-30_2e5a0ee25ed4_refractor_alerting.py
index 4eca5f147..d6c2b4a53 100644
--- a/superset/migrations/versions/2020-08-31_20-30_2e5a0ee25ed4_refractor_alerting.py
+++ b/superset/migrations/versions/2020-08-31_20-30_2e5a0ee25ed4_refractor_alerting.py
@@ -26,9 +26,9 @@ Create Date: 2020-08-31 20:30:30.781478
revision = "2e5a0ee25ed4"
down_revision = "f80a3b88324b"
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy.dialects import mysql
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
+from sqlalchemy.dialects import mysql # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2020-09-15_18-22_e5ef6828ac4e_add_rls_filter_type_and_grouping_key.py b/superset/migrations/versions/2020-09-15_18-22_e5ef6828ac4e_add_rls_filter_type_and_grouping_key.py
index 7e1de6112..d28633c09 100644
--- a/superset/migrations/versions/2020-09-15_18-22_e5ef6828ac4e_add_rls_filter_type_and_grouping_key.py
+++ b/superset/migrations/versions/2020-09-15_18-22_e5ef6828ac4e_add_rls_filter_type_and_grouping_key.py
@@ -26,16 +26,16 @@ Create Date: 2020-09-15 18:22:40.130985
revision = "e5ef6828ac4e"
down_revision = "ae19b4ee3692"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
-from superset.utils import core as utils
+from superset.utils import core as utils # noqa: E402
def upgrade():
with op.batch_alter_table("row_level_security_filters") as batch_op:
- batch_op.add_column(sa.Column("filter_type", sa.VARCHAR(255), nullable=True)),
- batch_op.add_column(sa.Column("group_key", sa.VARCHAR(255), nullable=True)),
+ (batch_op.add_column(sa.Column("filter_type", sa.VARCHAR(255), nullable=True)),)
+ (batch_op.add_column(sa.Column("group_key", sa.VARCHAR(255), nullable=True)),)
batch_op.create_index(
op.f("ix_row_level_security_filters_filter_type"),
["filter_type"],
diff --git a/superset/migrations/versions/2020-09-24_12-04_3fbbc6e8d654_fix_data_access_permissions_for_virtual_.py b/superset/migrations/versions/2020-09-24_12-04_3fbbc6e8d654_fix_data_access_permissions_for_virtual_.py
index 45f091c38..47be42486 100644
--- a/superset/migrations/versions/2020-09-24_12-04_3fbbc6e8d654_fix_data_access_permissions_for_virtual_.py
+++ b/superset/migrations/versions/2020-09-24_12-04_3fbbc6e8d654_fix_data_access_permissions_for_virtual_.py
@@ -26,10 +26,10 @@ Create Date: 2020-09-24 12:04:33.827436
revision = "3fbbc6e8d654"
down_revision = "e5ef6828ac4e"
-import re
+import re # noqa: E402
-from alembic import op
-from sqlalchemy import (
+from alembic import op # noqa: E402
+from sqlalchemy import ( # noqa: E402
Column,
ForeignKey,
Integer,
@@ -39,9 +39,9 @@ from sqlalchemy import (
Table,
UniqueConstraint,
)
-from sqlalchemy.exc import SQLAlchemyError
-from sqlalchemy.ext.declarative import declarative_base
-from sqlalchemy.orm import backref, relationship
+from sqlalchemy.exc import SQLAlchemyError # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
+from sqlalchemy.orm import backref, relationship # noqa: E402
Base = declarative_base()
@@ -106,7 +106,6 @@ sqlatable_user = Table(
class Database(Base): # pylint: disable=too-many-public-methods
-
"""An ORM object that stores Database related information"""
__tablename__ = "dbs"
diff --git a/superset/migrations/versions/2020-09-25_10-56_18532d70ab98_fix_table_unique_constraint_in_mysql.py b/superset/migrations/versions/2020-09-25_10-56_18532d70ab98_fix_table_unique_constraint_in_mysql.py
index 79aef84a3..22fe4bca0 100644
--- a/superset/migrations/versions/2020-09-25_10-56_18532d70ab98_fix_table_unique_constraint_in_mysql.py
+++ b/superset/migrations/versions/2020-09-25_10-56_18532d70ab98_fix_table_unique_constraint_in_mysql.py
@@ -26,11 +26,11 @@ Create Date: 2020-09-25 10:56:13.711182
revision = "18532d70ab98"
down_revision = "3fbbc6e8d654"
-from alembic import op
-from sqlalchemy.dialects.mysql.base import MySQLDialect
-from sqlalchemy.engine.reflection import Inspector
+from alembic import op # noqa: E402
+from sqlalchemy.dialects.mysql.base import MySQLDialect # noqa: E402
+from sqlalchemy.engine.reflection import Inspector # noqa: E402
-from superset.utils.core import generic_find_uq_constraint_name
+from superset.utils.core import generic_find_uq_constraint_name # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2020-09-28_17-57_b56500de1855_add_uuid_column_to_import_mixin.py b/superset/migrations/versions/2020-09-28_17-57_b56500de1855_add_uuid_column_to_import_mixin.py
index 980ca56f6..e92c1f065 100644
--- a/superset/migrations/versions/2020-09-28_17-57_b56500de1855_add_uuid_column_to_import_mixin.py
+++ b/superset/migrations/versions/2020-09-28_17-57_b56500de1855_add_uuid_column_to_import_mixin.py
@@ -21,6 +21,7 @@ Revises: 18532d70ab98
Create Date: 2020-09-28 17:57:23.128142
"""
+
import json
import os
from json.decoder import JSONDecodeError
diff --git a/superset/migrations/versions/2020-10-20_17-28_585b0b1a7b18_add_exec_info_to_saved_queries.py b/superset/migrations/versions/2020-10-20_17-28_585b0b1a7b18_add_exec_info_to_saved_queries.py
index 54755c429..ec0891ce6 100644
--- a/superset/migrations/versions/2020-10-20_17-28_585b0b1a7b18_add_exec_info_to_saved_queries.py
+++ b/superset/migrations/versions/2020-10-20_17-28_585b0b1a7b18_add_exec_info_to_saved_queries.py
@@ -26,8 +26,8 @@ Create Date: 2020-10-20 17:28:22.857694
revision = "585b0b1a7b18"
down_revision = "af30ca79208f"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2020-10-21_21-09_96e99fb176a0_add_import_mixing_to_saved_query.py b/superset/migrations/versions/2020-10-21_21-09_96e99fb176a0_add_import_mixing_to_saved_query.py
index bda21e475..b3f57cf35 100644
--- a/superset/migrations/versions/2020-10-21_21-09_96e99fb176a0_add_import_mixing_to_saved_query.py
+++ b/superset/migrations/versions/2020-10-21_21-09_96e99fb176a0_add_import_mixing_to_saved_query.py
@@ -86,7 +86,7 @@ def upgrade():
def downgrade():
bind = op.get_bind()
- session = db.Session(bind=bind)
+ session = db.Session(bind=bind) # noqa: F841
# Remove uuid column
with op.batch_alter_table("saved_query") as batch_op:
diff --git a/superset/migrations/versions/2020-11-04_11-06_49b5a32daba5_add_report_schedules.py b/superset/migrations/versions/2020-11-04_11-06_49b5a32daba5_add_report_schedules.py
index 3a3b172bf..7446b9a75 100644
--- a/superset/migrations/versions/2020-11-04_11-06_49b5a32daba5_add_report_schedules.py
+++ b/superset/migrations/versions/2020-11-04_11-06_49b5a32daba5_add_report_schedules.py
@@ -26,10 +26,9 @@ Create Date: 2020-11-04 11:06:59.249758
revision = "49b5a32daba5"
down_revision = "96e99fb176a0"
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy.engine.reflection import Inspector
-from sqlalchemy.exc import OperationalError
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
+from sqlalchemy.engine.reflection import Inspector # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2020-11-15_16-08_a8173232b786_add_path_to_logs.py b/superset/migrations/versions/2020-11-15_16-08_a8173232b786_add_path_to_logs.py
index ea4900a16..b6efc56f8 100644
--- a/superset/migrations/versions/2020-11-15_16-08_a8173232b786_add_path_to_logs.py
+++ b/superset/migrations/versions/2020-11-15_16-08_a8173232b786_add_path_to_logs.py
@@ -26,11 +26,9 @@ Create Date: 2020-11-15 16:08:24.580764
revision = "a8173232b786"
down_revision = "49b5a32daba5"
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy.dialects import mysql
+from alembic import op # noqa: E402
-from superset.migrations.shared import utils
+from superset.migrations.shared import utils # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2020-11-20_14-24_e38177dbf641_security_converge_saved_queries.py b/superset/migrations/versions/2020-11-20_14-24_e38177dbf641_security_converge_saved_queries.py
index d3342fe53..bf033c651 100644
--- a/superset/migrations/versions/2020-11-20_14-24_e38177dbf641_security_converge_saved_queries.py
+++ b/superset/migrations/versions/2020-11-20_14-24_e38177dbf641_security_converge_saved_queries.py
@@ -27,11 +27,11 @@ revision = "e38177dbf641"
down_revision = "a8173232b786"
-from alembic import op
-from sqlalchemy.exc import SQLAlchemyError
-from sqlalchemy.orm import Session
+from alembic import op # noqa: E402
+from sqlalchemy.exc import SQLAlchemyError # noqa: E402
+from sqlalchemy.orm import Session # noqa: E402
-from superset.migrations.shared.security_converge import (
+from superset.migrations.shared.security_converge import ( # noqa: E402
add_pvms,
get_reversed_new_pvms,
get_reversed_pvm_map,
diff --git a/superset/migrations/versions/2020-11-30_15-25_40f16acf1ba7_security_converge_reports.py b/superset/migrations/versions/2020-11-30_15-25_40f16acf1ba7_security_converge_reports.py
index 2886bfbb1..4535b793b 100644
--- a/superset/migrations/versions/2020-11-30_15-25_40f16acf1ba7_security_converge_reports.py
+++ b/superset/migrations/versions/2020-11-30_15-25_40f16acf1ba7_security_converge_reports.py
@@ -27,11 +27,11 @@ revision = "40f16acf1ba7"
down_revision = "5daced1f0e76"
-from alembic import op
-from sqlalchemy.exc import SQLAlchemyError
-from sqlalchemy.orm import Session
+from alembic import op # noqa: E402
+from sqlalchemy.exc import SQLAlchemyError # noqa: E402
+from sqlalchemy.orm import Session # noqa: E402
-from superset.migrations.shared.security_converge import (
+from superset.migrations.shared.security_converge import ( # noqa: E402
add_pvms,
get_reversed_new_pvms,
get_reversed_pvm_map,
diff --git a/superset/migrations/versions/2020-11-30_17-54_8ee129739cf9_security_converge_css_templates.py b/superset/migrations/versions/2020-11-30_17-54_8ee129739cf9_security_converge_css_templates.py
index 401dc5c4b..7a57de742 100644
--- a/superset/migrations/versions/2020-11-30_17-54_8ee129739cf9_security_converge_css_templates.py
+++ b/superset/migrations/versions/2020-11-30_17-54_8ee129739cf9_security_converge_css_templates.py
@@ -27,11 +27,11 @@ revision = "8ee129739cf9"
down_revision = "e38177dbf641"
-from alembic import op
-from sqlalchemy.exc import SQLAlchemyError
-from sqlalchemy.orm import Session
+from alembic import op # noqa: E402
+from sqlalchemy.exc import SQLAlchemyError # noqa: E402
+from sqlalchemy.orm import Session # noqa: E402
-from superset.migrations.shared.security_converge import (
+from superset.migrations.shared.security_converge import ( # noqa: E402
add_pvms,
get_reversed_new_pvms,
get_reversed_pvm_map,
diff --git a/superset/migrations/versions/2020-12-03_10-11_5daced1f0e76_reports_add_working_timeout_column.py b/superset/migrations/versions/2020-12-03_10-11_5daced1f0e76_reports_add_working_timeout_column.py
index b836ef01f..2cd74a56c 100644
--- a/superset/migrations/versions/2020-12-03_10-11_5daced1f0e76_reports_add_working_timeout_column.py
+++ b/superset/migrations/versions/2020-12-03_10-11_5daced1f0e76_reports_add_working_timeout_column.py
@@ -26,8 +26,8 @@ Create Date: 2020-12-03 10:11:22.894977
revision = "5daced1f0e76"
down_revision = "811494c0cc23"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2020-12-03_16-21_811494c0cc23_remove_path_from_logs.py b/superset/migrations/versions/2020-12-03_16-21_811494c0cc23_remove_path_from_logs.py
index f0731f926..13e648f85 100644
--- a/superset/migrations/versions/2020-12-03_16-21_811494c0cc23_remove_path_from_logs.py
+++ b/superset/migrations/versions/2020-12-03_16-21_811494c0cc23_remove_path_from_logs.py
@@ -26,10 +26,9 @@ Create Date: 2020-12-03 16:21:06.771684
revision = "811494c0cc23"
down_revision = "8ee129739cf9"
-import sqlalchemy as sa
-from alembic import op
+from alembic import op # noqa: E402
-from superset.migrations.shared import utils
+from superset.migrations.shared import utils # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2020-12-09_14-13_ccb74baaa89b_security_converge_charts.py b/superset/migrations/versions/2020-12-09_14-13_ccb74baaa89b_security_converge_charts.py
index 66fc547d5..7697b4981 100644
--- a/superset/migrations/versions/2020-12-09_14-13_ccb74baaa89b_security_converge_charts.py
+++ b/superset/migrations/versions/2020-12-09_14-13_ccb74baaa89b_security_converge_charts.py
@@ -27,11 +27,11 @@ revision = "ccb74baaa89b"
down_revision = "40f16acf1ba7"
-from alembic import op
-from sqlalchemy.exc import SQLAlchemyError
-from sqlalchemy.orm import Session
+from alembic import op # noqa: E402
+from sqlalchemy.exc import SQLAlchemyError # noqa: E402
+from sqlalchemy.orm import Session # noqa: E402
-from superset.migrations.shared.security_converge import (
+from superset.migrations.shared.security_converge import ( # noqa: E402
add_pvms,
get_reversed_new_pvms,
get_reversed_pvm_map,
diff --git a/superset/migrations/versions/2020-12-10_15-05_45731db65d9c_security_converge_datasets.py b/superset/migrations/versions/2020-12-10_15-05_45731db65d9c_security_converge_datasets.py
index c7a1c8162..645ee7e58 100644
--- a/superset/migrations/versions/2020-12-10_15-05_45731db65d9c_security_converge_datasets.py
+++ b/superset/migrations/versions/2020-12-10_15-05_45731db65d9c_security_converge_datasets.py
@@ -26,11 +26,11 @@ Create Date: 2020-12-10 15:05:44.928020
revision = "45731db65d9c"
down_revision = "c25cb2c78727"
-from alembic import op
-from sqlalchemy.exc import SQLAlchemyError
-from sqlalchemy.orm import Session
+from alembic import op # noqa: E402
+from sqlalchemy.exc import SQLAlchemyError # noqa: E402
+from sqlalchemy.orm import Session # noqa: E402
-from superset.migrations.shared.security_converge import (
+from superset.migrations.shared.security_converge import ( # noqa: E402
add_pvms,
get_reversed_new_pvms,
get_reversed_pvm_map,
diff --git a/superset/migrations/versions/2020-12-11_11-45_1f6dca87d1a2_security_converge_dashboards.py b/superset/migrations/versions/2020-12-11_11-45_1f6dca87d1a2_security_converge_dashboards.py
index ae350848c..e72fb416e 100644
--- a/superset/migrations/versions/2020-12-11_11-45_1f6dca87d1a2_security_converge_dashboards.py
+++ b/superset/migrations/versions/2020-12-11_11-45_1f6dca87d1a2_security_converge_dashboards.py
@@ -27,11 +27,11 @@ revision = "1f6dca87d1a2"
down_revision = "4b84f97828aa"
-from alembic import op
-from sqlalchemy.exc import SQLAlchemyError
-from sqlalchemy.orm import Session
+from alembic import op # noqa: E402
+from sqlalchemy.exc import SQLAlchemyError # noqa: E402
+from sqlalchemy.orm import Session # noqa: E402
-from superset.migrations.shared.security_converge import (
+from superset.migrations.shared.security_converge import ( # noqa: E402
add_pvms,
get_reversed_new_pvms,
get_reversed_pvm_map,
diff --git a/superset/migrations/versions/2020-12-14_10-49_42b4c9e01447_security_converge_databases.py b/superset/migrations/versions/2020-12-14_10-49_42b4c9e01447_security_converge_databases.py
index d8d6a2a33..a56a514a4 100644
--- a/superset/migrations/versions/2020-12-14_10-49_42b4c9e01447_security_converge_databases.py
+++ b/superset/migrations/versions/2020-12-14_10-49_42b4c9e01447_security_converge_databases.py
@@ -26,12 +26,11 @@ Create Date: 2020-12-14 10:49:36.110805
revision = "42b4c9e01447"
down_revision = "1f6dca87d1a2"
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy.exc import SQLAlchemyError
-from sqlalchemy.orm import Session
+from alembic import op # noqa: E402
+from sqlalchemy.exc import SQLAlchemyError # noqa: E402
+from sqlalchemy.orm import Session # noqa: E402
-from superset.migrations.shared.security_converge import (
+from superset.migrations.shared.security_converge import ( # noqa: E402
add_pvms,
get_reversed_new_pvms,
get_reversed_pvm_map,
diff --git a/superset/migrations/versions/2020-12-15_09-07_ab104a954a8f_reports_alter_crontab_size.py b/superset/migrations/versions/2020-12-15_09-07_ab104a954a8f_reports_alter_crontab_size.py
index bb1a501de..2839a1c67 100644
--- a/superset/migrations/versions/2020-12-15_09-07_ab104a954a8f_reports_alter_crontab_size.py
+++ b/superset/migrations/versions/2020-12-15_09-07_ab104a954a8f_reports_alter_crontab_size.py
@@ -26,8 +26,8 @@ Create Date: 2020-12-15 09:07:24.730545
revision = "ab104a954a8f"
down_revision = "e37912a26567"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2020-12-16_12-15_e37912a26567_security_converge_queries.py b/superset/migrations/versions/2020-12-16_12-15_e37912a26567_security_converge_queries.py
index 99928e64c..8c2b1b6c5 100644
--- a/superset/migrations/versions/2020-12-16_12-15_e37912a26567_security_converge_queries.py
+++ b/superset/migrations/versions/2020-12-16_12-15_e37912a26567_security_converge_queries.py
@@ -26,11 +26,11 @@ Create Date: 2020-12-16 12:15:28.291777
revision = "e37912a26567"
down_revision = "42b4c9e01447"
-from alembic import op
-from sqlalchemy.exc import SQLAlchemyError
-from sqlalchemy.orm import Session
+from alembic import op # noqa: E402
+from sqlalchemy.exc import SQLAlchemyError # noqa: E402
+from sqlalchemy.orm import Session # noqa: E402
-from superset.migrations.shared.security_converge import (
+from superset.migrations.shared.security_converge import ( # noqa: E402
add_pvms,
get_reversed_new_pvms,
get_reversed_pvm_map,
diff --git a/superset/migrations/versions/2020-12-23_11-34_c878781977c6_alert_reports_shared_uniqueness.py b/superset/migrations/versions/2020-12-23_11-34_c878781977c6_alert_reports_shared_uniqueness.py
index bb8f628bd..2244835c1 100644
--- a/superset/migrations/versions/2020-12-23_11-34_c878781977c6_alert_reports_shared_uniqueness.py
+++ b/superset/migrations/versions/2020-12-23_11-34_c878781977c6_alert_reports_shared_uniqueness.py
@@ -26,14 +26,12 @@ Create Date: 2020-12-23 11:34:53.882200
revision = "c878781977c6"
down_revision = "73fd22e742ab"
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy.dialects.mysql.base import MySQLDialect
-from sqlalchemy.dialects.postgresql.base import PGDialect
-from sqlalchemy.dialects.sqlite.base import SQLiteDialect
-from sqlalchemy.engine.reflection import Inspector
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
+from sqlalchemy.dialects.mysql.base import MySQLDialect # noqa: E402
+from sqlalchemy.dialects.postgresql.base import PGDialect # noqa: E402
+from sqlalchemy.dialects.sqlite.base import SQLiteDialect # noqa: E402
-from superset.utils.core import generic_find_uq_constraint_name
report_schedule = sa.Table(
"report_schedule",
diff --git a/superset/migrations/versions/2021-01-14_19-12_e11ccdd12658_add_roles_relationship_to_dashboard.py b/superset/migrations/versions/2021-01-14_19-12_e11ccdd12658_add_roles_relationship_to_dashboard.py
index bfa23bef3..78eaa98d5 100644
--- a/superset/migrations/versions/2021-01-14_19-12_e11ccdd12658_add_roles_relationship_to_dashboard.py
+++ b/superset/migrations/versions/2021-01-14_19-12_e11ccdd12658_add_roles_relationship_to_dashboard.py
@@ -20,11 +20,12 @@ Revision ID: e11ccdd12658
Revises: 260bf0649a77
Create Date: 2021-01-14 19:12:43.406230
"""
+
# revision identifiers, used by Alembic.
revision = "e11ccdd12658"
down_revision = "260bf0649a77"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2021-01-23_16-25_260bf0649a77_migrate_x_dateunit_in_time_range.py b/superset/migrations/versions/2021-01-23_16-25_260bf0649a77_migrate_x_dateunit_in_time_range.py
index 6704a0323..f56007fd0 100644
--- a/superset/migrations/versions/2021-01-23_16-25_260bf0649a77_migrate_x_dateunit_in_time_range.py
+++ b/superset/migrations/versions/2021-01-23_16-25_260bf0649a77_migrate_x_dateunit_in_time_range.py
@@ -26,19 +26,19 @@ Create Date: 2021-01-23 16:25:14.496774
revision = "260bf0649a77"
down_revision = "c878781977c6"
-import json
-import re
+import json # noqa: E402
+import re # noqa: E402
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy import Column, Integer, or_, Text
-from sqlalchemy.dialects.mysql.base import MySQLDialect
-from sqlalchemy.dialects.sqlite.base import SQLiteDialect
-from sqlalchemy.exc import OperationalError
-from sqlalchemy.ext.declarative import declarative_base
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
+from sqlalchemy import Column, Integer, or_, Text # noqa: E402
+from sqlalchemy.dialects.mysql.base import MySQLDialect # noqa: E402
+from sqlalchemy.dialects.sqlite.base import SQLiteDialect # noqa: E402
+from sqlalchemy.exc import OperationalError # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
-from superset.utils.date_parser import DateRangeMigration
+from superset import db # noqa: E402
+from superset.utils.date_parser import DateRangeMigration # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2021-02-04_09-34_070c043f2fdb_add_granularity_to_charts_where_missing.py b/superset/migrations/versions/2021-02-04_09-34_070c043f2fdb_add_granularity_to_charts_where_missing.py
index 18062b275..133db6649 100644
--- a/superset/migrations/versions/2021-02-04_09-34_070c043f2fdb_add_granularity_to_charts_where_missing.py
+++ b/superset/migrations/versions/2021-02-04_09-34_070c043f2fdb_add_granularity_to_charts_where_missing.py
@@ -26,13 +26,13 @@ Create Date: 2021-02-04 09:34:13.608891
revision = "070c043f2fdb"
down_revision = "41ce8799acc3"
-import json
+import json # noqa: E402
-from alembic import op
-from sqlalchemy import and_, Boolean, Column, Integer, String, Text
-from sqlalchemy.ext.declarative import declarative_base
+from alembic import op # noqa: E402
+from sqlalchemy import and_, Boolean, Column, Integer, String, Text # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
+from superset import db # noqa: E402
Base = declarative_base()
@@ -107,7 +107,7 @@ def upgrade():
table_columns = (
session.query(TableColumn)
.filter(TableColumn.table_id == table.id)
- .filter(TableColumn.is_dttm == True)
+ .filter(TableColumn.is_dttm == True) # noqa: E712
.all()
)
if len(table_columns):
diff --git a/superset/migrations/versions/2021-02-10_12-32_41ce8799acc3_rename_pie_label_type.py b/superset/migrations/versions/2021-02-10_12-32_41ce8799acc3_rename_pie_label_type.py
index 4328eb291..7e84392b1 100644
--- a/superset/migrations/versions/2021-02-10_12-32_41ce8799acc3_rename_pie_label_type.py
+++ b/superset/migrations/versions/2021-02-10_12-32_41ce8799acc3_rename_pie_label_type.py
@@ -26,13 +26,13 @@ Create Date: 2021-02-10 12:32:27.385579
revision = "41ce8799acc3"
down_revision = "e11ccdd12658"
-import json
+import json # noqa: E402
-from alembic import op
-from sqlalchemy import and_, Column, Integer, String, Text
-from sqlalchemy.ext.declarative import declarative_base
+from alembic import op # noqa: E402
+from sqlalchemy import and_, Column, Integer, String, Text # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
+from superset import db # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2021-02-14_11-46_1412ec1e5a7b_legacy_force_directed_to_echart.py b/superset/migrations/versions/2021-02-14_11-46_1412ec1e5a7b_legacy_force_directed_to_echart.py
index 24a81270d..862a629c1 100644
--- a/superset/migrations/versions/2021-02-14_11-46_1412ec1e5a7b_legacy_force_directed_to_echart.py
+++ b/superset/migrations/versions/2021-02-14_11-46_1412ec1e5a7b_legacy_force_directed_to_echart.py
@@ -21,11 +21,11 @@ Revises: c501b7c653a3
Create Date: 2021-02-14 11:46:02.379832
"""
+
import json
-import sqlalchemy as sa
from alembic import op
-from sqlalchemy import Column, Integer, or_, String, Text
+from sqlalchemy import Column, Integer, String, Text
from sqlalchemy.ext.declarative import declarative_base
from superset import db
diff --git a/superset/migrations/versions/2021-02-18_09-13_c501b7c653a3_add_missing_uuid_column.py b/superset/migrations/versions/2021-02-18_09-13_c501b7c653a3_add_missing_uuid_column.py
index 3dec28d2b..be921e3ac 100644
--- a/superset/migrations/versions/2021-02-18_09-13_c501b7c653a3_add_missing_uuid_column.py
+++ b/superset/migrations/versions/2021-02-18_09-13_c501b7c653a3_add_missing_uuid_column.py
@@ -26,17 +26,17 @@ Create Date: 2021-02-18 09:13:00.028317
revision = "c501b7c653a3"
down_revision = "070c043f2fdb"
-import logging
-from importlib import import_module
-from uuid import uuid4
+import logging # noqa: E402
+from importlib import import_module # noqa: E402
+from uuid import uuid4 # noqa: E402
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy.engine.reflection import Inspector
-from sqlalchemy.orm import load_only
-from sqlalchemy_utils import UUIDType
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
+from sqlalchemy.engine.reflection import Inspector # noqa: E402
+from sqlalchemy.orm import load_only # noqa: E402
+from sqlalchemy_utils import UUIDType # noqa: E402
-from superset import db
+from superset import db # noqa: E402
add_uuid_column_to_import_mixin = import_module(
"superset.migrations.versions."
diff --git a/superset/migrations/versions/2021-02-22_11-22_67da9ef1ef9c_add_hide_left_bar_to_tabstate.py b/superset/migrations/versions/2021-02-22_11-22_67da9ef1ef9c_add_hide_left_bar_to_tabstate.py
index 3cf8baa6a..02e367eb7 100644
--- a/superset/migrations/versions/2021-02-22_11-22_67da9ef1ef9c_add_hide_left_bar_to_tabstate.py
+++ b/superset/migrations/versions/2021-02-22_11-22_67da9ef1ef9c_add_hide_left_bar_to_tabstate.py
@@ -26,10 +26,9 @@ Create Date: 2021-02-22 11:22:10.156942
revision = "67da9ef1ef9c"
down_revision = "1412ec1e5a7b"
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy.dialects import mysql
-from sqlalchemy.sql import expression
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
+from sqlalchemy.sql import expression # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2021-03-23_05-23_301362411006_add_execution_id_to_report_execution_.py b/superset/migrations/versions/2021-03-23_05-23_301362411006_add_execution_id_to_report_execution_.py
index 3edc52f28..f6109fc57 100644
--- a/superset/migrations/versions/2021-03-23_05-23_301362411006_add_execution_id_to_report_execution_.py
+++ b/superset/migrations/versions/2021-03-23_05-23_301362411006_add_execution_id_to_report_execution_.py
@@ -26,9 +26,9 @@ Create Date: 2021-03-23 05:23:15.641856
revision = "301362411006"
down_revision = "989bbe479899"
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy_utils import UUIDType
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
+from sqlalchemy_utils import UUIDType # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2021-03-24_09-47_989bbe479899_rename_filter_configuration_in_.py b/superset/migrations/versions/2021-03-24_09-47_989bbe479899_rename_filter_configuration_in_.py
index 8330b75aa..d738b0e6d 100644
--- a/superset/migrations/versions/2021-03-24_09-47_989bbe479899_rename_filter_configuration_in_.py
+++ b/superset/migrations/versions/2021-03-24_09-47_989bbe479899_rename_filter_configuration_in_.py
@@ -26,13 +26,13 @@ Create Date: 2021-03-24 09:47:21.569508
revision = "989bbe479899"
down_revision = "67da9ef1ef9c"
-import json
+import json # noqa: E402
-from alembic import op
-from sqlalchemy import and_, Column, Integer, Text
-from sqlalchemy.ext.declarative import declarative_base
+from alembic import op # noqa: E402
+from sqlalchemy import Column, Integer, Text # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
+from superset import db # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2021-03-29_11-15_3ebe0993c770_filterset_table.py b/superset/migrations/versions/2021-03-29_11-15_3ebe0993c770_filterset_table.py
index b509f895a..8e092112d 100644
--- a/superset/migrations/versions/2021-03-29_11-15_3ebe0993c770_filterset_table.py
+++ b/superset/migrations/versions/2021-03-29_11-15_3ebe0993c770_filterset_table.py
@@ -26,8 +26,8 @@ Create Date: 2021-03-29 11:15:48.831225
revision = "3ebe0993c770"
down_revision = "181091c0ef16"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2021-04-06_21-39_19e978e1b9c3_add_report_format_to_report_schedule_.py b/superset/migrations/versions/2021-04-06_21-39_19e978e1b9c3_add_report_format_to_report_schedule_.py
index ff191d0e3..0d637748b 100644
--- a/superset/migrations/versions/2021-04-06_21-39_19e978e1b9c3_add_report_format_to_report_schedule_.py
+++ b/superset/migrations/versions/2021-04-06_21-39_19e978e1b9c3_add_report_format_to_report_schedule_.py
@@ -26,8 +26,8 @@ Create Date: 2021-04-06 21:39:52.259223
revision = "19e978e1b9c3"
down_revision = "fc3a3a8ff221"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2021-04-07_07-21_134cea61c5e7_remove_dataset_health_check_message.py b/superset/migrations/versions/2021-04-07_07-21_134cea61c5e7_remove_dataset_health_check_message.py
index b8cbdc301..adcc04fda 100644
--- a/superset/migrations/versions/2021-04-07_07-21_134cea61c5e7_remove_dataset_health_check_message.py
+++ b/superset/migrations/versions/2021-04-07_07-21_134cea61c5e7_remove_dataset_health_check_message.py
@@ -26,14 +26,14 @@ Create Date: 2021-04-07 07:21:27.324983
revision = "134cea61c5e7"
down_revision = "301362411006"
-import json
-import logging
+import json # noqa: E402
+import logging # noqa: E402
-from alembic import op
-from sqlalchemy import Column, Integer, Text
-from sqlalchemy.ext.declarative import declarative_base
+from alembic import op # noqa: E402
+from sqlalchemy import Column, Integer, Text # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
+from superset import db # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2021-04-09_16-14_085f06488938_country_map_use_lowercase_country_name.py b/superset/migrations/versions/2021-04-09_16-14_085f06488938_country_map_use_lowercase_country_name.py
index 7e6d904db..a303d12c8 100644
--- a/superset/migrations/versions/2021-04-09_16-14_085f06488938_country_map_use_lowercase_country_name.py
+++ b/superset/migrations/versions/2021-04-09_16-14_085f06488938_country_map_use_lowercase_country_name.py
@@ -21,6 +21,7 @@ Revises: 134cea61c5e7
Create Date: 2021-04-09 16:14:19.040884
"""
+
import json
from alembic import op
diff --git a/superset/migrations/versions/2021-04-12_12-38_fc3a3a8ff221_migrate_filter_sets_to_new_format.py b/superset/migrations/versions/2021-04-12_12-38_fc3a3a8ff221_migrate_filter_sets_to_new_format.py
index 64396b6ab..13cfab0a4 100644
--- a/superset/migrations/versions/2021-04-12_12-38_fc3a3a8ff221_migrate_filter_sets_to_new_format.py
+++ b/superset/migrations/versions/2021-04-12_12-38_fc3a3a8ff221_migrate_filter_sets_to_new_format.py
@@ -26,15 +26,15 @@ Create Date: 2021-04-12 12:38:03.913514
revision = "fc3a3a8ff221"
down_revision = "085f06488938"
-import json
-from collections.abc import Iterable
-from typing import Any
+import json # noqa: E402
+from collections.abc import Iterable # noqa: E402
+from typing import Any # noqa: E402
-from alembic import op
-from sqlalchemy import Column, Integer, Text
-from sqlalchemy.ext.declarative import declarative_base
+from alembic import op # noqa: E402
+from sqlalchemy import Column, Integer, Text # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
+from superset import db # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2021-04-16_17-38_d416d0d715cc_add_limiting_factor_column_to_query_.py b/superset/migrations/versions/2021-04-16_17-38_d416d0d715cc_add_limiting_factor_column_to_query_.py
index 532da4488..4ed937c1a 100644
--- a/superset/migrations/versions/2021-04-16_17-38_d416d0d715cc_add_limiting_factor_column_to_query_.py
+++ b/superset/migrations/versions/2021-04-16_17-38_d416d0d715cc_add_limiting_factor_column_to_query_.py
@@ -26,8 +26,8 @@ Create Date: 2021-04-16 17:38:40.342260
revision = "d416d0d715cc"
down_revision = "19e978e1b9c3"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2021-04-29_15-32_f1410ed7ec95_migrate_native_filters_to_new_schema.py b/superset/migrations/versions/2021-04-29_15-32_f1410ed7ec95_migrate_native_filters_to_new_schema.py
index 42368ce89..44bbc0c96 100644
--- a/superset/migrations/versions/2021-04-29_15-32_f1410ed7ec95_migrate_native_filters_to_new_schema.py
+++ b/superset/migrations/versions/2021-04-29_15-32_f1410ed7ec95_migrate_native_filters_to_new_schema.py
@@ -26,15 +26,15 @@ Create Date: 2021-04-29 15:32:21.939018
revision = "f1410ed7ec95"
down_revision = "d416d0d715cc"
-import json
-from collections.abc import Iterable
-from typing import Any
+import json # noqa: E402
+from collections.abc import Iterable # noqa: E402
+from typing import Any # noqa: E402
-from alembic import op
-from sqlalchemy import Column, Integer, Text
-from sqlalchemy.ext.declarative import declarative_base
+from alembic import op # noqa: E402
+from sqlalchemy import Column, Integer, Text # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
+from superset import db # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2021-04-30_10-55_453530256cea_add_save_option_column_to_db_model.py b/superset/migrations/versions/2021-04-30_10-55_453530256cea_add_save_option_column_to_db_model.py
index e3e3c20a3..a85551290 100644
--- a/superset/migrations/versions/2021-04-30_10-55_453530256cea_add_save_option_column_to_db_model.py
+++ b/superset/migrations/versions/2021-04-30_10-55_453530256cea_add_save_option_column_to_db_model.py
@@ -26,8 +26,8 @@ Create Date: 2021-04-30 10:55:07.009994
revision = "453530256cea"
down_revision = "f1410ed7ec95"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2021-07-09_12-18_ae1ed299413b_add_timezone_to_report_schedule.py b/superset/migrations/versions/2021-07-09_12-18_ae1ed299413b_add_timezone_to_report_schedule.py
index 000d581da..e9fd02556 100644
--- a/superset/migrations/versions/2021-07-09_12-18_ae1ed299413b_add_timezone_to_report_schedule.py
+++ b/superset/migrations/versions/2021-07-09_12-18_ae1ed299413b_add_timezone_to_report_schedule.py
@@ -26,8 +26,8 @@ Create Date: 2021-07-09 12:18:52.057815
revision = "ae1ed299413b"
down_revision = "030c840e3a1c"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2021-07-14_10-31_3317e9248280_add_creation_method_to_reports_model.py b/superset/migrations/versions/2021-07-14_10-31_3317e9248280_add_creation_method_to_reports_model.py
index 0a5608ae7..f77a69cea 100644
--- a/superset/migrations/versions/2021-07-14_10-31_3317e9248280_add_creation_method_to_reports_model.py
+++ b/superset/migrations/versions/2021-07-14_10-31_3317e9248280_add_creation_method_to_reports_model.py
@@ -26,8 +26,8 @@ Create Date: 2021-07-14 10:31:38.610095
revision = "3317e9248280"
down_revision = "453530256cea"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2021-07-21_12-09_030c840e3a1c_add_query_context_to_slices.py b/superset/migrations/versions/2021-07-21_12-09_030c840e3a1c_add_query_context_to_slices.py
index 5fa1dc2a5..f26314fef 100644
--- a/superset/migrations/versions/2021-07-21_12-09_030c840e3a1c_add_query_context_to_slices.py
+++ b/superset/migrations/versions/2021-07-21_12-09_030c840e3a1c_add_query_context_to_slices.py
@@ -26,9 +26,8 @@ Create Date: 2021-07-21 12:09:37.048337
revision = "030c840e3a1c"
down_revision = "3317e9248280"
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy.dialects import mysql
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2021-07-27_08-25_31b2a1039d4a_drop_tables_constraint.py b/superset/migrations/versions/2021-07-27_08-25_31b2a1039d4a_drop_tables_constraint.py
index 9773851ae..54dd1c5f3 100644
--- a/superset/migrations/versions/2021-07-27_08-25_31b2a1039d4a_drop_tables_constraint.py
+++ b/superset/migrations/versions/2021-07-27_08-25_31b2a1039d4a_drop_tables_constraint.py
@@ -24,7 +24,6 @@ Create Date: 2021-07-27 08:25:20.755453
from alembic import op
from sqlalchemy import engine
-from sqlalchemy.exc import OperationalError, ProgrammingError
from superset.utils.core import generic_find_uq_constraint_name
diff --git a/superset/migrations/versions/2021-08-02_16-39_e323605f370a_fix_schemas_allowed_for_csv_upload.py b/superset/migrations/versions/2021-08-02_16-39_e323605f370a_fix_schemas_allowed_for_csv_upload.py
index 245274726..6d8c1a4a1 100644
--- a/superset/migrations/versions/2021-08-02_16-39_e323605f370a_fix_schemas_allowed_for_csv_upload.py
+++ b/superset/migrations/versions/2021-08-02_16-39_e323605f370a_fix_schemas_allowed_for_csv_upload.py
@@ -21,6 +21,7 @@ Revises: 31b2a1039d4a
Create Date: 2021-08-02 16:39:45.329151
"""
+
import json
import logging
diff --git a/superset/migrations/versions/2021-08-02_21-14_6d20ba9ecb33_add_last_saved_at_to_slice_model.py b/superset/migrations/versions/2021-08-02_21-14_6d20ba9ecb33_add_last_saved_at_to_slice_model.py
index c149adbc5..e0aeea7c3 100644
--- a/superset/migrations/versions/2021-08-02_21-14_6d20ba9ecb33_add_last_saved_at_to_slice_model.py
+++ b/superset/migrations/versions/2021-08-02_21-14_6d20ba9ecb33_add_last_saved_at_to_slice_model.py
@@ -26,8 +26,8 @@ Create Date: 2021-08-02 21:14:58.200438
revision = "6d20ba9ecb33"
down_revision = "f6196627326f"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2021-08-03_15-36_143b6f2815da_migrate_pivot_table_v2_heatmaps_to_new_.py b/superset/migrations/versions/2021-08-03_15-36_143b6f2815da_migrate_pivot_table_v2_heatmaps_to_new_.py
index 8be11d3cf..861f163c3 100644
--- a/superset/migrations/versions/2021-08-03_15-36_143b6f2815da_migrate_pivot_table_v2_heatmaps_to_new_.py
+++ b/superset/migrations/versions/2021-08-03_15-36_143b6f2815da_migrate_pivot_table_v2_heatmaps_to_new_.py
@@ -26,13 +26,13 @@ Create Date: 2021-08-03 15:36:35.925420
revision = "143b6f2815da"
down_revision = "e323605f370a"
-import json
+import json # noqa: E402
-from alembic import op
-from sqlalchemy import and_, Column, Integer, String, Text
-from sqlalchemy.ext.declarative import declarative_base
+from alembic import op # noqa: E402
+from sqlalchemy import and_, Column, Integer, String, Text # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
+from superset import db # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2021-08-09_17-32_07071313dd52_change_fetch_values_predicate_to_text.py b/superset/migrations/versions/2021-08-09_17-32_07071313dd52_change_fetch_values_predicate_to_text.py
index ce90e37c8..b5d9715da 100644
--- a/superset/migrations/versions/2021-08-09_17-32_07071313dd52_change_fetch_values_predicate_to_text.py
+++ b/superset/migrations/versions/2021-08-09_17-32_07071313dd52_change_fetch_values_predicate_to_text.py
@@ -26,14 +26,14 @@ Create Date: 2021-08-09 17:32:56.204184
revision = "07071313dd52"
down_revision = "6d20ba9ecb33"
-import logging
+import logging # noqa: E402
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy import func
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
+from sqlalchemy import func # noqa: E402
-from superset import db
-from superset.connectors.sqla.models import SqlaTable
+from superset import db # noqa: E402
+from superset.connectors.sqla.models import SqlaTable # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2021-08-24_23-27_181091c0ef16_add_extra_column_to_columns_model.py b/superset/migrations/versions/2021-08-24_23-27_181091c0ef16_add_extra_column_to_columns_model.py
index 8ed0f0059..538f738d0 100644
--- a/superset/migrations/versions/2021-08-24_23-27_181091c0ef16_add_extra_column_to_columns_model.py
+++ b/superset/migrations/versions/2021-08-24_23-27_181091c0ef16_add_extra_column_to_columns_model.py
@@ -26,8 +26,8 @@ Create Date: 2021-08-24 23:27:30.403308
revision = "181091c0ef16"
down_revision = "021b81fe4fbb"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2021-08-31_11-37_021b81fe4fbb_add_type_to_native_filter_configuration.py b/superset/migrations/versions/2021-08-31_11-37_021b81fe4fbb_add_type_to_native_filter_configuration.py
index ae1b6c82e..32b5b1ade 100644
--- a/superset/migrations/versions/2021-08-31_11-37_021b81fe4fbb_add_type_to_native_filter_configuration.py
+++ b/superset/migrations/versions/2021-08-31_11-37_021b81fe4fbb_add_type_to_native_filter_configuration.py
@@ -26,14 +26,14 @@ Create Date: 2021-08-31 11:37:40.604081
revision = "021b81fe4fbb"
down_revision = "07071313dd52"
-import json
-import logging
+import json # noqa: E402
+import logging # noqa: E402
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy.ext.declarative import declarative_base
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
+from superset import db # noqa: E402
Base = declarative_base()
@@ -63,7 +63,7 @@ def upgrade():
continue
try:
json_meta = json.loads(dashboard.json_metadata)
- except:
+ except: # noqa: E722
logger.exception("[AddTypeToNativeFilter] Error loading json_metadata")
continue
@@ -103,7 +103,7 @@ def downgrade():
continue
try:
json_meta = json.loads(dashboard.json_metadata)
- except:
+ except: # noqa: E722
logger.exception("[RemoveTypeToNativeFilter] Error loading json_metadata")
continue
diff --git a/superset/migrations/versions/2021-09-19_14-42_b92d69a6643c_rename_csv_to_file.py b/superset/migrations/versions/2021-09-19_14-42_b92d69a6643c_rename_csv_to_file.py
index d86c8e911..2b8368407 100644
--- a/superset/migrations/versions/2021-09-19_14-42_b92d69a6643c_rename_csv_to_file.py
+++ b/superset/migrations/versions/2021-09-19_14-42_b92d69a6643c_rename_csv_to_file.py
@@ -26,9 +26,9 @@ Create Date: 2021-09-19 14:42:20.130368
revision = "b92d69a6643c"
down_revision = "aea15018d53b"
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy.engine.reflection import Inspector
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
+from sqlalchemy.engine.reflection import Inspector # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2021-09-27_11-31_60dc453f4e2e_migrate_timeseries_limit_metric_to_.py b/superset/migrations/versions/2021-09-27_11-31_60dc453f4e2e_migrate_timeseries_limit_metric_to_.py
index ab852c324..449cf7273 100644
--- a/superset/migrations/versions/2021-09-27_11-31_60dc453f4e2e_migrate_timeseries_limit_metric_to_.py
+++ b/superset/migrations/versions/2021-09-27_11-31_60dc453f4e2e_migrate_timeseries_limit_metric_to_.py
@@ -26,13 +26,13 @@ Create Date: 2021-09-27 11:31:53.453164
revision = "60dc453f4e2e"
down_revision = "3ebe0993c770"
-import json
+import json # noqa: E402
-from alembic import op
-from sqlalchemy import and_, Column, Integer, String, Text
-from sqlalchemy.ext.declarative import declarative_base
+from alembic import op # noqa: E402
+from sqlalchemy import and_, Column, Integer, String, Text # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
+from superset import db # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2021-10-12_11-15_32646df09c64_update_time_grain_sqla.py b/superset/migrations/versions/2021-10-12_11-15_32646df09c64_update_time_grain_sqla.py
index b85e9397e..f3cd97cca 100644
--- a/superset/migrations/versions/2021-10-12_11-15_32646df09c64_update_time_grain_sqla.py
+++ b/superset/migrations/versions/2021-10-12_11-15_32646df09c64_update_time_grain_sqla.py
@@ -26,13 +26,13 @@ Create Date: 2021-10-12 11:15:25.559532
revision = "32646df09c64"
down_revision = "60dc453f4e2e"
-import json
+import json # noqa: E402
-from alembic import op
-from sqlalchemy import Column, Integer, Text
-from sqlalchemy.ext.declarative import declarative_base
+from alembic import op # noqa: E402
+from sqlalchemy import Column, Integer, Text # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
+from superset import db # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2021-11-02_17-44_3ba29ecbaac5_change_datatype_of_type_in_basecolumn.py b/superset/migrations/versions/2021-11-02_17-44_3ba29ecbaac5_change_datatype_of_type_in_basecolumn.py
index 3488650bf..b97a6f14e 100644
--- a/superset/migrations/versions/2021-11-02_17-44_3ba29ecbaac5_change_datatype_of_type_in_basecolumn.py
+++ b/superset/migrations/versions/2021-11-02_17-44_3ba29ecbaac5_change_datatype_of_type_in_basecolumn.py
@@ -26,8 +26,8 @@ Create Date: 2021-11-02 17:44:51.792138
revision = "3ba29ecbaac5"
down_revision = "abe27eaf93db"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2021-11-11_04-18_0ca9e5f1dacd_rename_to_schemas_allowed_for_file_.py b/superset/migrations/versions/2021-11-11_04-18_0ca9e5f1dacd_rename_to_schemas_allowed_for_file_.py
index 629c29ecd..370cf508c 100644
--- a/superset/migrations/versions/2021-11-11_04-18_0ca9e5f1dacd_rename_to_schemas_allowed_for_file_.py
+++ b/superset/migrations/versions/2021-11-11_04-18_0ca9e5f1dacd_rename_to_schemas_allowed_for_file_.py
@@ -26,14 +26,14 @@ Create Date: 2021-11-11 04:18:26.171851
revision = "0ca9e5f1dacd"
down_revision = "b92d69a6643c"
-import json
-import logging
+import json # noqa: E402
+import logging # noqa: E402
-from alembic import op
-from sqlalchemy import Column, Integer, Text
-from sqlalchemy.ext.declarative import declarative_base
+from alembic import op # noqa: E402
+from sqlalchemy import Column, Integer, Text # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
+from superset import db # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2021-11-11_16-41_b8d3a24d9131_new_dataset_models.py b/superset/migrations/versions/2021-11-11_16-41_b8d3a24d9131_new_dataset_models.py
index e69d1606e..598712434 100644
--- a/superset/migrations/versions/2021-11-11_16-41_b8d3a24d9131_new_dataset_models.py
+++ b/superset/migrations/versions/2021-11-11_16-41_b8d3a24d9131_new_dataset_models.py
@@ -23,6 +23,7 @@ Revises: 5afbb1a5849b
Create Date: 2021-11-11 16:41:53.266965
"""
+
# revision identifiers, used by Alembic.
revision = "b8d3a24d9131"
down_revision = "5afbb1a5849b"
diff --git a/superset/migrations/versions/2021-12-02_12-03_abe27eaf93db_add_extra_config_column_to_alerts.py b/superset/migrations/versions/2021-12-02_12-03_abe27eaf93db_add_extra_config_column_to_alerts.py
index 2bc22cc2c..1cce70c00 100644
--- a/superset/migrations/versions/2021-12-02_12-03_abe27eaf93db_add_extra_config_column_to_alerts.py
+++ b/superset/migrations/versions/2021-12-02_12-03_abe27eaf93db_add_extra_config_column_to_alerts.py
@@ -26,10 +26,10 @@ Create Date: 2021-12-02 12:03:20.691171
revision = "abe27eaf93db"
down_revision = "0ca9e5f1dacd"
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy import String
-from sqlalchemy.sql import column, table
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
+from sqlalchemy import String # noqa: E402
+from sqlalchemy.sql import column, table # noqa: E402
report_schedule = table("report_schedule", column("extra", String))
diff --git a/superset/migrations/versions/2021-12-10_19-25_bb38f40aa3ff_add_force_screenshot_to_alerts_reports.py b/superset/migrations/versions/2021-12-10_19-25_bb38f40aa3ff_add_force_screenshot_to_alerts_reports.py
index 3495bea9e..38d8275d9 100644
--- a/superset/migrations/versions/2021-12-10_19-25_bb38f40aa3ff_add_force_screenshot_to_alerts_reports.py
+++ b/superset/migrations/versions/2021-12-10_19-25_bb38f40aa3ff_add_force_screenshot_to_alerts_reports.py
@@ -26,11 +26,11 @@ Create Date: 2021-12-10 19:25:29.802949
revision = "bb38f40aa3ff"
down_revision = "31bb738bd1d2"
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy.ext.declarative import declarative_base
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
+from superset import db # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2021-12-13_14-06_fe23025b9441_rename_big_viz_total_form_data_fields.py b/superset/migrations/versions/2021-12-13_14-06_fe23025b9441_rename_big_viz_total_form_data_fields.py
index d391348ee..11a46a919 100644
--- a/superset/migrations/versions/2021-12-13_14-06_fe23025b9441_rename_big_viz_total_form_data_fields.py
+++ b/superset/migrations/versions/2021-12-13_14-06_fe23025b9441_rename_big_viz_total_form_data_fields.py
@@ -26,14 +26,14 @@ Create Date: 2021-12-13 14:06:24.426970
revision = "fe23025b9441"
down_revision = "3ba29ecbaac5"
-import json
-import logging
+import json # noqa: E402
+import logging # noqa: E402
-from alembic import op
-from sqlalchemy import Column, Integer, String, Text
-from sqlalchemy.ext.declarative import declarative_base
+from alembic import op # noqa: E402
+from sqlalchemy import Column, Integer, String, Text # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
+from superset import db # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2021-12-15_15-05_c53bae8f08dd_add_saved_query_foreign_key_to_tab_state.py b/superset/migrations/versions/2021-12-15_15-05_c53bae8f08dd_add_saved_query_foreign_key_to_tab_state.py
index 2a2d66a1f..2f2a121bc 100644
--- a/superset/migrations/versions/2021-12-15_15-05_c53bae8f08dd_add_saved_query_foreign_key_to_tab_state.py
+++ b/superset/migrations/versions/2021-12-15_15-05_c53bae8f08dd_add_saved_query_foreign_key_to_tab_state.py
@@ -25,8 +25,8 @@ Create Date: 2021-12-15 15:05:21.845777
revision = "c53bae8f08dd"
down_revision = "bb38f40aa3ff"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2021-12-17_16-56_31bb738bd1d2_move_pivot_table_v2_legacy_order_by_to_.py b/superset/migrations/versions/2021-12-17_16-56_31bb738bd1d2_move_pivot_table_v2_legacy_order_by_to_.py
index 0adaa0d4f..7740e23da 100644
--- a/superset/migrations/versions/2021-12-17_16-56_31bb738bd1d2_move_pivot_table_v2_legacy_order_by_to_.py
+++ b/superset/migrations/versions/2021-12-17_16-56_31bb738bd1d2_move_pivot_table_v2_legacy_order_by_to_.py
@@ -27,14 +27,14 @@ revision = "31bb738bd1d2"
down_revision = "fe23025b9441"
-import json
-import logging
+import json # noqa: E402
+import logging # noqa: E402
-from alembic import op
-from sqlalchemy import Column, Integer, String, Text
-from sqlalchemy.ext.declarative import declarative_base
+from alembic import op # noqa: E402
+from sqlalchemy import Column, Integer, String, Text # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
+from superset import db # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2022-01-19_07-34_5fd49410a97a_add_columns_for_external_management.py b/superset/migrations/versions/2022-01-19_07-34_5fd49410a97a_add_columns_for_external_management.py
index 466c8dd5e..984c1fc7b 100644
--- a/superset/migrations/versions/2022-01-19_07-34_5fd49410a97a_add_columns_for_external_management.py
+++ b/superset/migrations/versions/2022-01-19_07-34_5fd49410a97a_add_columns_for_external_management.py
@@ -26,8 +26,8 @@ Create Date: 2022-01-19 07:34:20.594786
revision = "5fd49410a97a"
down_revision = "c53bae8f08dd"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2022-01-28_16-03_5afbb1a5849b_add_embedded_dahshoard_table.py b/superset/migrations/versions/2022-01-28_16-03_5afbb1a5849b_add_embedded_dahshoard_table.py
index 5d415e159..c78459573 100644
--- a/superset/migrations/versions/2022-01-28_16-03_5afbb1a5849b_add_embedded_dahshoard_table.py
+++ b/superset/migrations/versions/2022-01-28_16-03_5afbb1a5849b_add_embedded_dahshoard_table.py
@@ -26,11 +26,11 @@ Create Date: 2022-01-28 16:03:02.944080
revision = "5afbb1a5849b"
down_revision = "5fd49410a97a"
-from uuid import uuid4
+from uuid import uuid4 # noqa: E402
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy_utils import UUIDType
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
+from sqlalchemy_utils import UUIDType # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2022-02-25_08-06_ab9a9d86e695_deprecate_time_range_endpoints.py b/superset/migrations/versions/2022-02-25_08-06_ab9a9d86e695_deprecate_time_range_endpoints.py
index 148804d25..ea1464d97 100644
--- a/superset/migrations/versions/2022-02-25_08-06_ab9a9d86e695_deprecate_time_range_endpoints.py
+++ b/superset/migrations/versions/2022-02-25_08-06_ab9a9d86e695_deprecate_time_range_endpoints.py
@@ -21,6 +21,7 @@ Revises: b5a422d8e252
Create Date: 2022-02-25 08:06:14.835094
"""
+
# revision identifiers, used by Alembic.
revision = "ab9a9d86e695"
down_revision = "b5a422d8e252"
diff --git a/superset/migrations/versions/2022-03-02_16-41_7293b0ca7944_change_adhoc_filter_b_from_none_to_.py b/superset/migrations/versions/2022-03-02_16-41_7293b0ca7944_change_adhoc_filter_b_from_none_to_.py
index 65299701a..3c0eb7844 100644
--- a/superset/migrations/versions/2022-03-02_16-41_7293b0ca7944_change_adhoc_filter_b_from_none_to_.py
+++ b/superset/migrations/versions/2022-03-02_16-41_7293b0ca7944_change_adhoc_filter_b_from_none_to_.py
@@ -27,13 +27,13 @@ revision = "7293b0ca7944"
down_revision = "ab9a9d86e695"
-import json
+import json # noqa: E402
-from alembic import op
-from sqlalchemy import Column, Integer, String, Text
-from sqlalchemy.ext.declarative import declarative_base
+from alembic import op # noqa: E402
+from sqlalchemy import Column, Integer, String, Text # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
+from superset import db # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2022-03-04_09-59_6766938c6065_add_key_value_store.py b/superset/migrations/versions/2022-03-04_09-59_6766938c6065_add_key_value_store.py
index 26b1d28e0..f22ac167e 100644
--- a/superset/migrations/versions/2022-03-04_09-59_6766938c6065_add_key_value_store.py
+++ b/superset/migrations/versions/2022-03-04_09-59_6766938c6065_add_key_value_store.py
@@ -26,11 +26,11 @@ Create Date: 2022-03-04 09:59:26.922329
revision = "6766938c6065"
down_revision = "7293b0ca7944"
-from uuid import uuid4
+from uuid import uuid4 # noqa: E402
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy_utils import UUIDType
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
+from sqlalchemy_utils import UUIDType # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2022-03-16_21-07_8b841273bec3_sql_lab_models_database_constraint_updates.py b/superset/migrations/versions/2022-03-16_21-07_8b841273bec3_sql_lab_models_database_constraint_updates.py
index a497cf80f..c37cda732 100644
--- a/superset/migrations/versions/2022-03-16_21-07_8b841273bec3_sql_lab_models_database_constraint_updates.py
+++ b/superset/migrations/versions/2022-03-16_21-07_8b841273bec3_sql_lab_models_database_constraint_updates.py
@@ -26,10 +26,10 @@ Create Date: 2022-03-16 21:07:48.768425
revision = "8b841273bec3"
down_revision = "2ed890b36b94"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
-from superset.utils.core import generic_find_fk_constraint_name
+from superset.utils.core import generic_find_fk_constraint_name # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2022-03-16_23-24_58df9d617f14_add_on_saved_query_delete_tab_state_.py b/superset/migrations/versions/2022-03-16_23-24_58df9d617f14_add_on_saved_query_delete_tab_state_.py
index 57e13cf14..e57203fb0 100644
--- a/superset/migrations/versions/2022-03-16_23-24_58df9d617f14_add_on_saved_query_delete_tab_state_.py
+++ b/superset/migrations/versions/2022-03-16_23-24_58df9d617f14_add_on_saved_query_delete_tab_state_.py
@@ -26,10 +26,10 @@ Create Date: 2022-03-16 23:24:40.278937
revision = "58df9d617f14"
down_revision = "6766938c6065"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
-from superset.utils.core import generic_find_fk_constraint_name
+from superset.utils.core import generic_find_fk_constraint_name # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2022-04-01_14-38_a9422eeaae74_new_dataset_models_take_2.py b/superset/migrations/versions/2022-04-01_14-38_a9422eeaae74_new_dataset_models_take_2.py
index a1e0f855f..fd8825820 100644
--- a/superset/migrations/versions/2022-04-01_14-38_a9422eeaae74_new_dataset_models_take_2.py
+++ b/superset/migrations/versions/2022-04-01_14-38_a9422eeaae74_new_dataset_models_take_2.py
@@ -26,28 +26,28 @@ Create Date: 2022-04-01 14:38:09.499483
revision = "a9422eeaae74"
down_revision = "ad07e4fdbaba"
-import json
-import os
-from datetime import datetime
-from typing import Optional, Union
-from uuid import uuid4
+import json # noqa: E402
+import os # noqa: E402
+from datetime import datetime # noqa: E402
+from typing import Optional, Union # noqa: E402
+from uuid import uuid4 # noqa: E402
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy import select
-from sqlalchemy.ext.declarative import declarative_base, declared_attr
-from sqlalchemy.orm import backref, relationship, Session
-from sqlalchemy.schema import UniqueConstraint
-from sqlalchemy.sql import functions as func
-from sqlalchemy.sql.expression import and_, or_
-from sqlalchemy_utils import UUIDType
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
+from sqlalchemy import select # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base, declared_attr # noqa: E402
+from sqlalchemy.orm import backref, relationship, Session # noqa: E402
+from sqlalchemy.schema import UniqueConstraint # noqa: E402
+from sqlalchemy.sql import functions as func # noqa: E402
+from sqlalchemy.sql.expression import and_, or_ # noqa: E402
+from sqlalchemy_utils import UUIDType # noqa: E402
-from superset.connectors.sqla.models import ADDITIVE_METRIC_TYPES_LOWER
-from superset.connectors.sqla.utils import get_dialect_name, get_identifier_quoter
-from superset.extensions import encrypted_field_factory
-from superset.migrations.shared.utils import assign_uuids
-from superset.sql_parse import extract_table_references, Table
-from superset.utils.core import MediumText
+from superset.connectors.sqla.models import ADDITIVE_METRIC_TYPES_LOWER # noqa: E402
+from superset.connectors.sqla.utils import get_dialect_name, get_identifier_quoter # noqa: E402
+from superset.extensions import encrypted_field_factory # noqa: E402
+from superset.migrations.shared.utils import assign_uuids # noqa: E402
+from superset.sql_parse import extract_table_references, Table # noqa: E402
+from superset.utils.core import MediumText # noqa: E402
Base = declarative_base()
SHOW_PROGRESS = os.environ.get("SHOW_PROGRESS") == "1"
@@ -336,9 +336,8 @@ def copy_tables(session: Session) -> None:
]
)
# use an inner join to filter out only tables with valid database ids
- .select_from(
- sa.join(SqlaTable, Database, SqlaTable.database_id == Database.id)
- ).where(is_physical_table),
+ .select_from(sa.join(SqlaTable, Database, SqlaTable.database_id == Database.id))
+ .where(is_physical_table),
)
diff --git a/superset/migrations/versions/2022-04-04_15-04_b0d0249074e4_deprecate_time_range_endpoints_v2.py b/superset/migrations/versions/2022-04-04_15-04_b0d0249074e4_deprecate_time_range_endpoints_v2.py
index 90ee62d3f..2d0f01477 100644
--- a/superset/migrations/versions/2022-04-04_15-04_b0d0249074e4_deprecate_time_range_endpoints_v2.py
+++ b/superset/migrations/versions/2022-04-04_15-04_b0d0249074e4_deprecate_time_range_endpoints_v2.py
@@ -21,6 +21,7 @@ Revises: 2ed890b36b94
Create Date: 2022-04-04 15:04:05.606340
"""
+
import json
from alembic import op
diff --git a/superset/migrations/versions/2022-04-06_14-10_9d8a8d575284_.py b/superset/migrations/versions/2022-04-06_14-10_9d8a8d575284_.py
index fbbfac231..2eee84b83 100644
--- a/superset/migrations/versions/2022-04-06_14-10_9d8a8d575284_.py
+++ b/superset/migrations/versions/2022-04-06_14-10_9d8a8d575284_.py
@@ -26,9 +26,6 @@ Create Date: 2022-04-06 14:10:40.433050
revision = "9d8a8d575284"
down_revision = ("8b841273bec3", "b0d0249074e4")
-import sqlalchemy as sa
-from alembic import op
-
def upgrade():
pass
diff --git a/superset/migrations/versions/2022-04-18_11-20_ad07e4fdbaba_rm_time_range_endpoints_from_qc_3.py b/superset/migrations/versions/2022-04-18_11-20_ad07e4fdbaba_rm_time_range_endpoints_from_qc_3.py
index 30efb1a08..2087bc3e0 100644
--- a/superset/migrations/versions/2022-04-18_11-20_ad07e4fdbaba_rm_time_range_endpoints_from_qc_3.py
+++ b/superset/migrations/versions/2022-04-18_11-20_ad07e4fdbaba_rm_time_range_endpoints_from_qc_3.py
@@ -26,13 +26,13 @@ Create Date: 2022-04-18 11:20:47.390901
revision = "ad07e4fdbaba"
down_revision = "cecc6bf46990"
-import json
+import json # noqa: E402
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy.ext.declarative import declarative_base
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
+from superset import db # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2022-05-03_19-39_cbe71abde154_fix_report_schedule_and_log.py b/superset/migrations/versions/2022-05-03_19-39_cbe71abde154_fix_report_schedule_and_log.py
index d00b60cd1..89f9dc327 100644
--- a/superset/migrations/versions/2022-05-03_19-39_cbe71abde154_fix_report_schedule_and_log.py
+++ b/superset/migrations/versions/2022-05-03_19-39_cbe71abde154_fix_report_schedule_and_log.py
@@ -26,12 +26,12 @@ Create Date: 2022-05-03 19:39:32.074608
revision = "cbe71abde154"
down_revision = "a9422eeaae74"
-from alembic import op
-from sqlalchemy import Column, Float, Integer, String, Text
-from sqlalchemy.ext.declarative import declarative_base
+from alembic import op # noqa: E402
+from sqlalchemy import Column, Float, Integer, String, Text # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
-from superset.reports.models import ReportState
+from superset import db # noqa: E402
+from superset.reports.models import ReportState # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2022-05-18_16-07_e786798587de_delete_none_permissions.py b/superset/migrations/versions/2022-05-18_16-07_e786798587de_delete_none_permissions.py
index e79c7a437..d89ccf653 100644
--- a/superset/migrations/versions/2022-05-18_16-07_e786798587de_delete_none_permissions.py
+++ b/superset/migrations/versions/2022-05-18_16-07_e786798587de_delete_none_permissions.py
@@ -26,8 +26,8 @@ Create Date: 2022-05-18 16:07:47.648514
revision = "e786798587de"
down_revision = "6f139c533bea"
-from alembic import op
-from sqlalchemy import (
+from alembic import op # noqa: E402
+from sqlalchemy import ( # noqa: E402
Column,
ForeignKey,
Integer,
@@ -36,8 +36,8 @@ from sqlalchemy import (
Table,
UniqueConstraint,
)
-from sqlalchemy.ext.declarative import declarative_base
-from sqlalchemy.orm import relationship, Session
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
+from sqlalchemy.orm import relationship, Session # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2022-06-14_15-28_e09b4ae78457_resize_key_value_blob.py b/superset/migrations/versions/2022-06-14_15-28_e09b4ae78457_resize_key_value_blob.py
index 34800d4a5..095a03190 100644
--- a/superset/migrations/versions/2022-06-14_15-28_e09b4ae78457_resize_key_value_blob.py
+++ b/superset/migrations/versions/2022-06-14_15-28_e09b4ae78457_resize_key_value_blob.py
@@ -26,8 +26,8 @@ Create Date: 2022-06-14 15:28:42.746349
revision = "e09b4ae78457"
down_revision = "e786798587de"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2022-06-19_16-17_f3afaf1f11f0_add_unique_name_desc_rls.py b/superset/migrations/versions/2022-06-19_16-17_f3afaf1f11f0_add_unique_name_desc_rls.py
index 474824fcc..34f5522eb 100644
--- a/superset/migrations/versions/2022-06-19_16-17_f3afaf1f11f0_add_unique_name_desc_rls.py
+++ b/superset/migrations/versions/2022-06-19_16-17_f3afaf1f11f0_add_unique_name_desc_rls.py
@@ -26,10 +26,10 @@ Create Date: 2022-06-19 16:17:23.318618
revision = "f3afaf1f11f0"
down_revision = "e09b4ae78457"
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy.ext.declarative import declarative_base
-from sqlalchemy.orm import Session
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
+from sqlalchemy.orm import Session # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2022-06-27_14-59_7fb8bca906d2_permalink_rename_filterstate.py b/superset/migrations/versions/2022-06-27_14-59_7fb8bca906d2_permalink_rename_filterstate.py
index 0b76404dc..194760d2f 100644
--- a/superset/migrations/versions/2022-06-27_14-59_7fb8bca906d2_permalink_rename_filterstate.py
+++ b/superset/migrations/versions/2022-06-27_14-59_7fb8bca906d2_permalink_rename_filterstate.py
@@ -26,15 +26,15 @@ Create Date: 2022-06-27 14:59:20.740380
revision = "7fb8bca906d2"
down_revision = "f3afaf1f11f0"
-import pickle
+import pickle # noqa: E402
-from alembic import op
-from sqlalchemy import Column, Integer, LargeBinary, String
-from sqlalchemy.ext.declarative import declarative_base
-from sqlalchemy.orm import Session
+from alembic import op # noqa: E402
+from sqlalchemy import Column, Integer, LargeBinary, String # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
+from sqlalchemy.orm import Session # noqa: E402
-from superset import db
-from superset.migrations.shared.utils import paginated_update
+from superset import db # noqa: E402
+from superset.migrations.shared.utils import paginated_update # noqa: E402
Base = declarative_base()
VALUE_MAX_SIZE = 2**24 - 1
diff --git a/superset/migrations/versions/2022-07-05_15-48_409c7b420ab0_add_created_by_fk_as_owner.py b/superset/migrations/versions/2022-07-05_15-48_409c7b420ab0_add_created_by_fk_as_owner.py
index 6cdf9f689..57c5af4f2 100644
--- a/superset/migrations/versions/2022-07-05_15-48_409c7b420ab0_add_created_by_fk_as_owner.py
+++ b/superset/migrations/versions/2022-07-05_15-48_409c7b420ab0_add_created_by_fk_as_owner.py
@@ -96,7 +96,7 @@ def upgrade():
DatasetUser.user_id == Dataset.created_by_fk,
),
)
- .filter(DatasetUser.dataset_id == None, Dataset.created_by_fk != None),
+ .filter(DatasetUser.dataset_id == None, Dataset.created_by_fk != None), # noqa: E711
)
)
@@ -111,7 +111,7 @@ def upgrade():
SliceUser.user_id == Slice.created_by_fk,
),
)
- .filter(SliceUser.slice_id == None),
+ .filter(SliceUser.slice_id == None), # noqa: E711
)
)
@@ -126,7 +126,7 @@ def upgrade():
SqlaTableUser.user_id == SqlaTable.created_by_fk,
),
)
- .filter(SqlaTableUser.table_id == None),
+ .filter(SqlaTableUser.table_id == None), # noqa: E711
)
)
diff --git a/superset/migrations/versions/2022-07-07_00-00_cdcf3d64daf4_add_user_id_dttm_idx_to_log_model.py b/superset/migrations/versions/2022-07-07_00-00_cdcf3d64daf4_add_user_id_dttm_idx_to_log_model.py
index 1122571e1..9a2bfad53 100644
--- a/superset/migrations/versions/2022-07-07_00-00_cdcf3d64daf4_add_user_id_dttm_idx_to_log_model.py
+++ b/superset/migrations/versions/2022-07-07_00-00_cdcf3d64daf4_add_user_id_dttm_idx_to_log_model.py
@@ -27,7 +27,7 @@ revision = "cdcf3d64daf4"
down_revision = "7fb8bca906d2"
-from alembic import op
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2022-07-07_13-00_c747c78868b6_migrating_legacy_treemap.py b/superset/migrations/versions/2022-07-07_13-00_c747c78868b6_migrating_legacy_treemap.py
index be222cdfd..ff67fc5f2 100644
--- a/superset/migrations/versions/2022-07-07_13-00_c747c78868b6_migrating_legacy_treemap.py
+++ b/superset/migrations/versions/2022-07-07_13-00_c747c78868b6_migrating_legacy_treemap.py
@@ -21,6 +21,7 @@ Revises: cdcf3d64daf4
Create Date: 2022-06-30 22:04:17.686635
"""
+
from alembic import op
from sqlalchemy.dialects.mysql.base import MySQLDialect
diff --git a/superset/migrations/versions/2022-07-07_14-00_06e1e70058c7_migrating_legacy_area.py b/superset/migrations/versions/2022-07-07_14-00_06e1e70058c7_migrating_legacy_area.py
index a43f027e2..adeaabac0 100644
--- a/superset/migrations/versions/2022-07-07_14-00_06e1e70058c7_migrating_legacy_area.py
+++ b/superset/migrations/versions/2022-07-07_14-00_06e1e70058c7_migrating_legacy_area.py
@@ -21,6 +21,7 @@ Revises: c747c78868b6
Create Date: 2022-06-13 14:17:51.872706
"""
+
from alembic import op
from superset import db
diff --git a/superset/migrations/versions/2022-07-11_11-26_ffa79af61a56_rename_report_schedule_extra_to_extra_.py b/superset/migrations/versions/2022-07-11_11-26_ffa79af61a56_rename_report_schedule_extra_to_extra_.py
index 8de19e126..e3e973b18 100644
--- a/superset/migrations/versions/2022-07-11_11-26_ffa79af61a56_rename_report_schedule_extra_to_extra_.py
+++ b/superset/migrations/versions/2022-07-11_11-26_ffa79af61a56_rename_report_schedule_extra_to_extra_.py
@@ -28,8 +28,8 @@ Create Date: 2022-07-11 11:26:00.010714
revision = "ffa79af61a56"
down_revision = "409c7b420ab0"
-from alembic import op
-from sqlalchemy.types import Text
+from alembic import op # noqa: E402
+from sqlalchemy.types import Text # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2022-07-19_15-16_a39867932713_query_context_to_mediumtext.py b/superset/migrations/versions/2022-07-19_15-16_a39867932713_query_context_to_mediumtext.py
index 027b8c77d..39651ccec 100644
--- a/superset/migrations/versions/2022-07-19_15-16_a39867932713_query_context_to_mediumtext.py
+++ b/superset/migrations/versions/2022-07-19_15-16_a39867932713_query_context_to_mediumtext.py
@@ -21,6 +21,7 @@ Revises: 06e1e70058c7
Create Date: 2022-07-19 15:16:06.091961
"""
+
from alembic import op
from sqlalchemy.dialects.mysql.base import MySQLDialect
diff --git a/superset/migrations/versions/2022-08-16_15-23_6d3c6f9d665d_fix_table_chart_conditional_formatting_.py b/superset/migrations/versions/2022-08-16_15-23_6d3c6f9d665d_fix_table_chart_conditional_formatting_.py
index 8d9f07093..37b28c0ac 100644
--- a/superset/migrations/versions/2022-08-16_15-23_6d3c6f9d665d_fix_table_chart_conditional_formatting_.py
+++ b/superset/migrations/versions/2022-08-16_15-23_6d3c6f9d665d_fix_table_chart_conditional_formatting_.py
@@ -21,6 +21,7 @@ Revises: ffa79af61a56
Create Date: 2022-08-16 15:23:42.860038
"""
+
import json
from alembic import op
diff --git a/superset/migrations/versions/2022-08-31_19-30_291f024254b5_drop_column_allow_multi_schema_metadata_fetch.py b/superset/migrations/versions/2022-08-31_19-30_291f024254b5_drop_column_allow_multi_schema_metadata_fetch.py
index fadcb3dda..3060bef0e 100644
--- a/superset/migrations/versions/2022-08-31_19-30_291f024254b5_drop_column_allow_multi_schema_metadata_fetch.py
+++ b/superset/migrations/versions/2022-08-31_19-30_291f024254b5_drop_column_allow_multi_schema_metadata_fetch.py
@@ -27,8 +27,8 @@ Create Date: 2022-08-31 19:30:33.665025
revision = "291f024254b5"
down_revision = "6d3c6f9d665d"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2022-10-03_17-34_deb4c9d4a4ef_parameters_in_saved_queries.py b/superset/migrations/versions/2022-10-03_17-34_deb4c9d4a4ef_parameters_in_saved_queries.py
index af3f6157a..2d6f74108 100644
--- a/superset/migrations/versions/2022-10-03_17-34_deb4c9d4a4ef_parameters_in_saved_queries.py
+++ b/superset/migrations/versions/2022-10-03_17-34_deb4c9d4a4ef_parameters_in_saved_queries.py
@@ -26,8 +26,8 @@ Create Date: 2022-10-03 17:34:00.721559
revision = "deb4c9d4a4ef"
down_revision = "291f024254b5"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2022-10-20_10-48_f3c2d8ec8595_create_ssh_tunnel_credentials_tbl.py b/superset/migrations/versions/2022-10-20_10-48_f3c2d8ec8595_create_ssh_tunnel_credentials_tbl.py
index b373020cb..147ad8d37 100644
--- a/superset/migrations/versions/2022-10-20_10-48_f3c2d8ec8595_create_ssh_tunnel_credentials_tbl.py
+++ b/superset/migrations/versions/2022-10-20_10-48_f3c2d8ec8595_create_ssh_tunnel_credentials_tbl.py
@@ -26,14 +26,14 @@ Create Date: 2022-10-20 10:48:08.722861
revision = "f3c2d8ec8595"
down_revision = "4ce1d9b25135"
-from uuid import uuid4
+from uuid import uuid4 # noqa: E402
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy_utils import UUIDType
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
+from sqlalchemy_utils import UUIDType # noqa: E402
-from superset import app
-from superset.extensions import encrypted_field_factory
+from superset import app # noqa: E402
+from superset.extensions import encrypted_field_factory # noqa: E402
app_config = app.config
diff --git a/superset/migrations/versions/2022-11-28_17-51_4ce1d9b25135_remove_filter_bar_orientation.py b/superset/migrations/versions/2022-11-28_17-51_4ce1d9b25135_remove_filter_bar_orientation.py
index 07ee47b98..86ee15350 100644
--- a/superset/migrations/versions/2022-11-28_17-51_4ce1d9b25135_remove_filter_bar_orientation.py
+++ b/superset/migrations/versions/2022-11-28_17-51_4ce1d9b25135_remove_filter_bar_orientation.py
@@ -26,13 +26,13 @@ Create Date: 2022-11-28 17:51:08.954439
revision = "4ce1d9b25135"
down_revision = "deb4c9d4a4ef"
-import json
+import json # noqa: E402
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy.ext.declarative import declarative_base
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
+from superset import db # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2023-02-28_14-46_c0a3ea245b61_remove_show_native_filters.py b/superset/migrations/versions/2023-02-28_14-46_c0a3ea245b61_remove_show_native_filters.py
index b5a00eec8..3954a7cb9 100644
--- a/superset/migrations/versions/2023-02-28_14-46_c0a3ea245b61_remove_show_native_filters.py
+++ b/superset/migrations/versions/2023-02-28_14-46_c0a3ea245b61_remove_show_native_filters.py
@@ -26,13 +26,13 @@ Create Date: 2023-02-28 14:46:59.597847
revision = "c0a3ea245b61"
down_revision = "9c2a5681ddfd"
-import json
+import json # noqa: E402
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy.ext.declarative import declarative_base
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
+from superset import db # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2023-03-05_10-06_d0ac08bb5b83_invert_horizontal_bar_chart_order.py b/superset/migrations/versions/2023-03-05_10-06_d0ac08bb5b83_invert_horizontal_bar_chart_order.py
index 6003c70d6..df97cdb21 100644
--- a/superset/migrations/versions/2023-03-05_10-06_d0ac08bb5b83_invert_horizontal_bar_chart_order.py
+++ b/superset/migrations/versions/2023-03-05_10-06_d0ac08bb5b83_invert_horizontal_bar_chart_order.py
@@ -26,14 +26,13 @@ Create Date: 2023-03-05 10:06:23.250310
revision = "d0ac08bb5b83"
down_revision = "c0a3ea245b61"
-import json
+import json # noqa: E402
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy import and_, Column, Integer, String, Text
-from sqlalchemy.ext.declarative import declarative_base
+from alembic import op # noqa: E402
+from sqlalchemy import and_, Column, Integer, String, Text # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
+from superset import db # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2023-03-17_13-24_b5ea9d343307_bar_chart_stack_options.py b/superset/migrations/versions/2023-03-17_13-24_b5ea9d343307_bar_chart_stack_options.py
index 49844cda1..983384b3f 100644
--- a/superset/migrations/versions/2023-03-17_13-24_b5ea9d343307_bar_chart_stack_options.py
+++ b/superset/migrations/versions/2023-03-17_13-24_b5ea9d343307_bar_chart_stack_options.py
@@ -26,14 +26,13 @@ Create Date: 2023-03-17 13:24:54.662754
revision = "b5ea9d343307"
down_revision = "d0ac08bb5b83"
-import json
+import json # noqa: E402
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy import and_, Column, Integer, String, Text
-from sqlalchemy.ext.declarative import declarative_base
+from alembic import op # noqa: E402
+from sqlalchemy import Column, Integer, String, Text # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
+from superset import db # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2023-03-27_12-30_7e67aecbf3f1_chart_ds_constraint.py b/superset/migrations/versions/2023-03-27_12-30_7e67aecbf3f1_chart_ds_constraint.py
index 4c6941fe8..7b877d17e 100644
--- a/superset/migrations/versions/2023-03-27_12-30_7e67aecbf3f1_chart_ds_constraint.py
+++ b/superset/migrations/versions/2023-03-27_12-30_7e67aecbf3f1_chart_ds_constraint.py
@@ -26,14 +26,14 @@ Create Date: 2023-03-27 12:30:01.164594
revision = "7e67aecbf3f1"
down_revision = "07f9a902af1b"
-import json
-import logging
+import json # noqa: E402
+import logging # noqa: E402
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy.ext.declarative import declarative_base
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
+from superset import db # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2023-03-29_20-30_07f9a902af1b_drop_postgres_enum_constrains_for_tags.py b/superset/migrations/versions/2023-03-29_20-30_07f9a902af1b_drop_postgres_enum_constrains_for_tags.py
index 638ba3e46..f308e8667 100644
--- a/superset/migrations/versions/2023-03-29_20-30_07f9a902af1b_drop_postgres_enum_constrains_for_tags.py
+++ b/superset/migrations/versions/2023-03-29_20-30_07f9a902af1b_drop_postgres_enum_constrains_for_tags.py
@@ -26,8 +26,8 @@ Create Date: 2023-03-29 20:30:10.214951
revision = "07f9a902af1b"
down_revision = "b5ea9d343307"
-from alembic import op
-from sqlalchemy.dialects import postgresql
+from alembic import op # noqa: E402
+from sqlalchemy.dialects import postgresql # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2023-05-01_12-03_9c2a5681ddfd_convert_key_value_entries_to_json.py b/superset/migrations/versions/2023-05-01_12-03_9c2a5681ddfd_convert_key_value_entries_to_json.py
index 6e55f3ddc..fd1d728f0 100644
--- a/superset/migrations/versions/2023-05-01_12-03_9c2a5681ddfd_convert_key_value_entries_to_json.py
+++ b/superset/migrations/versions/2023-05-01_12-03_9c2a5681ddfd_convert_key_value_entries_to_json.py
@@ -26,17 +26,17 @@ Create Date: 2023-05-01 12:03:17.079862
revision = "9c2a5681ddfd"
down_revision = "f3c2d8ec8595"
-import io
-import json
-import pickle
+import io # noqa: E402
+import json # noqa: E402
+import pickle # noqa: E402
-from alembic import op
-from sqlalchemy import Column, Integer, LargeBinary, String
-from sqlalchemy.ext.declarative import declarative_base
-from sqlalchemy.orm import Session
+from alembic import op # noqa: E402
+from sqlalchemy import Column, Integer, LargeBinary, String # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
+from sqlalchemy.orm import Session # noqa: E402
-from superset import db
-from superset.migrations.shared.utils import paginated_update
+from superset import db # noqa: E402
+from superset.migrations.shared.utils import paginated_update # noqa: E402
Base = declarative_base()
VALUE_MAX_SIZE = 2**24 - 1
diff --git a/superset/migrations/versions/2023-05-11_12-41_4ea966691069_cross_filter_global_scoping.py b/superset/migrations/versions/2023-05-11_12-41_4ea966691069_cross_filter_global_scoping.py
index 76bcc932f..52128f8fd 100644
--- a/superset/migrations/versions/2023-05-11_12-41_4ea966691069_cross_filter_global_scoping.py
+++ b/superset/migrations/versions/2023-05-11_12-41_4ea966691069_cross_filter_global_scoping.py
@@ -26,16 +26,16 @@ Create Date: 2023-05-11 12:41:38.095717
revision = "4ea966691069"
down_revision = "7e67aecbf3f1"
-import copy
-import json
-import logging
+import copy # noqa: E402
+import json # noqa: E402
+import logging # noqa: E402
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy.ext.declarative import declarative_base
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
-from superset.migrations.shared.utils import paginated_update
+from superset import db # noqa: E402
+from superset.migrations.shared.utils import paginated_update # noqa: E402
Base = declarative_base()
logger = logging.getLogger(__name__)
@@ -81,9 +81,9 @@ def upgrade():
"excluded"
] = excluded
if scope.get("rootPath") == ["ROOT_ID"] and excluded == [chart_id]:
- new_chart_configuration[chart_id]["crossFilters"][
- "scope"
- ] = "global"
+ new_chart_configuration[chart_id]["crossFilters"]["scope"] = (
+ "global"
+ )
json_metadata["chart_configuration"] = new_chart_configuration
diff --git a/superset/migrations/versions/2023-06-01_13-13_83e1abbe777f_drop_access_request.py b/superset/migrations/versions/2023-06-01_13-13_83e1abbe777f_drop_access_request.py
index a95650ec5..4e08a1b12 100644
--- a/superset/migrations/versions/2023-06-01_13-13_83e1abbe777f_drop_access_request.py
+++ b/superset/migrations/versions/2023-06-01_13-13_83e1abbe777f_drop_access_request.py
@@ -26,8 +26,8 @@ Create Date: 2023-06-01 13:13:18.147362
revision = "83e1abbe777f"
down_revision = "ae58e1e58e5c"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2023-06-08_09-02_9ba2ce3086e5_migrate_pivot_table_v1_to_v2.py b/superset/migrations/versions/2023-06-08_09-02_9ba2ce3086e5_migrate_pivot_table_v1_to_v2.py
index 917408a5c..f7c7a63ae 100644
--- a/superset/migrations/versions/2023-06-08_09-02_9ba2ce3086e5_migrate_pivot_table_v1_to_v2.py
+++ b/superset/migrations/versions/2023-06-08_09-02_9ba2ce3086e5_migrate_pivot_table_v1_to_v2.py
@@ -21,6 +21,7 @@ Revises: 4ea966691069
Create Date: 2023-08-06 09:02:10.148992
"""
+
from alembic import op
from superset import db
diff --git a/superset/migrations/versions/2023-06-08_10-22_4c5da39be729_migrate_treemap_chart.py b/superset/migrations/versions/2023-06-08_10-22_4c5da39be729_migrate_treemap_chart.py
index 2f36ed850..e80102b02 100644
--- a/superset/migrations/versions/2023-06-08_10-22_4c5da39be729_migrate_treemap_chart.py
+++ b/superset/migrations/versions/2023-06-08_10-22_4c5da39be729_migrate_treemap_chart.py
@@ -21,6 +21,7 @@ Revises: 9ba2ce3086e5
Create Date: 2023-06-08 10:22:23.192064
"""
+
from alembic import op
from sqlalchemy.dialects.mysql.base import MySQLDialect
diff --git a/superset/migrations/versions/2023-06-08_11-34_ae58e1e58e5c_migrate_dual_line_to_mixed_chart.py b/superset/migrations/versions/2023-06-08_11-34_ae58e1e58e5c_migrate_dual_line_to_mixed_chart.py
index 5b371e727..ba98d6e3f 100644
--- a/superset/migrations/versions/2023-06-08_11-34_ae58e1e58e5c_migrate_dual_line_to_mixed_chart.py
+++ b/superset/migrations/versions/2023-06-08_11-34_ae58e1e58e5c_migrate_dual_line_to_mixed_chart.py
@@ -21,6 +21,7 @@ Revises: 4c5da39be729
Create Date: 2023-06-08 11:34:36.241939
"""
+
from alembic import op
from superset import db
@@ -29,7 +30,7 @@ from superset import db
revision = "ae58e1e58e5c"
down_revision = "4c5da39be729"
-from superset.migrations.shared.migrate_viz.processors import MigrateDualLine
+from superset.migrations.shared.migrate_viz.processors import MigrateDualLine # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2023-06-21_14-02_90139bf715e4_add_currency_column_to_metrics.py b/superset/migrations/versions/2023-06-21_14-02_90139bf715e4_add_currency_column_to_metrics.py
index 7d6f0f2ba..005f4ce40 100644
--- a/superset/migrations/versions/2023-06-21_14-02_90139bf715e4_add_currency_column_to_metrics.py
+++ b/superset/migrations/versions/2023-06-21_14-02_90139bf715e4_add_currency_column_to_metrics.py
@@ -26,8 +26,8 @@ Create Date: 2023-06-21 14:02:08.200955
revision = "90139bf715e4"
down_revision = "83e1abbe777f"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2023-06-22_13-39_6fbe660cac39_add_on_delete_cascade_for_tables_references.py b/superset/migrations/versions/2023-06-22_13-39_6fbe660cac39_add_on_delete_cascade_for_tables_references.py
index 0eab2b8bb..ce25989b4 100644
--- a/superset/migrations/versions/2023-06-22_13-39_6fbe660cac39_add_on_delete_cascade_for_tables_references.py
+++ b/superset/migrations/versions/2023-06-22_13-39_6fbe660cac39_add_on_delete_cascade_for_tables_references.py
@@ -26,7 +26,7 @@ Create Date: 2023-06-22 13:39:47.989373
revision = "6fbe660cac39"
down_revision = "90139bf715e4"
-from superset.migrations.shared.constraints import ForeignKey, redefine
+from superset.migrations.shared.constraints import ForeignKey, redefine # noqa: E402
foreign_keys = [
ForeignKey(
diff --git a/superset/migrations/versions/2023-06-28_19-49_bf646a0c1501_json_metadata.py b/superset/migrations/versions/2023-06-28_19-49_bf646a0c1501_json_metadata.py
index ad6845693..3f409af73 100644
--- a/superset/migrations/versions/2023-06-28_19-49_bf646a0c1501_json_metadata.py
+++ b/superset/migrations/versions/2023-06-28_19-49_bf646a0c1501_json_metadata.py
@@ -22,7 +22,6 @@ Create Date: 2023-06-28 19:49:59.217255
"""
-
import sqlalchemy as sa
from alembic import op
diff --git a/superset/migrations/versions/2023-06-29_18-38_240d23c7f86f_update_tag_model_w_description.py b/superset/migrations/versions/2023-06-29_18-38_240d23c7f86f_update_tag_model_w_description.py
index dd3558201..a6d05e11a 100644
--- a/superset/migrations/versions/2023-06-29_18-38_240d23c7f86f_update_tag_model_w_description.py
+++ b/superset/migrations/versions/2023-06-29_18-38_240d23c7f86f_update_tag_model_w_description.py
@@ -26,9 +26,8 @@ Create Date: 2023-06-29 18:38:30.033529
revision = "240d23c7f86f"
down_revision = "8e5b0fb85b9a"
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy.dialects import postgresql
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2023-07-07_20-06_f92a3124dd66_drop_rouge_constraints_and_tables.py b/superset/migrations/versions/2023-07-07_20-06_f92a3124dd66_drop_rouge_constraints_and_tables.py
index 7cf346511..b9aae9595 100644
--- a/superset/migrations/versions/2023-07-07_20-06_f92a3124dd66_drop_rouge_constraints_and_tables.py
+++ b/superset/migrations/versions/2023-07-07_20-06_f92a3124dd66_drop_rouge_constraints_and_tables.py
@@ -26,10 +26,10 @@ Create Date: 2023-07-07 20:06:22.659096
revision = "f92a3124dd66"
down_revision = "240d23c7f86f"
-from alembic import op
-from sqlalchemy.engine.reflection import Inspector
+from alembic import op # noqa: E402
+from sqlalchemy.engine.reflection import Inspector # noqa: E402
-from superset.utils.core import generic_find_fk_constraint_name
+from superset.utils.core import generic_find_fk_constraint_name # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2023-07-11_15-51_6d05b0a70c89_add_on_delete_cascade_for_owners_references.py b/superset/migrations/versions/2023-07-11_15-51_6d05b0a70c89_add_on_delete_cascade_for_owners_references.py
index 1303f9b39..bad2801a6 100644
--- a/superset/migrations/versions/2023-07-11_15-51_6d05b0a70c89_add_on_delete_cascade_for_owners_references.py
+++ b/superset/migrations/versions/2023-07-11_15-51_6d05b0a70c89_add_on_delete_cascade_for_owners_references.py
@@ -26,7 +26,7 @@ Create Date: 2023-07-11 15:51:57.964635
revision = "6d05b0a70c89"
down_revision = "f92a3124dd66"
-from superset.migrations.shared.constraints import ForeignKey, redefine
+from superset.migrations.shared.constraints import ForeignKey, redefine # noqa: E402
foreign_keys = [
ForeignKey(
diff --git a/superset/migrations/versions/2023-07-12_20-34_e0f6f91c2055_create_user_favorite_table.py b/superset/migrations/versions/2023-07-12_20-34_e0f6f91c2055_create_user_favorite_table.py
index 5a519bae1..2cf0e02ce 100644
--- a/superset/migrations/versions/2023-07-12_20-34_e0f6f91c2055_create_user_favorite_table.py
+++ b/superset/migrations/versions/2023-07-12_20-34_e0f6f91c2055_create_user_favorite_table.py
@@ -26,9 +26,8 @@ Create Date: 2023-07-12 20:34:57.553981
revision = "e0f6f91c2055"
down_revision = "bf646a0c1501"
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy.dialects import postgresql
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2023-07-18_15-30_863adcf72773_delete_obsolete_druid_nosql_slice_parameters.py b/superset/migrations/versions/2023-07-18_15-30_863adcf72773_delete_obsolete_druid_nosql_slice_parameters.py
index ce25bd85e..4104c2a59 100644
--- a/superset/migrations/versions/2023-07-18_15-30_863adcf72773_delete_obsolete_druid_nosql_slice_parameters.py
+++ b/superset/migrations/versions/2023-07-18_15-30_863adcf72773_delete_obsolete_druid_nosql_slice_parameters.py
@@ -26,14 +26,14 @@ Create Date: 2023-07-18 15:30:43.695135
revision = "863adcf72773"
down_revision = "6d05b0a70c89"
-import json
-import logging
+import json # noqa: E402
+import logging # noqa: E402
-from alembic import op
-from sqlalchemy import Column, Integer, Text
-from sqlalchemy.ext.declarative import declarative_base
+from alembic import op # noqa: E402
+from sqlalchemy import Column, Integer, Text # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
+from superset import db # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2023-07-19_16-48_a23c6f8b1280_cleanup_erroneous_parent_filter_ids.py b/superset/migrations/versions/2023-07-19_16-48_a23c6f8b1280_cleanup_erroneous_parent_filter_ids.py
index f5bdb29a9..a41dea497 100644
--- a/superset/migrations/versions/2023-07-19_16-48_a23c6f8b1280_cleanup_erroneous_parent_filter_ids.py
+++ b/superset/migrations/versions/2023-07-19_16-48_a23c6f8b1280_cleanup_erroneous_parent_filter_ids.py
@@ -27,14 +27,14 @@ revision = "a23c6f8b1280"
down_revision = "863adcf72773"
-import json
-import logging
+import json # noqa: E402
+import logging # noqa: E402
-from alembic import op
-from sqlalchemy import Column, Integer, Text
-from sqlalchemy.ext.declarative import declarative_base
+from alembic import op # noqa: E402
+from sqlalchemy import Column, Integer, Text # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
+from superset import db # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2023-07-19_17-54_ee179a490af9_deckgl_path_width_units.py b/superset/migrations/versions/2023-07-19_17-54_ee179a490af9_deckgl_path_width_units.py
index f014cf857..8110c42dc 100644
--- a/superset/migrations/versions/2023-07-19_17-54_ee179a490af9_deckgl_path_width_units.py
+++ b/superset/migrations/versions/2023-07-19_17-54_ee179a490af9_deckgl_path_width_units.py
@@ -21,6 +21,7 @@ Revises: e0f6f91c2055
Create Date: 2023-07-19 17:54:06.752360
"""
+
import json
import logging
diff --git a/superset/migrations/versions/2023-08-02_15-23_0769ef90fddd_fix_schema_perm_for_datasets.py b/superset/migrations/versions/2023-08-02_15-23_0769ef90fddd_fix_schema_perm_for_datasets.py
index f96dcd26d..428677b37 100644
--- a/superset/migrations/versions/2023-08-02_15-23_0769ef90fddd_fix_schema_perm_for_datasets.py
+++ b/superset/migrations/versions/2023-08-02_15-23_0769ef90fddd_fix_schema_perm_for_datasets.py
@@ -26,12 +26,12 @@ Create Date: 2023-08-02 15:23:58.242396
revision = "0769ef90fddd"
down_revision = "ee179a490af9"
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy.dialects.sqlite.base import SQLiteDialect
-from sqlalchemy.ext.declarative import declarative_base
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
+from sqlalchemy.dialects.sqlite.base import SQLiteDialect # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
+from superset import db # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2023-08-08_14-14_2e826adca42c_log_json.py b/superset/migrations/versions/2023-08-08_14-14_2e826adca42c_log_json.py
index aa77fa4f8..4fc0d0878 100644
--- a/superset/migrations/versions/2023-08-08_14-14_2e826adca42c_log_json.py
+++ b/superset/migrations/versions/2023-08-08_14-14_2e826adca42c_log_json.py
@@ -22,7 +22,6 @@ Create Date: 2023-08-08 14:14:23.381364
"""
-
import sqlalchemy as sa
from alembic import op
diff --git a/superset/migrations/versions/2023-08-09_14-17_8ace289026f3_add_on_delete_cascade_for_dashboard_slices.py b/superset/migrations/versions/2023-08-09_14-17_8ace289026f3_add_on_delete_cascade_for_dashboard_slices.py
index 42e3507f7..ff2ea71d0 100644
--- a/superset/migrations/versions/2023-08-09_14-17_8ace289026f3_add_on_delete_cascade_for_dashboard_slices.py
+++ b/superset/migrations/versions/2023-08-09_14-17_8ace289026f3_add_on_delete_cascade_for_dashboard_slices.py
@@ -27,7 +27,7 @@ revision = "8ace289026f3"
down_revision = "2e826adca42c"
-from superset.migrations.shared.constraints import ForeignKey, redefine
+from superset.migrations.shared.constraints import ForeignKey, redefine # noqa: E402
foreign_keys = [
ForeignKey(
diff --git a/superset/migrations/versions/2023-08-09_15-39_4448fa6deeb1__dd_on_delete_cascade_for_embedded_dashboards.py.py b/superset/migrations/versions/2023-08-09_15-39_4448fa6deeb1__dd_on_delete_cascade_for_embedded_dashboards.py.py
index 8d298863a..54aa5bc57 100644
--- a/superset/migrations/versions/2023-08-09_15-39_4448fa6deeb1__dd_on_delete_cascade_for_embedded_dashboards.py.py
+++ b/superset/migrations/versions/2023-08-09_15-39_4448fa6deeb1__dd_on_delete_cascade_for_embedded_dashboards.py.py
@@ -26,7 +26,7 @@ Create Date: 2023-08-09 15:39:58.130228
revision = "4448fa6deeb1"
down_revision = "8ace289026f3"
-from superset.migrations.shared.constraints import ForeignKey, redefine
+from superset.migrations.shared.constraints import ForeignKey, redefine # noqa: E402
foreign_keys = [
ForeignKey(
diff --git a/superset/migrations/versions/2023-08-14_09-38_9f4a086c2676_add_normalize_columns_to_sqla_model.py b/superset/migrations/versions/2023-08-14_09-38_9f4a086c2676_add_normalize_columns_to_sqla_model.py
index 8eaee8207..b79b032a0 100644
--- a/superset/migrations/versions/2023-08-14_09-38_9f4a086c2676_add_normalize_columns_to_sqla_model.py
+++ b/superset/migrations/versions/2023-08-14_09-38_9f4a086c2676_add_normalize_columns_to_sqla_model.py
@@ -26,13 +26,12 @@ Create Date: 2023-08-14 09:38:11.897437
revision = "9f4a086c2676"
down_revision = "4448fa6deeb1"
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy.ext.declarative import declarative_base
-from sqlalchemy.orm import Session
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
-from superset.migrations.shared.utils import paginated_update
+from superset import db # noqa: E402
+from superset.migrations.shared.utils import paginated_update # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2023-08-22_11-09_ec54aca4c8a2_increase_ab_user_email_field_size.py b/superset/migrations/versions/2023-08-22_11-09_ec54aca4c8a2_increase_ab_user_email_field_size.py
index 8e2072655..33f57953d 100644
--- a/superset/migrations/versions/2023-08-22_11-09_ec54aca4c8a2_increase_ab_user_email_field_size.py
+++ b/superset/migrations/versions/2023-08-22_11-09_ec54aca4c8a2_increase_ab_user_email_field_size.py
@@ -26,8 +26,8 @@ Create Date: 2023-08-22 11:09:48.577457
revision = "ec54aca4c8a2"
down_revision = "9f4a086c2676"
-import sqlalchemy as sa
-from alembic import op
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2023-09-06_13-18_317970b4400c_added_time_secondary_column_to_.py b/superset/migrations/versions/2023-09-06_13-18_317970b4400c_added_time_secondary_column_to_.py
index 4972a8691..1dbc216f8 100755
--- a/superset/migrations/versions/2023-09-06_13-18_317970b4400c_added_time_secondary_column_to_.py
+++ b/superset/migrations/versions/2023-09-06_13-18_317970b4400c_added_time_secondary_column_to_.py
@@ -26,13 +26,12 @@ Create Date: 2023-09-06 13:18:59.597259
revision = "317970b4400c"
down_revision = "ec54aca4c8a2"
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy.ext.declarative import declarative_base
-from sqlalchemy.orm import Session
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
-from superset.migrations.shared.utils import paginated_update, table_has_column
+from superset import db # noqa: E402
+from superset.migrations.shared.utils import paginated_update, table_has_column # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2023-09-15_12-58_4b85906e5b91_add_on_delete_cascade_for_dashboard_roles.py b/superset/migrations/versions/2023-09-15_12-58_4b85906e5b91_add_on_delete_cascade_for_dashboard_roles.py
index b712aca68..702751948 100644
--- a/superset/migrations/versions/2023-09-15_12-58_4b85906e5b91_add_on_delete_cascade_for_dashboard_roles.py
+++ b/superset/migrations/versions/2023-09-15_12-58_4b85906e5b91_add_on_delete_cascade_for_dashboard_roles.py
@@ -27,7 +27,7 @@ revision = "4b85906e5b91"
down_revision = "317970b4400c"
-from superset.migrations.shared.constraints import ForeignKey, redefine
+from superset.migrations.shared.constraints import ForeignKey, redefine # noqa: E402
foreign_keys = [
ForeignKey(
diff --git a/superset/migrations/versions/2023-12-01_12-03_b7851ee5522f_replay_317970b4400c.py b/superset/migrations/versions/2023-12-01_12-03_b7851ee5522f_replay_317970b4400c.py
index b4286736f..0833dcc55 100644
--- a/superset/migrations/versions/2023-12-01_12-03_b7851ee5522f_replay_317970b4400c.py
+++ b/superset/migrations/versions/2023-12-01_12-03_b7851ee5522f_replay_317970b4400c.py
@@ -26,10 +26,8 @@ Create Date: 2023-12-01 12:03:27.538945
revision = "b7851ee5522f"
down_revision = "4b85906e5b91"
-from importlib import import_module
+from importlib import import_module # noqa: E402
-import sqlalchemy as sa
-from alembic import op
module = import_module(
"superset.migrations.versions.2023-09-06_13-18_317970b4400c_added_time_secondary_column_to_"
diff --git a/superset/migrations/versions/2023-12-15_17-58_06dd9ff00fe8_add_percent_calculation_type_funnel_.py b/superset/migrations/versions/2023-12-15_17-58_06dd9ff00fe8_add_percent_calculation_type_funnel_.py
index 22b750b76..5811cb4b3 100644
--- a/superset/migrations/versions/2023-12-15_17-58_06dd9ff00fe8_add_percent_calculation_type_funnel_.py
+++ b/superset/migrations/versions/2023-12-15_17-58_06dd9ff00fe8_add_percent_calculation_type_funnel_.py
@@ -21,6 +21,7 @@ Revises: b7851ee5522f
Create Date: 2023-12-15 17:58:18.277951
"""
+
import json
from alembic import op
diff --git a/superset/migrations/versions/2024-01-05_16-20_65a167d4c62e_add_indexes_to_report_models.py b/superset/migrations/versions/2024-01-05_16-20_65a167d4c62e_add_indexes_to_report_models.py
index e82ed2421..19fd18c5e 100644
--- a/superset/migrations/versions/2024-01-05_16-20_65a167d4c62e_add_indexes_to_report_models.py
+++ b/superset/migrations/versions/2024-01-05_16-20_65a167d4c62e_add_indexes_to_report_models.py
@@ -26,9 +26,7 @@ Create Date: 2024-01-05 16:20:31.598995
revision = "65a167d4c62e"
down_revision = "06dd9ff00fe8"
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy.dialects import postgresql
+from alembic import op # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2024-01-16_13-14_59a1450b3c10_drop_filter_sets_table.py b/superset/migrations/versions/2024-01-16_13-14_59a1450b3c10_drop_filter_sets_table.py
index 6a365012b..798c528ae 100644
--- a/superset/migrations/versions/2024-01-16_13-14_59a1450b3c10_drop_filter_sets_table.py
+++ b/superset/migrations/versions/2024-01-16_13-14_59a1450b3c10_drop_filter_sets_table.py
@@ -26,9 +26,8 @@ Create Date: 2023-12-27 13:14:27.268232
revision = "59a1450b3c10"
down_revision = "65a167d4c62e"
-from importlib import import_module
+from importlib import import_module # noqa: E402
-from alembic import op
module = import_module(
"superset.migrations.versions.2021-03-29_11-15_3ebe0993c770_filterset_table"
diff --git a/superset/migrations/versions/2024-01-17_13-09_96164e3017c6_tagged_object_unique_constraint.py b/superset/migrations/versions/2024-01-17_13-09_96164e3017c6_tagged_object_unique_constraint.py
index 0b67ad502..fd3d883c9 100644
--- a/superset/migrations/versions/2024-01-17_13-09_96164e3017c6_tagged_object_unique_constraint.py
+++ b/superset/migrations/versions/2024-01-17_13-09_96164e3017c6_tagged_object_unique_constraint.py
@@ -17,10 +17,9 @@
import enum
import migration_utils as utils
-import sqlalchemy as sa
from alembic import op
from sqlalchemy import Column, Enum, Integer, MetaData, Table
-from sqlalchemy.sql import and_, func, select
+from sqlalchemy.sql import func, select
# revision identifiers, used by Alembic.
revision = "96164e3017c6"
diff --git a/superset/migrations/versions/2024-01-18_12-12_15a2c68a2e6b_merging_two_heads.py b/superset/migrations/versions/2024-01-18_12-12_15a2c68a2e6b_merging_two_heads.py
index 7904d9298..0dafb6091 100644
--- a/superset/migrations/versions/2024-01-18_12-12_15a2c68a2e6b_merging_two_heads.py
+++ b/superset/migrations/versions/2024-01-18_12-12_15a2c68a2e6b_merging_two_heads.py
@@ -26,9 +26,6 @@ Create Date: 2024-01-18 12:12:52.174742
revision = "15a2c68a2e6b"
down_revision = ("96164e3017c6", "a32e0c4d8646")
-import sqlalchemy as sa
-from alembic import op
-
def upgrade():
pass
diff --git a/superset/migrations/versions/2024-01-18_14-41_a32e0c4d8646_migrate_sunburst_chart.py b/superset/migrations/versions/2024-01-18_14-41_a32e0c4d8646_migrate_sunburst_chart.py
index 10644e407..1ed8d4662 100644
--- a/superset/migrations/versions/2024-01-18_14-41_a32e0c4d8646_migrate_sunburst_chart.py
+++ b/superset/migrations/versions/2024-01-18_14-41_a32e0c4d8646_migrate_sunburst_chart.py
@@ -26,10 +26,10 @@ Create Date: 2023-12-22 14:41:43.638321
revision = "a32e0c4d8646"
down_revision = "59a1450b3c10"
-from alembic import op
+from alembic import op # noqa: E402
-from superset import db
-from superset.migrations.shared.migrate_viz import MigrateSunburst
+from superset import db # noqa: E402
+from superset.migrations.shared.migrate_viz import MigrateSunburst # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2024-01-18_15-20_214f580d09c9_migrate_filter_boxes_to_native_filters.py b/superset/migrations/versions/2024-01-18_15-20_214f580d09c9_migrate_filter_boxes_to_native_filters.py
index b54b60cb9..830905e0e 100644
--- a/superset/migrations/versions/2024-01-18_15-20_214f580d09c9_migrate_filter_boxes_to_native_filters.py
+++ b/superset/migrations/versions/2024-01-18_15-20_214f580d09c9_migrate_filter_boxes_to_native_filters.py
@@ -21,18 +21,19 @@ Revises: a32e0c4d8646
Create Date: 2024-01-10 09:20:32.233912
"""
+
# revision identifiers, used by Alembic.
revision = "214f580d09c9"
down_revision = "a32e0c4d8646"
-from alembic import op
-from sqlalchemy import Column, ForeignKey, Integer, String, Table, Text
-from sqlalchemy.ext.declarative import declarative_base
-from sqlalchemy.orm import relationship
+from alembic import op # noqa: E402
+from sqlalchemy import Column, ForeignKey, Integer, String, Table, Text # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
+from sqlalchemy.orm import relationship # noqa: E402
-from superset import db
-from superset.migrations.shared.native_filters import migrate_dashboard
-from superset.migrations.shared.utils import paginated_update
+from superset import db # noqa: E402
+from superset.migrations.shared.native_filters import migrate_dashboard # noqa: E402
+from superset.migrations.shared.utils import paginated_update # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2024-01-19_08-42_1cf8e4344e2b_merging.py b/superset/migrations/versions/2024-01-19_08-42_1cf8e4344e2b_merging.py
index 9ac2a9b24..ce8bf7f18 100644
--- a/superset/migrations/versions/2024-01-19_08-42_1cf8e4344e2b_merging.py
+++ b/superset/migrations/versions/2024-01-19_08-42_1cf8e4344e2b_merging.py
@@ -26,9 +26,6 @@ Create Date: 2024-01-19 08:42:37.694192
revision = "1cf8e4344e2b"
down_revision = ("e863403c0c50", "15a2c68a2e6b")
-import sqlalchemy as sa
-from alembic import op
-
def upgrade():
pass
diff --git a/superset/migrations/versions/2024-01-19_10-03_e863403c0c50_drop_url_table.py b/superset/migrations/versions/2024-01-19_10-03_e863403c0c50_drop_url_table.py
index 49b320f13..df46f36bc 100644
--- a/superset/migrations/versions/2024-01-19_10-03_e863403c0c50_drop_url_table.py
+++ b/superset/migrations/versions/2024-01-19_10-03_e863403c0c50_drop_url_table.py
@@ -26,11 +26,10 @@ Create Date: 2023-12-28 16:03:31.691033
revision = "e863403c0c50"
down_revision = "214f580d09c9"
-from importlib import import_module
+from importlib import import_module # noqa: E402
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy.dialects import postgresql
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
module = import_module("superset.migrations.versions.2016-01-13_20-24_8e80a26a31db_")
diff --git a/superset/migrations/versions/2024-02-07_17-13_87d38ad83218_migrate_can_view_and_drill_permission.py b/superset/migrations/versions/2024-02-07_17-13_87d38ad83218_migrate_can_view_and_drill_permission.py
index abc7adc66..c15379df6 100644
--- a/superset/migrations/versions/2024-02-07_17-13_87d38ad83218_migrate_can_view_and_drill_permission.py
+++ b/superset/migrations/versions/2024-02-07_17-13_87d38ad83218_migrate_can_view_and_drill_permission.py
@@ -26,11 +26,11 @@ Create Date: 2024-02-07 17:13:20.937186
revision = "87d38ad83218"
down_revision = "1cf8e4344e2b"
-from alembic import op
-from sqlalchemy.exc import SQLAlchemyError
-from sqlalchemy.orm import Session
+from alembic import op # noqa: E402
+from sqlalchemy.exc import SQLAlchemyError # noqa: E402
+from sqlalchemy.orm import Session # noqa: E402
-from superset.migrations.shared.security_converge import (
+from superset.migrations.shared.security_converge import ( # noqa: E402
add_pvms,
get_reversed_new_pvms,
get_reversed_pvm_map,
diff --git a/superset/migrations/versions/2024-02-14_14-43_17fcea065655_change_text_to_mediumtext.py b/superset/migrations/versions/2024-02-14_14-43_17fcea065655_change_text_to_mediumtext.py
index e63ab6ac5..3ba126d24 100644
--- a/superset/migrations/versions/2024-02-14_14-43_17fcea065655_change_text_to_mediumtext.py
+++ b/superset/migrations/versions/2024-02-14_14-43_17fcea065655_change_text_to_mediumtext.py
@@ -26,11 +26,11 @@ Create Date: 2024-02-14 14:43:39.898093
revision = "17fcea065655"
down_revision = "87d38ad83218"
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy.dialects.mysql.base import MySQLDialect
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
+from sqlalchemy.dialects.mysql.base import MySQLDialect # noqa: E402
-from superset.utils.core import MediumText
+from superset.utils.core import MediumText # noqa: E402
TABLE_COLUMNS = [
"annotation.json_metadata",
diff --git a/superset/migrations/versions/2024-03-01_10-47_be1b217cd8cd_big_number_kpi_single_metric.py b/superset/migrations/versions/2024-03-01_10-47_be1b217cd8cd_big_number_kpi_single_metric.py
index c0e03215f..84466b548 100644
--- a/superset/migrations/versions/2024-03-01_10-47_be1b217cd8cd_big_number_kpi_single_metric.py
+++ b/superset/migrations/versions/2024-03-01_10-47_be1b217cd8cd_big_number_kpi_single_metric.py
@@ -27,14 +27,14 @@ revision = "be1b217cd8cd"
down_revision = "17fcea065655"
-import json
+import json # noqa: E402
-from alembic import op
-from sqlalchemy import Column, Integer, String, Text
-from sqlalchemy.ext.declarative import declarative_base
+from alembic import op # noqa: E402
+from sqlalchemy import Column, Integer, String, Text # noqa: E402
+from sqlalchemy.ext.declarative import declarative_base # noqa: E402
-from superset import db
-from superset.migrations.shared.utils import paginated_update
+from superset import db # noqa: E402
+from superset.migrations.shared.utils import paginated_update # noqa: E402
Base = declarative_base()
diff --git a/superset/migrations/versions/2024-03-20_16-02_678eefb4ab44_add_access_token_table.py b/superset/migrations/versions/2024-03-20_16-02_678eefb4ab44_add_access_token_table.py
index f9a4036a3..ff3f06f07 100644
--- a/superset/migrations/versions/2024-03-20_16-02_678eefb4ab44_add_access_token_table.py
+++ b/superset/migrations/versions/2024-03-20_16-02_678eefb4ab44_add_access_token_table.py
@@ -26,9 +26,9 @@ Create Date: 2024-03-20 16:02:58.515915
revision = "678eefb4ab44"
down_revision = "be1b217cd8cd"
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy_utils import EncryptedType
+import sqlalchemy as sa # noqa: E402
+from alembic import op # noqa: E402
+from sqlalchemy_utils import EncryptedType # noqa: E402
def upgrade():
diff --git a/superset/migrations/versions/2024-04-01_22-44_c22cb5c2e546_user_attr_avatar_url.py b/superset/migrations/versions/2024-04-01_22-44_c22cb5c2e546_user_attr_avatar_url.py
index f52c8c582..8e07f1b6c 100644
--- a/superset/migrations/versions/2024-04-01_22-44_c22cb5c2e546_user_attr_avatar_url.py
+++ b/superset/migrations/versions/2024-04-01_22-44_c22cb5c2e546_user_attr_avatar_url.py
@@ -20,9 +20,9 @@ Revises: be1b217cd8cd
Create Date: 2024-04-01 22:44:40.386543
"""
+
import sqlalchemy as sa
from alembic import op
-from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "c22cb5c2e546"
diff --git a/superset/migrations/versions/2024-04-08_15-43_5ad7321c2169_mig_new_csv_upload_perm.py b/superset/migrations/versions/2024-04-08_15-43_5ad7321c2169_mig_new_csv_upload_perm.py
index 04bd4f625..3a846c42d 100644
--- a/superset/migrations/versions/2024-04-08_15-43_5ad7321c2169_mig_new_csv_upload_perm.py
+++ b/superset/migrations/versions/2024-04-08_15-43_5ad7321c2169_mig_new_csv_upload_perm.py
@@ -26,11 +26,11 @@ Create Date: 2024-04-08 15:43:29.682687
revision = "5ad7321c2169"
down_revision = "c22cb5c2e546"
-from alembic import op
-from sqlalchemy.exc import SQLAlchemyError
-from sqlalchemy.orm import Session
+from alembic import op # noqa: E402
+from sqlalchemy.exc import SQLAlchemyError # noqa: E402
+from sqlalchemy.orm import Session # noqa: E402
-from superset.migrations.shared.security_converge import (
+from superset.migrations.shared.security_converge import ( # noqa: E402
add_pvms,
get_reversed_new_pvms,
get_reversed_pvm_map,
diff --git a/superset/migrations/versions/2024-04-17_14-04_d60591c5515f_mig_new_excel_upload_perm.py b/superset/migrations/versions/2024-04-17_14-04_d60591c5515f_mig_new_excel_upload_perm.py
index c400212d9..56e637309 100644
--- a/superset/migrations/versions/2024-04-17_14-04_d60591c5515f_mig_new_excel_upload_perm.py
+++ b/superset/migrations/versions/2024-04-17_14-04_d60591c5515f_mig_new_excel_upload_perm.py
@@ -26,11 +26,11 @@ Create Date: 2024-04-17 14:04:36.041749
revision = "d60591c5515f"
down_revision = "5ad7321c2169"
-from alembic import op
-from sqlalchemy.exc import SQLAlchemyError
-from sqlalchemy.orm import Session
+from alembic import op # noqa: E402
+from sqlalchemy.exc import SQLAlchemyError # noqa: E402
+from sqlalchemy.orm import Session # noqa: E402
-from superset.migrations.shared.security_converge import (
+from superset.migrations.shared.security_converge import ( # noqa: E402
add_pvms,
get_reversed_new_pvms,
get_reversed_pvm_map,
diff --git a/superset/models/__init__.py b/superset/models/__init__.py
index 067d6ae83..750c61ccc 100644
--- a/superset/models/__init__.py
+++ b/superset/models/__init__.py
@@ -14,4 +14,4 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
-from . import core, dynamic_plugins, sql_lab, user_attributes
+from . import core, dynamic_plugins, sql_lab, user_attributes # noqa: F401
diff --git a/superset/models/annotations.py b/superset/models/annotations.py
index d8b6f8b1f..636da0055 100644
--- a/superset/models/annotations.py
+++ b/superset/models/annotations.py
@@ -15,6 +15,7 @@
# specific language governing permissions and limitations
# under the License.
"""a collection of Annotation-related models"""
+
from typing import Any
from flask_appbuilder import Model
diff --git a/superset/models/cache.py b/superset/models/cache.py
index a6dec2108..d700e29d4 100755
--- a/superset/models/cache.py
+++ b/superset/models/cache.py
@@ -21,7 +21,6 @@ from sqlalchemy import Column, DateTime, Integer, String
class CacheKey(Model): # pylint: disable=too-few-public-methods
-
"""Stores cache key records for the superset visualization."""
__tablename__ = "cache_keys"
diff --git a/superset/models/core.py b/superset/models/core.py
index 4c514f308..42f6a7824 100755
--- a/superset/models/core.py
+++ b/superset/models/core.py
@@ -14,7 +14,8 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
-# pylint: disable=line-too-long,too-many-lines
+
+# pylint: disable=too-many-lines
"""A collection of ORM sqlalchemy models for Superset"""
from __future__ import annotations
@@ -114,9 +115,7 @@ class ConfigurationMethod(StrEnum):
DYNAMIC_FORM = "dynamic_form"
-class Database(
- Model, AuditMixinNullable, ImportExportMixin
-): # pylint: disable=too-many-public-methods
+class Database(Model, AuditMixinNullable, ImportExportMixin): # pylint: disable=too-many-public-methods
"""An ORM object that stores Database related information"""
__tablename__ = "dbs"
diff --git a/superset/models/dashboard.py b/superset/models/dashboard.py
index aa961a2ff..caf900670 100644
--- a/superset/models/dashboard.py
+++ b/superset/models/dashboard.py
@@ -272,9 +272,9 @@ class Dashboard(AuditMixinNullable, ImportExportMixin, Model):
def datasets_trimmed_for_slices(self) -> list[dict[str, Any]]:
# Verbose but efficient database enumeration of dashboard datasources.
- slices_by_datasource: dict[
- tuple[type[BaseDatasource], int], set[Slice]
- ] = defaultdict(set)
+ slices_by_datasource: dict[tuple[type[BaseDatasource], int], set[Slice]] = (
+ defaultdict(set)
+ )
for slc in self.slices:
slices_by_datasource[(slc.cls_model, slc.datasource_id)].add(slc)
@@ -426,6 +426,6 @@ def id_or_slug_filter(id_or_slug: int | str) -> BinaryExpression:
OnDashboardChange = Callable[[Mapper, Connection, Dashboard], Any]
if is_feature_enabled("THUMBNAILS_SQLA_LISTENERS"):
- update_thumbnail: OnDashboardChange = lambda _, __, dash: dash.update_thumbnail()
+ update_thumbnail: OnDashboardChange = lambda _, __, dash: dash.update_thumbnail() # noqa: E731
sqla.event.listen(Dashboard, "after_insert", update_thumbnail)
sqla.event.listen(Dashboard, "after_update", update_thumbnail)
diff --git a/superset/models/helpers.py b/superset/models/helpers.py
index 7d82b6d08..4165f874a 100644
--- a/superset/models/helpers.py
+++ b/superset/models/helpers.py
@@ -16,6 +16,7 @@
# under the License.
# pylint: disable=too-many-lines
"""a collection of model-related helper classes and functions"""
+
import builtins
import dataclasses
import json
@@ -200,9 +201,7 @@ class ImportExportMixin:
for u in cls.__table_args__ # type: ignore
if isinstance(u, UniqueConstraint)
]
- unique.extend(
- {c.name} for c in cls.__table__.columns if c.unique # type: ignore
- )
+ unique.extend({c.name} for c in cls.__table__.columns if c.unique) # type: ignore
return unique
@classmethod
@@ -210,7 +209,7 @@ class ImportExportMixin:
"""Get a mapping of foreign name to the local name of foreign keys"""
parent_rel = cls.__mapper__.relationships.get(cls.export_parent)
if parent_rel:
- return {l.name: r.name for (l, r) in parent_rel.local_remote_pairs}
+ return {l.name: r.name for (l, r) in parent_rel.local_remote_pairs} # noqa: E741
return {}
@classmethod
@@ -556,7 +555,6 @@ class AuditMixinNullable(AuditMixin):
class QueryResult: # pylint: disable=too-few-public-methods
-
"""Object returned by the query interface"""
def __init__( # pylint: disable=too-many-arguments
@@ -1246,7 +1244,7 @@ class ExploreMixin: # pylint: disable=too-many-public-methods
def adhoc_column_to_sqla(
self,
- col: "AdhocColumn", # type: ignore
+ col: "AdhocColumn", # type: ignore # noqa: F821
force_type_check: bool = False,
template_processor: Optional[BaseTemplateProcessor] = None,
) -> ColumnElement:
@@ -1325,7 +1323,7 @@ class ExploreMixin: # pylint: disable=too-many-public-methods
)
)
- l = []
+ l = [] # noqa: E741
if start_dttm:
l.append(
col
diff --git a/superset/models/sql_lab.py b/superset/models/sql_lab.py
index 455dfd1af..18d0f2e1d 100644
--- a/superset/models/sql_lab.py
+++ b/superset/models/sql_lab.py
@@ -15,6 +15,7 @@
# specific language governing permissions and limitations
# under the License.
"""A collection of ORM sqlalchemy models for SQL Lab"""
+
import builtins
import inspect
import logging
@@ -349,7 +350,7 @@ class Query(
def adhoc_column_to_sqla(
self,
- col: "AdhocColumn", # type: ignore
+ col: "AdhocColumn", # type: ignore # noqa: F821
force_type_check: bool = False,
template_processor: Optional[BaseTemplateProcessor] = None,
) -> ColumnElement:
diff --git a/superset/models/user_attributes.py b/superset/models/user_attributes.py
index 55b6d8aba..512270c89 100644
--- a/superset/models/user_attributes.py
+++ b/superset/models/user_attributes.py
@@ -24,7 +24,6 @@ from superset.models.helpers import AuditMixinNullable
class UserAttribute(Model, AuditMixinNullable):
-
"""
Custom attributes attached to the user.
diff --git a/superset/queries/saved_queries/filters.py b/superset/queries/saved_queries/filters.py
index a9e7006b6..90e356163 100644
--- a/superset/queries/saved_queries/filters.py
+++ b/superset/queries/saved_queries/filters.py
@@ -45,9 +45,7 @@ class SavedQueryAllTextFilter(BaseFilter): # pylint: disable=too-few-public-met
)
-class SavedQueryFavoriteFilter(
- BaseFavoriteFilter
-): # pylint: disable=too-few-public-methods
+class SavedQueryFavoriteFilter(BaseFavoriteFilter): # pylint: disable=too-few-public-methods
"""
Custom filter for the GET list that filters all saved queries that a user has
favored
diff --git a/superset/reports/models.py b/superset/reports/models.py
index 63090e4c4..63f904876 100644
--- a/superset/reports/models.py
+++ b/superset/reports/models.py
@@ -15,6 +15,7 @@
# specific language governing permissions and limitations
# under the License.
"""A collection of ORM sqlalchemy models for Superset"""
+
from cron_descriptor import get_description
from flask_appbuilder import Model
from flask_appbuilder.models.decorators import renders
diff --git a/superset/reports/notifications/__init__.py b/superset/reports/notifications/__init__.py
index b9fff48cf..770ce43e2 100644
--- a/superset/reports/notifications/__init__.py
+++ b/superset/reports/notifications/__init__.py
@@ -16,8 +16,8 @@
# under the License.
from superset.reports.models import ReportRecipients
from superset.reports.notifications.base import BaseNotification, NotificationContent
-from superset.reports.notifications.email import EmailNotification
-from superset.reports.notifications.slack import SlackNotification
+from superset.reports.notifications.email import EmailNotification # noqa: F401
+from superset.reports.notifications.slack import SlackNotification # noqa: F401
def create_notification(
diff --git a/superset/result_set.py b/superset/result_set.py
index 548327103..061656720 100644
--- a/superset/result_set.py
+++ b/superset/result_set.py
@@ -14,8 +14,8 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
-""" Superset wrapper around pyarrow.Table.
-"""
+"""Superset wrapper around pyarrow.Table."""
+
import datetime
import json
import logging
@@ -34,7 +34,7 @@ from superset.utils.core import GenericDataType
logger = logging.getLogger(__name__)
-def dedup(l: list[str], suffix: str = "__", case_sensitive: bool = True) -> list[str]:
+def dedup(l: list[str], suffix: str = "__", case_sensitive: bool = True) -> list[str]: # noqa: E741
"""De-duplicates a list of string by suffixing a counter
Always returns the same number of entries as provided, and always returns
diff --git a/superset/row_level_security/api.py b/superset/row_level_security/api.py
index fc505e724..86956683c 100644
--- a/superset/row_level_security/api.py
+++ b/superset/row_level_security/api.py
@@ -299,7 +299,7 @@ class RLSRestApi(BaseSupersetModelRestApi):
exc_info=True,
)
return self.response_422(message=str(ex))
- except RLSRuleNotFoundError as ex:
+ except RLSRuleNotFoundError:
return self.response_404()
@expose("/", methods=("DELETE",))
diff --git a/superset/security/api.py b/superset/security/api.py
index acafc3257..61fd68e6f 100644
--- a/superset/security/api.py
+++ b/superset/security/api.py
@@ -54,8 +54,10 @@ class ResourceSchema(PermissiveSchema):
id = fields.String(required=True)
@post_load
- def convert_enum_to_value(
- self, data: dict[str, Any], **kwargs: Any # pylint: disable=unused-argument
+ def convert_enum_to_value( # pylint: disable=unused-argument
+ self,
+ data: dict[str, Any],
+ **kwargs: Any,
) -> dict[str, Any]:
# we don't care about the enum, we want the value inside
data["type"] = data["type"].value
diff --git a/superset/security/manager.py b/superset/security/manager.py
index 5037e9bac..4da85b7d1 100644
--- a/superset/security/manager.py
+++ b/superset/security/manager.py
@@ -16,6 +16,7 @@
# under the License.
# pylint: disable=too-many-lines
"""A set of constants and methods to manage permissions and security"""
+
import json
import logging
import re
@@ -857,9 +858,9 @@ class SupersetSecurityManager( # pylint: disable=too-many-public-methods
pvms = self.get_session.query(PermissionView).filter(
or_(
PermissionView.permission # pylint: disable=singleton-comparison
- == None,
+ == None, # noqa: E711
PermissionView.view_menu # pylint: disable=singleton-comparison
- == None,
+ == None, # noqa: E711
)
)
self.get_session.commit()
diff --git a/superset/sql_validators/__init__.py b/superset/sql_validators/__init__.py
index ad048a86a..0298cf32e 100644
--- a/superset/sql_validators/__init__.py
+++ b/superset/sql_validators/__init__.py
@@ -17,7 +17,7 @@
from typing import Optional
from . import base, postgres, presto_db
-from .base import SQLValidationAnnotation
+from .base import SQLValidationAnnotation # noqa: F401
def get_validator_by_name(name: str) -> Optional[type[base.BaseSQLValidator]]:
diff --git a/superset/sqllab/execution_context_convertor.py b/superset/sqllab/execution_context_convertor.py
index a1e7a86d0..587e02755 100644
--- a/superset/sqllab/execution_context_convertor.py
+++ b/superset/sqllab/execution_context_convertor.py
@@ -28,8 +28,6 @@ from superset.sqllab.utils import apply_display_max_row_configuration_if_require
logger = logging.getLogger(__name__)
if TYPE_CHECKING:
- from superset.models.sql_lab import Query
- from superset.sqllab.sql_json_executer import SqlResults
from superset.sqllab.sqllab_execution_context import SqlJsonExecutionContext
diff --git a/superset/sqllab/sqllab_execution_context.py b/superset/sqllab/sqllab_execution_context.py
index bcd0bcbf1..7a732cf64 100644
--- a/superset/sqllab/sqllab_execution_context.py
+++ b/superset/sqllab/sqllab_execution_context.py
@@ -142,7 +142,6 @@ class SqlJsonExecutionContext: # pylint: disable=too-many-instance-attributes
self._sql_result = sql_result
def create_query(self) -> Query:
- # pylint: disable=line-too-long
start_time = now_as_float()
if self.select_as_cta:
return Query(
diff --git a/superset/tags/models.py b/superset/tags/models.py
index 49efb750b..ba859f519 100644
--- a/superset/tags/models.py
+++ b/superset/tags/models.py
@@ -130,7 +130,9 @@ class TaggedObject(Model, AuditMixinNullable):
def get_tag(
- name: str, session: orm.Session, type_: TagType # pylint: disable=disallowed-name
+ name: str,
+ session: orm.Session, # pylint: disable=disallowed-name
+ type_: TagType,
) -> Tag:
tag_name = name.strip()
tag = session.query(Tag).filter_by(name=tag_name, type=type_).one_or_none()
diff --git a/superset/tasks/celery_app.py b/superset/tasks/celery_app.py
index e9ab10c05..4d36917be 100644
--- a/superset/tasks/celery_app.py
+++ b/superset/tasks/celery_app.py
@@ -19,6 +19,7 @@
This is the main entrypoint used by Celery workers. As such,
it needs to call create_app() in order to initialize things properly
"""
+
from typing import Any
from celery.signals import task_postrun, worker_process_init
@@ -31,8 +32,6 @@ from superset.extensions import celery_app, db
flask_app = create_app()
# Need to import late, as the celery_app will have been setup by "create_app()"
-# pylint: disable=wrong-import-position, unused-import
-from . import cache, scheduler # isort:skip
# Export the celery app globally for Celery (as run on the cmd line) to find
app = celery_app
diff --git a/superset/tasks/slack_util.py b/superset/tasks/slack_util.py
index 652fd89b6..72a11b1e9 100644
--- a/superset/tasks/slack_util.py
+++ b/superset/tasks/slack_util.py
@@ -17,6 +17,7 @@
"""
DEPRECATION NOTICE: this module is deprecated and will be removed on 2.0.
"""
+
import logging
from io import IOBase
from typing import cast, Optional, Union
diff --git a/superset/temporary_cache/api.py b/superset/temporary_cache/api.py
index 5dc95c122..1937049cb 100644
--- a/superset/temporary_cache/api.py
+++ b/superset/temporary_cache/api.py
@@ -132,17 +132,13 @@ class TemporaryCacheRestApi(BaseSupersetApi, ABC):
return self.response(404, message=str(ex))
@abstractmethod
- def get_create_command(self) -> Any:
- ...
+ def get_create_command(self) -> Any: ...
@abstractmethod
- def get_update_command(self) -> Any:
- ...
+ def get_update_command(self) -> Any: ...
@abstractmethod
- def get_get_command(self) -> Any:
- ...
+ def get_get_command(self) -> Any: ...
@abstractmethod
- def get_delete_command(self) -> Any:
- ...
+ def get_delete_command(self) -> Any: ...
diff --git a/superset/utils/core.py b/superset/utils/core.py
index b89fd759a..f02b00443 100644
--- a/superset/utils/core.py
+++ b/superset/utils/core.py
@@ -15,6 +15,7 @@
# specific language governing permissions and limitations
# under the License.
"""Utility functions used across Superset"""
+
# pylint: disable=too-many-lines
from __future__ import annotations
@@ -1050,7 +1051,8 @@ def merge_extra_form_data(form_data: dict[str, Any]) -> None:
"adhoc_filters", []
)
adhoc_filters.extend(
- {"isExtra": True, **fltr} for fltr in append_adhoc_filters # type: ignore
+ {"isExtra": True, **fltr} # type: ignore
+ for fltr in append_adhoc_filters
)
if append_filters:
for key, value in form_data.items():
diff --git a/superset/utils/date_parser.py b/superset/utils/date_parser.py
index 0b4c0c2bb..9a7c135f5 100644
--- a/superset/utils/date_parser.py
+++ b/superset/utils/date_parser.py
@@ -216,11 +216,13 @@ def get_since_until( # pylint: disable=too-many-arguments,too-many-locals,too-m
),
(
r"^last\s+([0-9]+)\s+(second|minute|hour|day|week|month|year)s?$",
- lambda delta, unit: f"DATEADD(DATETIME('{_relative_start}'), -{int(delta)}, {unit})", # pylint: disable=line-too-long,useless-suppression
+ lambda delta,
+ unit: f"DATEADD(DATETIME('{_relative_start}'), -{int(delta)}, {unit})", # pylint: disable=line-too-long,useless-suppression
),
(
r"^next\s+([0-9]+)\s+(second|minute|hour|day|week|month|year)s?$",
- lambda delta, unit: f"DATEADD(DATETIME('{_relative_end}'), {int(delta)}, {unit})", # pylint: disable=line-too-long,useless-suppression
+ lambda delta,
+ unit: f"DATEADD(DATETIME('{_relative_end}'), {int(delta)}, {unit})", # pylint: disable=line-too-long,useless-suppression
),
(
r"^(DATETIME.*|DATEADD.*|DATETRUNC.*|LASTDAY.*|HOLIDAY.*)$",
diff --git a/superset/utils/decorators.py b/superset/utils/decorators.py
index 7e34b9836..70fb30587 100644
--- a/superset/utils/decorators.py
+++ b/superset/utils/decorators.py
@@ -48,8 +48,7 @@ def statsd_gauge(metric_prefix: str | None = None) -> Callable[..., Any]:
return result
except Exception as ex:
if (
- hasattr(ex, "status")
- and ex.status < 500 # pylint: disable=no-member
+ hasattr(ex, "status") and ex.status < 500 # pylint: disable=no-member
):
current_app.config["STATS_LOGGER"].gauge(
f"{metric_prefix_}.warning", 1
diff --git a/superset/utils/log.py b/superset/utils/log.py
index cb5343470..65355c175 100644
--- a/superset/utils/log.py
+++ b/superset/utils/log.py
@@ -35,7 +35,7 @@ from superset.extensions import stats_logger_manager
from superset.utils.core import get_user_id, LoggerLevel, to_int
if TYPE_CHECKING:
- from superset.stats_logger import BaseStatsLogger
+ pass
logger = logging.getLogger(__name__)
diff --git a/superset/utils/pandas_postprocessing/prophet.py b/superset/utils/pandas_postprocessing/prophet.py
index a1c823ee2..85d553093 100644
--- a/superset/utils/pandas_postprocessing/prophet.py
+++ b/superset/utils/pandas_postprocessing/prophet.py
@@ -28,7 +28,7 @@ from superset.utils.pandas_postprocessing.utils import PROPHET_TIME_GRAIN_MAP
def _prophet_parse_seasonality(
- input_value: Optional[Union[bool, int]]
+ input_value: Optional[Union[bool, int]],
) -> Union[bool, str, int]:
if input_value is None:
return "auto"
diff --git a/superset/utils/webdriver.py b/superset/utils/webdriver.py
index 4552600fc..fc7f18aca 100644
--- a/superset/utils/webdriver.py
+++ b/superset/utils/webdriver.py
@@ -314,9 +314,7 @@ class WebDriverSelenium(WebDriverProxy):
EC.visibility_of_any_elements_located(
(By.CLASS_NAME, "ant-modal-content")
)
- )[
- 0
- ]
+ )[0]
err_msg_div = modal.find_element(By.CLASS_NAME, "ant-modal-body")
diff --git a/superset/views/__init__.py b/superset/views/__init__.py
index 838e92aca..0ef34edd7 100644
--- a/superset/views/__init__.py
+++ b/superset/views/__init__.py
@@ -26,3 +26,17 @@ from . import (
tags,
)
from .log import api as log_api, views
+
+__all__ = [
+ "alerts",
+ "api",
+ "base",
+ "core",
+ "css_templates",
+ "dynamic_plugins",
+ "health",
+ "log_api",
+ "views",
+ "sql_lab",
+ "tags",
+]
diff --git a/superset/views/base.py b/superset/views/base.py
index 606aa6c9d..1df5b6a66 100644
--- a/superset/views/base.py
+++ b/superset/views/base.py
@@ -239,7 +239,7 @@ def api(f: Callable[..., FlaskResponse]) -> Callable[..., FlaskResponse]:
def handle_api_exception(
- f: Callable[..., FlaskResponse]
+ f: Callable[..., FlaskResponse],
) -> Callable[..., FlaskResponse]:
"""
A decorator to catch superset exceptions. Use it after the @api decorator above
diff --git a/superset/views/base_api.py b/superset/views/base_api.py
index 10f005007..a62e96314 100644
--- a/superset/views/base_api.py
+++ b/superset/views/base_api.py
@@ -248,7 +248,7 @@ class BaseSupersetApiMixin:
class BaseSupersetApi(BaseSupersetApiMixin, BaseApi):
- ...
+ pass
class BaseSupersetModelRestApi(BaseSupersetApiMixin, ModelRestApi):
@@ -347,11 +347,12 @@ class BaseSupersetModelRestApi(BaseSupersetApiMixin, ModelRestApi):
if self.apispec_parameter_schemas is None: # type: ignore
self.apispec_parameter_schemas = {}
self.apispec_parameter_schemas["get_related_schema"] = get_related_schema
- self.openapi_spec_component_schemas: tuple[
- type[Schema], ...
- ] = self.openapi_spec_component_schemas + (
- RelatedResponseSchema,
- DistincResponseSchema,
+ self.openapi_spec_component_schemas: tuple[type[Schema], ...] = (
+ self.openapi_spec_component_schemas
+ + (
+ RelatedResponseSchema,
+ DistincResponseSchema,
+ )
)
def _init_properties(self) -> None:
diff --git a/superset/views/chart/views.py b/superset/views/chart/views.py
index 7c40d5971..209f7582e 100644
--- a/superset/views/chart/views.py
+++ b/superset/views/chart/views.py
@@ -27,9 +27,7 @@ from superset.views.base import DeleteMixin, SupersetModelView
from superset.views.chart.mixin import SliceMixin
-class SliceModelView(
- SliceMixin, SupersetModelView, DeleteMixin
-): # pylint: disable=too-many-ancestors
+class SliceModelView(SliceMixin, SupersetModelView, DeleteMixin): # pylint: disable=too-many-ancestors
route_base = "/chart"
datamodel = SQLAInterface(Slice)
include_route_methods = RouteMethod.CRUD_SET | {
diff --git a/superset/views/dashboard/views.py b/superset/views/dashboard/views.py
index 0b41a67ee..33252084a 100644
--- a/superset/views/dashboard/views.py
+++ b/superset/views/dashboard/views.py
@@ -42,9 +42,7 @@ from superset.views.base import (
from superset.views.dashboard.mixin import DashboardMixin
-class DashboardModelView(
- DashboardMixin, SupersetModelView, DeleteMixin
-): # pylint: disable=too-many-ancestors
+class DashboardModelView(DashboardMixin, SupersetModelView, DeleteMixin): # pylint: disable=too-many-ancestors
route_base = "/dashboard"
datamodel = SQLAInterface(DashboardModel)
# TODO disable api_read and api_delete (used by cypress)
diff --git a/superset/views/database/views.py b/superset/views/database/views.py
index 876f69353..0e1ea3cb4 100644
--- a/superset/views/database/views.py
+++ b/superset/views/database/views.py
@@ -79,9 +79,7 @@ def upload_stream_write(form_file_field: "FileStorage", path: str) -> None:
file_description.write(chunk)
-class DatabaseView(
- DatabaseMixin, SupersetModelView, DeleteMixin, YamlExportMixin
-): # pylint: disable=too-many-ancestors
+class DatabaseView(DatabaseMixin, SupersetModelView, DeleteMixin, YamlExportMixin): # pylint: disable=too-many-ancestors
datamodel = SQLAInterface(models.Database)
class_permission_name = "Database"
diff --git a/superset/views/filters.py b/superset/views/filters.py
index 625566b98..19407219e 100644
--- a/superset/views/filters.py
+++ b/superset/views/filters.py
@@ -29,7 +29,6 @@ logger = logging.getLogger(__name__)
class FilterRelatedOwners(BaseFilter): # pylint: disable=too-few-public-methods
-
"""
A filter to allow searching for related owners of a resource.
@@ -55,7 +54,6 @@ class FilterRelatedOwners(BaseFilter): # pylint: disable=too-few-public-methods
class BaseFilterRelatedUsers(BaseFilter): # pylint: disable=too-few-public-methods
-
"""
Filter to apply on related users. Will exclude users in EXCLUDE_USERS_FROM_LISTS
diff --git a/superset/views/key_value.py b/superset/views/key_value.py
index 47fc7ff4a..e4d41cb2e 100644
--- a/superset/views/key_value.py
+++ b/superset/views/key_value.py
@@ -29,7 +29,6 @@ from superset.views.base import BaseSupersetView, deprecated, json_error_respons
class KV(BaseSupersetView):
-
"""Used for storing and retrieving key value pairs"""
@staticmethod
diff --git a/superset/views/sql_lab/views.py b/superset/views/sql_lab/views.py
index 8c21eea69..4ed5143bb 100644
--- a/superset/views/sql_lab/views.py
+++ b/superset/views/sql_lab/views.py
@@ -50,9 +50,7 @@ class SavedQueryView(BaseSupersetView):
return super().render_app_template()
-class SavedQueryViewApi(
- SupersetModelView, DeleteMixin
-): # pylint: disable=too-many-ancestors
+class SavedQueryViewApi(SupersetModelView, DeleteMixin): # pylint: disable=too-many-ancestors
datamodel = SQLAInterface(SavedQuery)
include_route_methods = RouteMethod.CRUD_SET
route_base = "/savedqueryviewapi"
diff --git a/superset/views/utils.py b/superset/views/utils.py
index 86030b980..2d8fcd68d 100644
--- a/superset/views/utils.py
+++ b/superset/views/utils.py
@@ -108,7 +108,7 @@ def get_permissions(
data_permissions = defaultdict(set)
roles_permissions = security_manager.get_user_roles_permissions(user)
- for _, permissions in roles_permissions.items():
+ for _, permissions in roles_permissions.items(): # noqa: F402
for permission in permissions:
if permission[0] in ("datasource_access", "database_access"):
data_permissions[permission[0]].add(permission[1])
diff --git a/superset/viz.py b/superset/viz.py
index 447b36d53..5a4b32307 100644
--- a/superset/viz.py
+++ b/superset/viz.py
@@ -20,6 +20,7 @@
These objects represent the backend of all the visualizations that
Superset can render.
"""
+
from __future__ import annotations
import copy
@@ -83,7 +84,6 @@ from superset.utils.date_parser import get_since_until, parse_past_timedelta
from superset.utils.hashing import md5_sha_from_str
if TYPE_CHECKING:
- from superset.common.query_context_factory import QueryContextFactory
from superset.connectors.sqla.models import BaseDatasource
config = app.config
@@ -105,7 +105,6 @@ METRIC_KEYS = [
class BaseViz: # pylint: disable=too-many-public-methods
-
"""All visualizations derive this base class"""
viz_type: str | None = None
@@ -180,7 +179,7 @@ class BaseViz: # pylint: disable=too-many-public-methods
@staticmethod
@deprecated(deprecated_in="3.0")
def handle_js_int_overflow(
- data: dict[str, list[dict[str, Any]]]
+ data: dict[str, list[dict[str, Any]]],
) -> dict[str, list[dict[str, Any]]]:
for record in data.get("records", {}):
for k, v in list(record.items()):
@@ -701,7 +700,6 @@ class BaseViz: # pylint: disable=too-many-public-methods
class TimeTableViz(BaseViz):
-
"""A data table with rich time-series related columns"""
viz_type = "time_table"
@@ -748,7 +746,6 @@ class TimeTableViz(BaseViz):
class CalHeatmapViz(BaseViz):
-
"""Calendar heatmap."""
viz_type = "cal_heatmap"
@@ -829,7 +826,6 @@ class CalHeatmapViz(BaseViz):
class NVD3Viz(BaseViz):
-
"""Base class for all nvd3 vizs"""
credits = 'NVD3.org'
@@ -839,7 +835,6 @@ class NVD3Viz(BaseViz):
class BubbleViz(NVD3Viz):
-
"""Based on the NVD3 bubble chart"""
viz_type = "bubble"
@@ -892,7 +887,6 @@ class BubbleViz(NVD3Viz):
class BulletViz(NVD3Viz):
-
"""Based on the NVD3 bullet chart"""
viz_type = "bullet"
@@ -924,7 +918,6 @@ class BulletViz(NVD3Viz):
class NVD3TimeSeriesViz(NVD3Viz):
-
"""A rich line chart component with tons of options"""
viz_type = "line"
@@ -1130,7 +1123,6 @@ class NVD3TimeSeriesViz(NVD3Viz):
class NVD3TimeSeriesBarViz(NVD3TimeSeriesViz):
-
"""A bar chart where the x axis is time"""
viz_type = "bar"
@@ -1139,7 +1131,6 @@ class NVD3TimeSeriesBarViz(NVD3TimeSeriesViz):
class NVD3TimePivotViz(NVD3TimeSeriesViz):
-
"""Time Series - Periodicity Pivot"""
viz_type = "time_pivot"
@@ -1188,7 +1179,6 @@ class NVD3TimePivotViz(NVD3TimeSeriesViz):
class NVD3CompareTimeSeriesViz(NVD3TimeSeriesViz):
-
"""A line chart component where you can compare the % change over time"""
viz_type = "compare"
@@ -1196,7 +1186,6 @@ class NVD3CompareTimeSeriesViz(NVD3TimeSeriesViz):
class NVD3TimeSeriesStackedViz(NVD3TimeSeriesViz):
-
"""A rich stack area chart"""
viz_type = "area"
@@ -1206,7 +1195,6 @@ class NVD3TimeSeriesStackedViz(NVD3TimeSeriesViz):
class HistogramViz(BaseViz):
-
"""Histogram"""
viz_type = "histogram"
@@ -1267,7 +1255,6 @@ class HistogramViz(BaseViz):
class DistributionBarViz(BaseViz):
-
"""A good old bar chart"""
viz_type = "dist_bar"
@@ -1355,7 +1342,6 @@ class DistributionBarViz(BaseViz):
class SankeyViz(BaseViz):
-
"""A Sankey diagram that requires a parent-child dataset"""
viz_type = "sankey"
@@ -1429,7 +1415,6 @@ class SankeyViz(BaseViz):
class ChordViz(BaseViz):
-
"""A Chord diagram"""
viz_type = "chord"
@@ -1470,7 +1455,6 @@ class ChordViz(BaseViz):
class CountryMapViz(BaseViz):
-
"""A country centric"""
viz_type = "country_map"
@@ -1507,7 +1491,6 @@ class CountryMapViz(BaseViz):
class WorldMapViz(BaseViz):
-
"""A country centric world map"""
viz_type = "world_map"
@@ -1571,7 +1554,6 @@ class WorldMapViz(BaseViz):
class ParallelCoordinatesViz(BaseViz):
-
"""Interactive parallel coordinate implementation
Uses this amazing javascript library
@@ -1606,7 +1588,6 @@ class ParallelCoordinatesViz(BaseViz):
class HeatmapViz(BaseViz):
-
"""A nice heatmap visualization that support high density through canvas"""
viz_type = "heatmap"
@@ -1666,7 +1647,6 @@ class HeatmapViz(BaseViz):
class HorizonViz(NVD3TimeSeriesViz):
-
"""Horizon chart
https://www.npmjs.com/package/d3-horizon-chart
@@ -1681,7 +1661,6 @@ class HorizonViz(NVD3TimeSeriesViz):
class MapboxViz(BaseViz):
-
"""Rich maps made with Mapbox"""
viz_type = "mapbox"
@@ -1829,7 +1808,6 @@ class MapboxViz(BaseViz):
class DeckGLMultiLayer(BaseViz):
-
"""Pile on multiple DeckGL layers"""
viz_type = "deck_multi"
@@ -1858,7 +1836,6 @@ class DeckGLMultiLayer(BaseViz):
class BaseDeckGLViz(BaseViz):
-
"""Base class for deck.gl visualizations"""
is_timeseries = False
@@ -2035,7 +2012,6 @@ class BaseDeckGLViz(BaseViz):
class DeckScatterViz(BaseDeckGLViz):
-
"""deck.gl's ScatterLayer"""
viz_type = "deck_scatter"
@@ -2089,7 +2065,6 @@ class DeckScatterViz(BaseDeckGLViz):
class DeckScreengrid(BaseDeckGLViz):
-
"""deck.gl's ScreenGridLayer"""
viz_type = "deck_screengrid"
@@ -2119,7 +2094,6 @@ class DeckScreengrid(BaseDeckGLViz):
class DeckGrid(BaseDeckGLViz):
-
"""deck.gl's DeckLayer"""
viz_type = "deck_grid"
@@ -2154,7 +2128,6 @@ def geohash_to_json(geohash_code: str) -> list[list[float]]:
class DeckPathViz(BaseDeckGLViz):
-
"""deck.gl's PathLayer"""
viz_type = "deck_path"
@@ -2205,7 +2178,6 @@ class DeckPathViz(BaseDeckGLViz):
class DeckPolygon(DeckPathViz):
-
"""deck.gl's Polygon Layer"""
viz_type = "deck_polygon"
@@ -2242,7 +2214,6 @@ class DeckPolygon(DeckPathViz):
class DeckHex(BaseDeckGLViz):
-
"""deck.gl's DeckLayer"""
viz_type = "deck_hex"
@@ -2265,7 +2236,6 @@ class DeckHex(BaseDeckGLViz):
class DeckHeatmap(BaseDeckGLViz):
-
"""deck.gl's HeatmapLayer"""
viz_type = "deck_heatmap"
@@ -2286,7 +2256,6 @@ class DeckHeatmap(BaseDeckGLViz):
class DeckContour(BaseDeckGLViz):
-
"""deck.gl's ContourLayer"""
viz_type = "deck_contour"
@@ -2307,7 +2276,6 @@ class DeckContour(BaseDeckGLViz):
class DeckGeoJson(BaseDeckGLViz):
-
"""deck.gl's GeoJSONLayer"""
viz_type = "deck_geojson"
@@ -2328,7 +2296,6 @@ class DeckGeoJson(BaseDeckGLViz):
class DeckArc(BaseDeckGLViz):
-
"""deck.gl's Arc Layer"""
viz_type = "deck_arc"
@@ -2363,7 +2330,6 @@ class DeckArc(BaseDeckGLViz):
class EventFlowViz(BaseViz):
-
"""A visualization to explore patterns in event sequences"""
viz_type = "event_flow"
@@ -2397,7 +2363,6 @@ class EventFlowViz(BaseViz):
class PairedTTestViz(BaseViz):
-
"""A table displaying paired t-test values"""
viz_type = "paired_ttest"
@@ -2503,7 +2468,6 @@ class RoseViz(NVD3TimeSeriesViz):
class PartitionViz(NVD3TimeSeriesViz):
-
"""
A hierarchical data visualization with support for time series.
"""
diff --git a/tests/common/logger_utils.py b/tests/common/logger_utils.py
index 8cb443cac..e783527c8 100644
--- a/tests/common/logger_utils.py
+++ b/tests/common/logger_utils.py
@@ -169,5 +169,6 @@ def _make_decorator(
def _get_logger(decorated: Decorated) -> Logger:
module = getmodule(decorated)
return module.__dict__.get(
- _LOGGER_VAR_NAME, logging.getLogger(module.__name__) # type: ignore
+ _LOGGER_VAR_NAME,
+ logging.getLogger(module.__name__), # type: ignore
)
diff --git a/tests/conftest.py b/tests/conftest.py
index c659a8524..3a712ec58 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -28,11 +28,8 @@ from __future__ import annotations
from typing import Callable, TYPE_CHECKING
from unittest.mock import MagicMock, Mock, PropertyMock
-from flask import current_app, Flask
-from flask.ctx import AppContext
from pytest import fixture
-from superset.app import create_app
from tests.example_data.data_loading.pandas.pandas_data_loader import PandasDataLoader
from tests.example_data.data_loading.pandas.pands_data_loading_conf import (
PandasLoaderConfigurations,
diff --git a/tests/example_data/data_generator/base_generator.py b/tests/example_data/data_generator/base_generator.py
index 38ab2e541..7571b33b8 100644
--- a/tests/example_data/data_generator/base_generator.py
+++ b/tests/example_data/data_generator/base_generator.py
@@ -21,5 +21,4 @@ from typing import Any
class ExampleDataGenerator(ABC):
@abstractmethod
- def generate(self) -> Iterable[dict[Any, Any]]:
- ...
+ def generate(self) -> Iterable[dict[Any, Any]]: ...
diff --git a/tests/example_data/data_generator/birth_names/birth_names_generator_factory.py b/tests/example_data/data_generator/birth_names/birth_names_generator_factory.py
index 0eba3f1c3..f4157a631 100644
--- a/tests/example_data/data_generator/birth_names/birth_names_generator_factory.py
+++ b/tests/example_data/data_generator/birth_names/birth_names_generator_factory.py
@@ -30,8 +30,7 @@ class BirthNamesGeneratorFactory(ABC):
__factory: BirthNamesGeneratorFactory
@abstractmethod
- def _make(self) -> BirthNamesGenerator:
- ...
+ def _make(self) -> BirthNamesGenerator: ...
@classmethod
def make(cls) -> BirthNamesGenerator:
diff --git a/tests/example_data/data_loading/base_data_loader.py b/tests/example_data/data_loading/base_data_loader.py
index 770150c14..7d9dd57c5 100644
--- a/tests/example_data/data_loading/base_data_loader.py
+++ b/tests/example_data/data_loading/base_data_loader.py
@@ -25,9 +25,7 @@ if TYPE_CHECKING:
class DataLoader(ABC):
@abstractmethod
- def load_table(self, table: Table) -> None:
- ...
+ def load_table(self, table: Table) -> None: ...
@abstractmethod
- def remove_table(self, table_name: str) -> None:
- ...
+ def remove_table(self, table_name: str) -> None: ...
diff --git a/tests/example_data/data_loading/data_definitions/types.py b/tests/example_data/data_loading/data_definitions/types.py
index a1ed10434..70420cb9b 100644
--- a/tests/example_data/data_loading/data_definitions/types.py
+++ b/tests/example_data/data_loading/data_definitions/types.py
@@ -46,8 +46,7 @@ class Table:
class TableMetaDataFactory(ABC):
@abstractmethod
- def make(self) -> TableMetaData:
- ...
+ def make(self) -> TableMetaData: ...
def make_table(self, data: Iterable[dict[Any, Any]]) -> Table:
metadata = self.make()
diff --git a/tests/example_data/data_loading/pandas/pandas_data_loader.py b/tests/example_data/data_loading/pandas/pandas_data_loader.py
index 49dcf3b2d..8dfbd21f6 100644
--- a/tests/example_data/data_loading/pandas/pandas_data_loader.py
+++ b/tests/example_data/data_loading/pandas/pandas_data_loader.py
@@ -79,5 +79,4 @@ class PandasDataLoader(DataLoader):
class TableToDfConvertor(ABC):
@abstractmethod
- def convert(self, table: Table) -> DataFrame:
- ...
+ def convert(self, table: Table) -> DataFrame: ...
diff --git a/tests/integration_tests/access_tests.py b/tests/integration_tests/access_tests.py
index 6ece4a081..f436a82da 100644
--- a/tests/integration_tests/access_tests.py
+++ b/tests/integration_tests/access_tests.py
@@ -16,34 +16,33 @@
# under the License.
# isort:skip_file
"""Unit tests for Superset"""
+
import unittest
from typing import Optional
import pytest
from flask.ctx import AppContext
from pytest_mock import MockFixture
-from sqlalchemy import inspect
+from sqlalchemy import inspect # noqa: F401
from tests.integration_tests.fixtures.birth_names_dashboard import (
- load_birth_names_dashboard_with_slices,
- load_birth_names_data,
+ load_birth_names_dashboard_with_slices, # noqa: F401
+ load_birth_names_data, # noqa: F401
)
from tests.integration_tests.fixtures.world_bank_dashboard import (
- load_world_bank_dashboard_with_slices,
- load_world_bank_data,
+ load_world_bank_dashboard_with_slices, # noqa: F401
+ load_world_bank_data, # noqa: F401
)
from tests.integration_tests.fixtures.energy_dashboard import (
- load_energy_table_with_slice,
- load_energy_table_data,
+ load_energy_table_with_slice, # noqa: F401
+ load_energy_table_data, # noqa: F401
)
-from tests.integration_tests.test_app import app # isort:skip
-from superset import db, security_manager
-from superset.connectors.sqla.models import SqlaTable
-from superset.models import core as models
+from superset import security_manager
+from superset.connectors.sqla.models import SqlaTable # noqa: F401
+from superset.models import core as models # noqa: F401
from superset.utils.core import get_user_id, get_username, override_user
-from superset.utils.database import get_example_database
+from superset.utils.database import get_example_database # noqa: F401
-from tests.integration_tests.base_tests import SupersetTestCase
ROLE_TABLES_PERM_DATA = {
"role_name": "override_me",
diff --git a/tests/integration_tests/advanced_data_type/api_tests.py b/tests/integration_tests/advanced_data_type/api_tests.py
index e86506946..5080617f0 100644
--- a/tests/integration_tests/advanced_data_type/api_tests.py
+++ b/tests/integration_tests/advanced_data_type/api_tests.py
@@ -16,12 +16,13 @@
# under the License.
# isort:skip_file
"""Unit tests for Superset"""
+
import json
import prison
-from superset.utils.core import get_example_default_schema
+from superset.utils.core import get_example_default_schema # noqa: F401
-from tests.integration_tests.utils.get_dashboards import get_dashboards_ids
+from tests.integration_tests.utils.get_dashboards import get_dashboards_ids # noqa: F401
from unittest import mock
from sqlalchemy import Column
from typing import Any
@@ -76,7 +77,7 @@ def test_types_type_request(test_client, login_as_admin):
"""
Advanced Data Type API: Test to see if the API call returns all the valid advanced data types
"""
- uri = f"api/v1/advanced_data_type/types"
+    uri = "api/v1/advanced_data_type/types"
response_value = test_client.get(uri)
data = json.loads(response_value.data.decode("utf-8"))
assert response_value.status_code == 200
diff --git a/tests/integration_tests/annotation_layers/api_tests.py b/tests/integration_tests/annotation_layers/api_tests.py
index 3a4389262..20ca2da0f 100644
--- a/tests/integration_tests/annotation_layers/api_tests.py
+++ b/tests/integration_tests/annotation_layers/api_tests.py
@@ -16,19 +16,20 @@
# under the License.
# isort:skip_file
"""Unit tests for Superset"""
+
import json
import pytest
import prison
from sqlalchemy.sql import func
-import tests.integration_tests.test_app
+import tests.integration_tests.test_app # noqa: F401
from superset import db
from superset.models.annotations import Annotation, AnnotationLayer
from tests.integration_tests.base_tests import SupersetTestCase
from tests.integration_tests.annotation_layers.fixtures import (
- create_annotation_layers,
+ create_annotation_layers, # noqa: F401
get_end_dttm,
get_start_dttm,
)
diff --git a/tests/integration_tests/annotation_layers/fixtures.py b/tests/integration_tests/annotation_layers/fixtures.py
index ac25d28d4..a3fb6e184 100644
--- a/tests/integration_tests/annotation_layers/fixtures.py
+++ b/tests/integration_tests/annotation_layers/fixtures.py
@@ -22,7 +22,6 @@ from flask.ctx import AppContext
from superset import db
from superset.models.annotations import Annotation, AnnotationLayer
-from tests.integration_tests.test_app import app
ANNOTATION_LAYERS_COUNT = 10
ANNOTATIONS_COUNT = 5
diff --git a/tests/integration_tests/base_api_tests.py b/tests/integration_tests/base_api_tests.py
index 1afcc8ef6..1a65ce166 100644
--- a/tests/integration_tests/base_api_tests.py
+++ b/tests/integration_tests/base_api_tests.py
@@ -19,19 +19,19 @@ import json
from unittest.mock import patch
from tests.integration_tests.fixtures.world_bank_dashboard import (
- load_world_bank_dashboard_with_slices,
- load_world_bank_data,
+ load_world_bank_dashboard_with_slices, # noqa: F401
+ load_world_bank_data, # noqa: F401
)
import pytest
from flask_appbuilder.models.sqla.interface import SQLAInterface
import prison
-import tests.integration_tests.test_app
+import tests.integration_tests.test_app # noqa: F401
from superset import db, security_manager
from superset.extensions import appbuilder
from superset.models.dashboard import Dashboard
-from superset.views.base_api import BaseSupersetModelRestApi, requires_json
+from superset.views.base_api import BaseSupersetModelRestApi, requires_json # noqa: F401
from tests.integration_tests.base_tests import SupersetTestCase
from tests.integration_tests.conftest import with_config
diff --git a/tests/integration_tests/base_tests.py b/tests/integration_tests/base_tests.py
index 84041697f..be08db539 100644
--- a/tests/integration_tests/base_tests.py
+++ b/tests/integration_tests/base_tests.py
@@ -16,6 +16,7 @@
# under the License.
# isort:skip_file
"""Unit tests for Superset"""
+
from datetime import datetime
import imp
import json
@@ -29,7 +30,7 @@ from flask_appbuilder.security.sqla import models as ab_models
from flask_testing import TestCase
from sqlalchemy.engine.interfaces import Dialect
from sqlalchemy.ext.declarative import DeclarativeMeta
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import Session # noqa: F401
from sqlalchemy.sql import func
from sqlalchemy.dialects.mysql import dialect
diff --git a/tests/integration_tests/cache_tests.py b/tests/integration_tests/cache_tests.py
index ace511cde..b1bf2822f 100644
--- a/tests/integration_tests/cache_tests.py
+++ b/tests/integration_tests/cache_tests.py
@@ -15,18 +15,19 @@
# specific language governing permissions and limitations
# under the License.
"""Unit tests for Superset with caching"""
+
import json
import pytest
-from superset import app, db
+from superset import app, db # noqa: F401
from superset.common.db_query_status import QueryStatus
from superset.extensions import cache_manager
from tests.integration_tests.base_tests import SupersetTestCase
from tests.integration_tests.constants import ADMIN_USERNAME
from tests.integration_tests.fixtures.birth_names_dashboard import (
- load_birth_names_dashboard_with_slices,
- load_birth_names_data,
+ load_birth_names_dashboard_with_slices, # noqa: F401
+ load_birth_names_data, # noqa: F401
)
diff --git a/tests/integration_tests/cachekeys/api_tests.py b/tests/integration_tests/cachekeys/api_tests.py
index db2c51d12..1cc421245 100644
--- a/tests/integration_tests/cachekeys/api_tests.py
+++ b/tests/integration_tests/cachekeys/api_tests.py
@@ -16,6 +16,7 @@
# under the License.
# isort:skip_file
"""Unit tests for Superset"""
+
from typing import Any
import pytest
@@ -28,8 +29,8 @@ from tests.integration_tests.base_tests import (
post_assert_metric,
)
from tests.integration_tests.fixtures.birth_names_dashboard import (
- load_birth_names_dashboard_with_slices,
- load_birth_names_data,
+ load_birth_names_dashboard_with_slices, # noqa: F401
+ load_birth_names_data, # noqa: F401
)
@@ -56,7 +57,7 @@ def test_invalidate_existing_cache(invalidate):
rv = invalidate({"datasource_uids": ["3__table"]})
assert rv.status_code == 201
- assert cache_manager.cache.get("cache_key") == None
+    assert cache_manager.cache.get("cache_key") is None
assert (
not db.session.query(CacheKey).filter(CacheKey.cache_key == "cache_key").first()
)
diff --git a/tests/integration_tests/celery_tests.py b/tests/integration_tests/celery_tests.py
index 384e6674a..48497b977 100644
--- a/tests/integration_tests/celery_tests.py
+++ b/tests/integration_tests/celery_tests.py
@@ -16,6 +16,7 @@
# under the License.
# isort:skip_file
"""Unit tests for Superset Celery worker"""
+
import datetime
import random
import string
@@ -23,14 +24,14 @@ import time
import unittest.mock as mock
from typing import Optional
from tests.integration_tests.fixtures.birth_names_dashboard import (
- load_birth_names_dashboard_with_slices,
- load_birth_names_data,
+ load_birth_names_dashboard_with_slices, # noqa: F401
+ load_birth_names_data, # noqa: F401
)
import pytest
-import flask
-from flask import current_app, has_app_context
+import flask # noqa: F401
+from flask import current_app, has_app_context # noqa: F401
from superset import db, sql_lab
from superset.common.db_query_status import QueryStatus
diff --git a/tests/integration_tests/charts/api_tests.py b/tests/integration_tests/charts/api_tests.py
index 16d44fe5c..2bed0b369 100644
--- a/tests/integration_tests/charts/api_tests.py
+++ b/tests/integration_tests/charts/api_tests.py
@@ -15,6 +15,7 @@
# specific language governing permissions and limitations
# under the License.
"""Unit tests for Superset"""
+
import json
from io import BytesIO
from unittest import mock
@@ -23,7 +24,6 @@ from zipfile import is_zipfile, ZipFile
import prison
import pytest
import yaml
-from flask.ctx import AppContext
from flask_babel import lazy_gettext as _
from parameterized import parameterized
from sqlalchemy import and_
@@ -32,29 +32,29 @@ from sqlalchemy.sql import func
from superset.commands.chart.data.get_data_command import ChartDataCommand
from superset.commands.chart.exceptions import ChartDataQueryFailedError
from superset.connectors.sqla.models import SqlaTable
-from superset.extensions import cache_manager, db, security_manager
+from superset.extensions import cache_manager, db, security_manager # noqa: F401
from superset.models.core import Database, FavStar, FavStarClassName
from superset.models.dashboard import Dashboard
from superset.models.slice import Slice
from superset.reports.models import ReportSchedule, ReportScheduleType
from superset.utils.core import get_example_default_schema
-from superset.utils.database import get_example_database
-from superset.viz import viz_types
+from superset.utils.database import get_example_database # noqa: F401
+from superset.viz import viz_types # noqa: F401
from tests.integration_tests.base_api_tests import ApiOwnersTestCaseMixin
from tests.integration_tests.base_tests import SupersetTestCase
-from tests.integration_tests.conftest import with_feature_flags
+from tests.integration_tests.conftest import with_feature_flags # noqa: F401
from tests.integration_tests.constants import (
ADMIN_USERNAME,
ALPHA_USERNAME,
GAMMA_USERNAME,
)
from tests.integration_tests.fixtures.birth_names_dashboard import (
- load_birth_names_dashboard_with_slices,
- load_birth_names_data,
+ load_birth_names_dashboard_with_slices, # noqa: F401
+ load_birth_names_data, # noqa: F401
)
from tests.integration_tests.fixtures.energy_dashboard import (
- load_energy_table_data,
- load_energy_table_with_slice,
+ load_energy_table_data, # noqa: F401
+ load_energy_table_with_slice, # noqa: F401
)
from tests.integration_tests.fixtures.importexport import (
chart_config,
@@ -64,12 +64,12 @@ from tests.integration_tests.fixtures.importexport import (
dataset_metadata_config,
)
from tests.integration_tests.fixtures.unicode_dashboard import (
- load_unicode_dashboard_with_slice,
- load_unicode_data,
+ load_unicode_dashboard_with_slice, # noqa: F401
+ load_unicode_data, # noqa: F401
)
from tests.integration_tests.fixtures.world_bank_dashboard import (
- load_world_bank_dashboard_with_slices,
- load_world_bank_data,
+ load_world_bank_dashboard_with_slices, # noqa: F401
+ load_world_bank_data, # noqa: F401
)
from tests.integration_tests.insert_chart_mixin import InsertChartMixin
from tests.integration_tests.test_app import app
@@ -83,115 +83,121 @@ class TestChartApi(ApiOwnersTestCaseMixin, InsertChartMixin, SupersetTestCase):
resource_name = "chart"
@pytest.fixture(autouse=True)
- def clear_data_cache(self, app_context: AppContext):
- cache_manager.data_cache.clear()
- yield
+ def clear_data_cache(self):
+ with app.app_context():
+ cache_manager.data_cache.clear()
+ yield
@pytest.fixture()
def create_charts(self):
- charts = []
- admin = self.get_user("admin")
- for cx in range(CHARTS_FIXTURE_COUNT - 1):
- charts.append(self.insert_chart(f"name{cx}", [admin.id], 1))
- fav_charts = []
- for cx in range(round(CHARTS_FIXTURE_COUNT / 2)):
- fav_star = FavStar(
- user_id=admin.id, class_name="slice", obj_id=charts[cx].id
- )
- db.session.add(fav_star)
- db.session.commit()
- fav_charts.append(fav_star)
- yield charts
+ with self.create_app().app_context():
+ charts = []
+ admin = self.get_user("admin")
+ for cx in range(CHARTS_FIXTURE_COUNT - 1):
+ charts.append(self.insert_chart(f"name{cx}", [admin.id], 1))
+ fav_charts = []
+ for cx in range(round(CHARTS_FIXTURE_COUNT / 2)):
+ fav_star = FavStar(
+ user_id=admin.id, class_name="slice", obj_id=charts[cx].id
+ )
+ db.session.add(fav_star)
+ db.session.commit()
+ fav_charts.append(fav_star)
+ yield charts
- # rollback changes
- for chart in charts:
- db.session.delete(chart)
- for fav_chart in fav_charts:
- db.session.delete(fav_chart)
- db.session.commit()
+ # rollback changes
+ for chart in charts:
+ db.session.delete(chart)
+ for fav_chart in fav_charts:
+ db.session.delete(fav_chart)
+ db.session.commit()
@pytest.fixture()
def create_charts_created_by_gamma(self):
- charts = []
- user = self.get_user("gamma")
- for cx in range(CHARTS_FIXTURE_COUNT - 1):
- charts.append(self.insert_chart(f"gamma{cx}", [user.id], 1))
- yield charts
- # rollback changes
- for chart in charts:
- db.session.delete(chart)
- db.session.commit()
+ with self.create_app().app_context():
+ charts = []
+ user = self.get_user("gamma")
+ for cx in range(CHARTS_FIXTURE_COUNT - 1):
+ charts.append(self.insert_chart(f"gamma{cx}", [user.id], 1))
+ yield charts
+ # rollback changes
+ for chart in charts:
+ db.session.delete(chart)
+ db.session.commit()
@pytest.fixture()
def create_certified_charts(self):
- certified_charts = []
- admin = self.get_user("admin")
- for cx in range(CHARTS_FIXTURE_COUNT):
- certified_charts.append(
- self.insert_chart(
- f"certified{cx}",
- [admin.id],
- 1,
- certified_by="John Doe",
- certification_details="Sample certification",
+ with self.create_app().app_context():
+ certified_charts = []
+ admin = self.get_user("admin")
+ for cx in range(CHARTS_FIXTURE_COUNT):
+ certified_charts.append(
+ self.insert_chart(
+ f"certified{cx}",
+ [admin.id],
+ 1,
+ certified_by="John Doe",
+ certification_details="Sample certification",
+ )
)
- )
- yield certified_charts
+ yield certified_charts
- # rollback changes
- for chart in certified_charts:
- db.session.delete(chart)
- db.session.commit()
+ # rollback changes
+ for chart in certified_charts:
+ db.session.delete(chart)
+ db.session.commit()
@pytest.fixture()
def create_chart_with_report(self):
- admin = self.get_user("admin")
- chart = self.insert_chart(f"chart_report", [admin.id], 1)
- report_schedule = ReportSchedule(
- type=ReportScheduleType.REPORT,
- name="report_with_chart",
- crontab="* * * * *",
- chart=chart,
- )
- db.session.commit()
+ with self.create_app().app_context():
+ admin = self.get_user("admin")
+ chart = self.insert_chart("chart_report", [admin.id], 1) # noqa: F541
+ report_schedule = ReportSchedule(
+ type=ReportScheduleType.REPORT,
+ name="report_with_chart",
+ crontab="* * * * *",
+ chart=chart,
+ )
+ db.session.commit()
- yield chart
+ yield chart
- # rollback changes
- db.session.delete(report_schedule)
- db.session.delete(chart)
- db.session.commit()
+ # rollback changes
+ db.session.delete(report_schedule)
+ db.session.delete(chart)
+ db.session.commit()
@pytest.fixture()
def add_dashboard_to_chart(self):
- admin = self.get_user("admin")
+ with self.create_app().app_context():
+ admin = self.get_user("admin")
- self.chart = self.insert_chart("My chart", [admin.id], 1)
+ self.chart = self.insert_chart("My chart", [admin.id], 1)
- self.original_dashboard = Dashboard()
- self.original_dashboard.dashboard_title = "Original Dashboard"
- self.original_dashboard.slug = "slug"
- self.original_dashboard.owners = [admin]
- self.original_dashboard.slices = [self.chart]
- self.original_dashboard.published = False
- db.session.add(self.original_dashboard)
+ self.original_dashboard = Dashboard()
+ self.original_dashboard.dashboard_title = "Original Dashboard"
+ self.original_dashboard.slug = "slug"
+ self.original_dashboard.owners = [admin]
+ self.original_dashboard.slices = [self.chart]
+ self.original_dashboard.published = False
+ db.session.add(self.original_dashboard)
- self.new_dashboard = Dashboard()
- self.new_dashboard.dashboard_title = "New Dashboard"
- self.new_dashboard.slug = "new_slug"
- self.new_dashboard.owners = [admin]
- self.new_dashboard.published = False
- db.session.add(self.new_dashboard)
+ self.new_dashboard = Dashboard()
+ self.new_dashboard.dashboard_title = "New Dashboard"
+ self.new_dashboard.slug = "new_slug"
+ self.new_dashboard.owners = [admin]
+ self.new_dashboard.published = False
+ db.session.add(self.new_dashboard)
- db.session.commit()
+ db.session.commit()
- yield self.chart
+ yield self.chart
- db.session.delete(self.original_dashboard)
- db.session.delete(self.new_dashboard)
- db.session.delete(self.chart)
- db.session.commit()
+ db.session.delete(self.original_dashboard)
+ db.session.delete(self.new_dashboard)
+ db.session.delete(self.chart)
+ db.session.commit()
def test_info_security_chart(self):
"""
@@ -756,7 +762,7 @@ class TestChartApi(ApiOwnersTestCaseMixin, InsertChartMixin, SupersetTestCase):
Chart API: Test update chart admin can clear owner list
"""
chart_data = {"slice_name": "title1_changed", "owners": []}
- admin = self.get_user("admin")
+ self.get_user("admin") # noqa: F841
self.login(username="admin")
uri = f"api/v1/chart/{self.chart.id}"
rv = self.put_assert_metric(uri, chart_data, "put")
@@ -935,7 +941,7 @@ class TestChartApi(ApiOwnersTestCaseMixin, InsertChartMixin, SupersetTestCase):
"owners": [1000],
}
self.login(ADMIN_USERNAME)
- uri = f"api/v1/chart/"
+ uri = "api/v1/chart/" # noqa: F541
rv = self.client.post(uri, json=chart_data)
self.assertEqual(rv.status_code, 422)
response = json.loads(rv.data.decode("utf-8"))
@@ -1026,7 +1032,7 @@ class TestChartApi(ApiOwnersTestCaseMixin, InsertChartMixin, SupersetTestCase):
Chart API: Test get charts
"""
self.login(ADMIN_USERNAME)
- uri = f"api/v1/chart/"
+ uri = "api/v1/chart/" # noqa: F541
rv = self.get_assert_metric(uri, "get_list")
self.assertEqual(rv.status_code, 200)
data = json.loads(rv.data.decode("utf-8"))
@@ -1122,39 +1128,40 @@ class TestChartApi(ApiOwnersTestCaseMixin, InsertChartMixin, SupersetTestCase):
@pytest.fixture()
def load_energy_charts(self):
- admin = self.get_user("admin")
- energy_table = (
- db.session.query(SqlaTable)
- .filter_by(table_name="energy_usage")
- .one_or_none()
- )
- energy_table_id = 1
- if energy_table:
- energy_table_id = energy_table.id
- chart1 = self.insert_chart(
- "foo_a", [admin.id], energy_table_id, description="ZY_bar"
- )
- chart2 = self.insert_chart(
- "zy_foo", [admin.id], energy_table_id, description="desc1"
- )
- chart3 = self.insert_chart(
- "foo_b", [admin.id], energy_table_id, description="desc1zy_"
- )
- chart4 = self.insert_chart(
- "foo_c", [admin.id], energy_table_id, viz_type="viz_zy_"
- )
- chart5 = self.insert_chart(
- "bar", [admin.id], energy_table_id, description="foo"
- )
+ with app.app_context():
+ admin = self.get_user("admin")
+ energy_table = (
+ db.session.query(SqlaTable)
+ .filter_by(table_name="energy_usage")
+ .one_or_none()
+ )
+ energy_table_id = 1
+ if energy_table:
+ energy_table_id = energy_table.id
+ chart1 = self.insert_chart(
+ "foo_a", [admin.id], energy_table_id, description="ZY_bar"
+ )
+ chart2 = self.insert_chart(
+ "zy_foo", [admin.id], energy_table_id, description="desc1"
+ )
+ chart3 = self.insert_chart(
+ "foo_b", [admin.id], energy_table_id, description="desc1zy_"
+ )
+ chart4 = self.insert_chart(
+ "foo_c", [admin.id], energy_table_id, viz_type="viz_zy_"
+ )
+ chart5 = self.insert_chart(
+ "bar", [admin.id], energy_table_id, description="foo"
+ )
- yield
- # rollback changes
- db.session.delete(chart1)
- db.session.delete(chart2)
- db.session.delete(chart3)
- db.session.delete(chart4)
- db.session.delete(chart5)
- db.session.commit()
+ yield
+ # rollback changes
+ db.session.delete(chart1)
+ db.session.delete(chart2)
+ db.session.delete(chart3)
+ db.session.delete(chart4)
+ db.session.delete(chart5)
+ db.session.commit()
@pytest.mark.usefixtures("load_energy_charts")
def test_get_charts_custom_filter(self):
@@ -1913,7 +1920,7 @@ class TestChartApi(ApiOwnersTestCaseMixin, InsertChartMixin, SupersetTestCase):
assert json.loads(
self.client.put(
- f"/api/v1/chart/warm_up_cache",
+ "/api/v1/chart/warm_up_cache", # noqa: F541
json={"chart_id": slc.id},
).data
) == {
@@ -1940,7 +1947,7 @@ class TestChartApi(ApiOwnersTestCaseMixin, InsertChartMixin, SupersetTestCase):
assert json.loads(
self.client.put(
- f"/api/v1/chart/warm_up_cache",
+ "/api/v1/chart/warm_up_cache", # noqa: F541
json={"chart_id": slc.id},
).data
) == {
diff --git a/tests/integration_tests/charts/commands_tests.py b/tests/integration_tests/charts/commands_tests.py
index 5d3a4986b..3601056fb 100644
--- a/tests/integration_tests/charts/commands_tests.py
+++ b/tests/integration_tests/charts/commands_tests.py
@@ -19,7 +19,7 @@ from unittest.mock import patch
import pytest
import yaml
-from flask import g
+from flask import g # noqa: F401
from superset import db, security_manager
from superset.commands.chart.create import CreateChartCommand
@@ -38,12 +38,12 @@ from superset.models.core import Database
from superset.models.slice import Slice
from tests.integration_tests.base_tests import SupersetTestCase
from tests.integration_tests.fixtures.birth_names_dashboard import (
- load_birth_names_dashboard_with_slices,
- load_birth_names_data,
+ load_birth_names_dashboard_with_slices, # noqa: F401
+ load_birth_names_data, # noqa: F401
)
from tests.integration_tests.fixtures.energy_dashboard import (
- load_energy_table_data,
- load_energy_table_with_slice,
+ load_energy_table_data, # noqa: F401
+ load_energy_table_with_slice, # noqa: F401
)
from tests.integration_tests.fixtures.importexport import (
chart_config,
diff --git a/tests/integration_tests/charts/data/api_tests.py b/tests/integration_tests/charts/data/api_tests.py
index 1dd5e7113..8122eac9d 100644
--- a/tests/integration_tests/charts/data/api_tests.py
+++ b/tests/integration_tests/charts/data/api_tests.py
@@ -16,6 +16,7 @@
# under the License.
# isort:skip_file
"""Unit tests for Superset"""
+
import json
import unittest
import copy
@@ -32,20 +33,20 @@ from tests.integration_tests.conftest import with_feature_flags
from superset.charts.data.api import ChartDataRestApi
from superset.models.sql_lab import Query
from tests.integration_tests.base_tests import SupersetTestCase, test_client
-from tests.integration_tests.annotation_layers.fixtures import create_annotation_layers
+from tests.integration_tests.annotation_layers.fixtures import create_annotation_layers # noqa: F401
from tests.integration_tests.constants import (
ADMIN_USERNAME,
GAMMA_NO_CSV_USERNAME,
GAMMA_USERNAME,
)
from tests.integration_tests.fixtures.birth_names_dashboard import (
- load_birth_names_dashboard_with_slices,
- load_birth_names_data,
+ load_birth_names_dashboard_with_slices, # noqa: F401
+ load_birth_names_data, # noqa: F401
)
from tests.integration_tests.test_app import app
from tests.integration_tests.fixtures.energy_dashboard import (
- load_energy_table_with_slice,
- load_energy_table_data,
+ load_energy_table_with_slice, # noqa: F401
+ load_energy_table_data, # noqa: F401
)
import pytest
from superset.models.slice import Slice
@@ -69,7 +70,7 @@ from superset.common.chart_data import ChartDataResultFormat, ChartDataResultTyp
from tests.common.query_context_generator import ANNOTATION_LAYERS
from tests.integration_tests.fixtures.query_context import get_query_context
-from tests.integration_tests.test_app import app
+from tests.integration_tests.test_app import app # noqa: F811
CHART_DATA_URI = "api/v1/chart/data"
@@ -131,7 +132,9 @@ class BaseTestChartDataApi(SupersetTestCase):
def quote_name(self, name: str):
if get_main_database().backend in {"presto", "hive"}:
- with get_example_database().get_inspector_with_context() as inspector: # E: Ne
+ with (
+ get_example_database().get_inspector_with_context() as inspector
+ ): # E: Ne
return inspector.engine.dialect.identifier_preparer.quote_identifier(
name
)
@@ -636,9 +639,9 @@ class TestPostChartDataApi(BaseTestChartDataApi):
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_with_invalid_where_parameter_closing_unclosed__400(self):
self.query_context_payload["queries"][0]["filters"] = []
- self.query_context_payload["queries"][0]["extras"][
- "where"
- ] = "state = 'CA') OR (state = 'NY'"
+ self.query_context_payload["queries"][0]["extras"]["where"] = (
+ "state = 'CA') OR (state = 'NY'"
+ )
rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data")
@@ -672,9 +675,9 @@ class TestPostChartDataApi(BaseTestChartDataApi):
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_with_invalid_having_parameter_closing_and_comment__400(self):
self.query_context_payload["queries"][0]["filters"] = []
- self.query_context_payload["queries"][0]["extras"][
- "having"
- ] = "COUNT(1) = 0) UNION ALL SELECT 'abc', 1--comment"
+ self.query_context_payload["queries"][0]["extras"]["having"] = (
+ "COUNT(1) = 0) UNION ALL SELECT 'abc', 1--comment"
+ )
rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data")
@@ -709,9 +712,9 @@ class TestPostChartDataApi(BaseTestChartDataApi):
self.query_context_payload["queries"][0]["filters"] = [
{"col": "gender", "op": "==", "val": "boy"}
]
- self.query_context_payload["queries"][0]["extras"][
- "where"
- ] = "('boy' = '{{ filter_values('gender', 'xyz' )[0] }}')"
+ self.query_context_payload["queries"][0]["extras"]["where"] = (
+ "('boy' = '{{ filter_values('gender', 'xyz' )[0] }}')"
+ )
rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data")
result = rv.json["result"][0]["query"]
if get_example_database().backend != "presto":
@@ -858,9 +861,9 @@ class TestPostChartDataApi(BaseTestChartDataApi):
"""
annotation_layers = []
- self.query_context_payload["queries"][0][
- "annotation_layers"
- ] = annotation_layers
+ self.query_context_payload["queries"][0]["annotation_layers"] = (
+ annotation_layers
+ )
# formula
annotation_layers.append(ANNOTATION_LAYERS[AnnotationType.FORMULA])
@@ -1171,7 +1174,7 @@ class TestGetChartDataApi(BaseTestChartDataApi):
orig_run = ChartDataCommand.run
def mock_run(self, **kwargs):
- assert kwargs["force_cached"] == True
+ assert kwargs["force_cached"] is True # noqa: E712
# override force_cached to get result from DB
return orig_run(self, force_cached=False)
@@ -1217,7 +1220,7 @@ class TestGetChartDataApi(BaseTestChartDataApi):
orig_run = ChartDataCommand.run
def mock_run(self, **kwargs):
- assert kwargs["force_cached"] == True
+ assert kwargs["force_cached"] is True # noqa: E712
# override force_cached to get result from DB
return orig_run(self, force_cached=False)
@@ -1382,7 +1385,7 @@ def test_data_cache_default_timeout(
def test_chart_cache_timeout(
- load_energy_table_with_slice: list[Slice],
+ load_energy_table_with_slice: list[Slice], # noqa: F811
test_client,
login_as_admin,
physical_query_context,
diff --git a/tests/integration_tests/charts/schema_tests.py b/tests/integration_tests/charts/schema_tests.py
index 3f0b4e4b5..8f74d9de2 100644
--- a/tests/integration_tests/charts/schema_tests.py
+++ b/tests/integration_tests/charts/schema_tests.py
@@ -16,6 +16,7 @@
# under the License.
# isort:skip_file
"""Unit tests for Superset"""
+
from unittest import mock
import pytest
@@ -25,8 +26,8 @@ from tests.integration_tests.test_app import app
from superset.charts.schemas import ChartDataQueryContextSchema
from tests.integration_tests.base_tests import SupersetTestCase
from tests.integration_tests.fixtures.birth_names_dashboard import (
- load_birth_names_dashboard_with_slices,
- load_birth_names_data,
+ load_birth_names_dashboard_with_slices, # noqa: F401
+ load_birth_names_data, # noqa: F401
)
from tests.integration_tests.fixtures.query_context import get_query_context
diff --git a/tests/integration_tests/cli_tests.py b/tests/integration_tests/cli_tests.py
index 1b2c5f8b4..048612a08 100644
--- a/tests/integration_tests/cli_tests.py
+++ b/tests/integration_tests/cli_tests.py
@@ -22,7 +22,7 @@ from unittest import mock
from zipfile import is_zipfile, ZipFile
import pytest
-import yaml
+import yaml # noqa: F401
from freezegun import freeze_time
import superset.cli.importexport
@@ -30,8 +30,8 @@ import superset.cli.thumbnails
from superset import app, db
from superset.models.dashboard import Dashboard
from tests.integration_tests.fixtures.birth_names_dashboard import (
- load_birth_names_dashboard_with_slices,
- load_birth_names_data,
+ load_birth_names_dashboard_with_slices, # noqa: F401
+ load_birth_names_data, # noqa: F401
)
logger = logging.getLogger(__name__)
diff --git a/tests/integration_tests/conftest.py b/tests/integration_tests/conftest.py
index 77ddbe1d8..185f7d410 100644
--- a/tests/integration_tests/conftest.py
+++ b/tests/integration_tests/conftest.py
@@ -146,7 +146,7 @@ def setup_sample_data() -> Any:
def drop_from_schema(engine: Engine, schema_name: str):
- schemas = engine.execute(f"SHOW SCHEMAS").fetchall()
+ schemas = engine.execute(f"SHOW SCHEMAS").fetchall() # noqa: F541
if schema_name not in [s[0] for s in schemas]:
# schema doesn't exist
return
diff --git a/tests/integration_tests/core_tests.py b/tests/integration_tests/core_tests.py
index 99994a26b..bcb9aa329 100644
--- a/tests/integration_tests/core_tests.py
+++ b/tests/integration_tests/core_tests.py
@@ -15,6 +15,7 @@
# specific language governing permissions and limitations
# under the License.
"""Unit tests for Superset"""
+
import datetime
import doctest
import html
@@ -56,16 +57,16 @@ from superset.views.database.views import DatabaseView
from tests.integration_tests.conftest import with_feature_flags
from tests.integration_tests.constants import ADMIN_USERNAME, GAMMA_USERNAME
from tests.integration_tests.fixtures.birth_names_dashboard import (
- load_birth_names_dashboard_with_slices,
- load_birth_names_data,
+ load_birth_names_dashboard_with_slices, # noqa: F401
+ load_birth_names_data, # noqa: F401
)
from tests.integration_tests.fixtures.energy_dashboard import (
- load_energy_table_data,
- load_energy_table_with_slice,
+ load_energy_table_data, # noqa: F401
+ load_energy_table_with_slice, # noqa: F401
)
from tests.integration_tests.fixtures.world_bank_dashboard import (
- load_world_bank_dashboard_with_slices,
- load_world_bank_data,
+ load_world_bank_dashboard_with_slices, # noqa: F401
+ load_world_bank_data, # noqa: F401
)
from tests.integration_tests.test_app import app
@@ -97,8 +98,8 @@ class TestCore(SupersetTestCase):
def insert_dashboard_created_by(self, username: str) -> Dashboard:
user = self.get_user(username)
dashboard = self.insert_dashboard(
- f"create_title_test",
- f"create_slug_test",
+ f"create_title_test", # noqa: F541
+ f"create_slug_test", # noqa: F541
[user.id],
created_by=user,
)
@@ -176,7 +177,7 @@ class TestCore(SupersetTestCase):
@pytest.mark.usefixtures("load_energy_table_with_slice")
def test_save_slice(self):
self.login(ADMIN_USERNAME)
- slice_name = f"Energy Sankey"
+ slice_name = f"Energy Sankey" # noqa: F541
slice_id = self.get_slice(slice_name).id
copy_name_prefix = "Test Sankey"
copy_name = f"{copy_name_prefix}[save]{random.random()}"
@@ -478,7 +479,7 @@ class TestCore(SupersetTestCase):
def create_sample_csvfile(self, filename: str, content: list[str]) -> None:
with open(filename, "w+") as test_file:
- for l in content:
+ for l in content: # noqa: E741
test_file.write(f"{l}\n")
def create_sample_excelfile(self, filename: str, content: dict[str, str]) -> None:
@@ -1185,7 +1186,7 @@ class TestCore(SupersetTestCase):
self.login(ADMIN_USERNAME)
random_key = "random_key"
mock_command.return_value = random_key
- slice_name = f"Energy Sankey"
+ slice_name = f"Energy Sankey" # noqa: F541
slice_id = self.get_slice(slice_name).id
form_data = {"slice_id": slice_id, "viz_type": "line", "datasource": "1__table"}
rv = self.client.get(
diff --git a/tests/integration_tests/css_templates/api_tests.py b/tests/integration_tests/css_templates/api_tests.py
index b5ece91ef..dfb6f6e90 100644
--- a/tests/integration_tests/css_templates/api_tests.py
+++ b/tests/integration_tests/css_templates/api_tests.py
@@ -16,6 +16,7 @@
# under the License.
# isort:skip_file
"""Unit tests for Superset"""
+
import json
import pytest
import prison
@@ -23,10 +24,10 @@ from datetime import datetime
from freezegun import freeze_time
from sqlalchemy.sql import func
-import tests.integration_tests.test_app
+import tests.integration_tests.test_app # noqa: F401
from superset import db
from superset.models.core import CssTemplate
-from superset.utils.database import get_example_database
+from superset.utils.database import get_example_database # noqa: F401
from tests.integration_tests.base_tests import SupersetTestCase
from tests.integration_tests.constants import ADMIN_USERNAME
@@ -74,7 +75,7 @@ class TestCssTemplateApi(SupersetTestCase):
css_templates = db.session.query(CssTemplate).all()
self.login(ADMIN_USERNAME)
- uri = f"api/v1/css_template/"
+ uri = "api/v1/css_template/" # noqa: F541
rv = self.get_assert_metric(uri, "get_list")
assert rv.status_code == 200
data = json.loads(rv.data.decode("utf-8"))
@@ -168,7 +169,7 @@ class TestCssTemplateApi(SupersetTestCase):
CssTemplate API: Test info
"""
self.login(ADMIN_USERNAME)
- uri = f"api/v1/css_template/_info"
+ uri = "api/v1/css_template/_info" # noqa: F541
rv = self.get_assert_metric(uri, "info")
assert rv.status_code == 200
@@ -243,7 +244,7 @@ class TestCssTemplateApi(SupersetTestCase):
}
self.login(ADMIN_USERNAME)
- uri = f"api/v1/css_template/"
+ uri = "api/v1/css_template/" # noqa: F541
rv = self.post_assert_metric(uri, post_data, "post")
data = json.loads(rv.data.decode("utf-8"))
assert rv.status_code == 201
diff --git a/tests/integration_tests/csv_upload_tests.py b/tests/integration_tests/csv_upload_tests.py
index 299340b0c..67c96c4fa 100644
--- a/tests/integration_tests/csv_upload_tests.py
+++ b/tests/integration_tests/csv_upload_tests.py
@@ -16,6 +16,7 @@
# under the License.
# isort:skip_file
"""Unit tests for Superset CSV upload"""
+
import json
import logging
import os
@@ -29,7 +30,7 @@ import pytest
import superset.utils.database
from superset.sql_parse import Table
-from tests.integration_tests.conftest import ADMIN_SCHEMA_NAME
+from tests.integration_tests.conftest import ADMIN_SCHEMA_NAME # noqa: F401
from superset import db
from superset import security_manager
from superset.models.core import Database
diff --git a/tests/integration_tests/dashboard_tests.py b/tests/integration_tests/dashboard_tests.py
index 57c73f83d..1852adba4 100644
--- a/tests/integration_tests/dashboard_tests.py
+++ b/tests/integration_tests/dashboard_tests.py
@@ -16,6 +16,7 @@
# under the License.
# isort:skip_file
"""Unit tests for Superset"""
+
import re
import unittest
from random import random
@@ -24,7 +25,6 @@ import pytest
from flask import Response, escape, url_for
from sqlalchemy import func
-from tests.integration_tests.test_app import app
from superset import db, security_manager
from superset.connectors.sqla.models import SqlaTable
from superset.models.dashboard import Dashboard
@@ -35,21 +35,21 @@ from tests.integration_tests.constants import (
GAMMA_USERNAME,
)
from tests.integration_tests.fixtures.birth_names_dashboard import (
- load_birth_names_dashboard_with_slices,
- load_birth_names_data,
+ load_birth_names_dashboard_with_slices, # noqa: F401
+ load_birth_names_data, # noqa: F401
)
from tests.integration_tests.fixtures.energy_dashboard import (
- load_energy_table_with_slice,
- load_energy_table_data,
+ load_energy_table_with_slice, # noqa: F401
+ load_energy_table_data, # noqa: F401
)
-from tests.integration_tests.fixtures.public_role import public_role_like_gamma
+from tests.integration_tests.fixtures.public_role import public_role_like_gamma # noqa: F401
from tests.integration_tests.fixtures.unicode_dashboard import (
- load_unicode_dashboard_with_position,
- load_unicode_data,
+ load_unicode_dashboard_with_position, # noqa: F401
+ load_unicode_data, # noqa: F401
)
from tests.integration_tests.fixtures.world_bank_dashboard import (
- load_world_bank_dashboard_with_slices,
- load_world_bank_data,
+ load_world_bank_dashboard_with_slices, # noqa: F401
+ load_world_bank_data, # noqa: F401
)
from .base_tests import SupersetTestCase
diff --git a/tests/integration_tests/dashboard_utils.py b/tests/integration_tests/dashboard_utils.py
index 41dd8dc97..98498cac0 100644
--- a/tests/integration_tests/dashboard_utils.py
+++ b/tests/integration_tests/dashboard_utils.py
@@ -19,7 +19,7 @@
import json
from typing import Optional
-from pandas import DataFrame
+from pandas import DataFrame # noqa: F401
from superset import db
from superset.connectors.sqla.models import SqlaTable
diff --git a/tests/integration_tests/dashboards/api_tests.py b/tests/integration_tests/dashboards/api_tests.py
index fd63666c2..949b65511 100644
--- a/tests/integration_tests/dashboards/api_tests.py
+++ b/tests/integration_tests/dashboards/api_tests.py
@@ -16,6 +16,7 @@
# under the License.
# isort:skip_file
"""Unit tests for Superset"""
+
import json
from io import BytesIO
from time import sleep
@@ -30,7 +31,7 @@ import yaml
from freezegun import freeze_time
from sqlalchemy import and_
-from superset import app, db, security_manager
+from superset import app, db, security_manager # noqa: F401
from superset.models.dashboard import Dashboard
from superset.models.core import FavStar, FavStarClassName
from superset.reports.models import ReportSchedule, ReportScheduleType
@@ -39,7 +40,7 @@ from superset.utils.core import backend, override_user
from tests.integration_tests.base_api_tests import ApiOwnersTestCaseMixin
from tests.integration_tests.base_tests import SupersetTestCase
-from tests.integration_tests.conftest import with_feature_flags
+from tests.integration_tests.conftest import with_feature_flags # noqa: F401
from tests.integration_tests.constants import (
ADMIN_USERNAME,
ALPHA_USERNAME,
@@ -56,12 +57,12 @@ from tests.integration_tests.fixtures.importexport import (
)
from tests.integration_tests.utils.get_dashboards import get_dashboards_ids
from tests.integration_tests.fixtures.birth_names_dashboard import (
- load_birth_names_dashboard_with_slices,
- load_birth_names_data,
+ load_birth_names_dashboard_with_slices, # noqa: F401
+ load_birth_names_data, # noqa: F401
)
from tests.integration_tests.fixtures.world_bank_dashboard import (
- load_world_bank_dashboard_with_slices,
- load_world_bank_data,
+ load_world_bank_dashboard_with_slices, # noqa: F401
+ load_world_bank_data, # noqa: F401
)
DASHBOARDS_FIXTURE_COUNT = 10
@@ -148,7 +149,9 @@ class TestDashboardApi(ApiOwnersTestCaseMixin, InsertChartMixin, SupersetTestCas
with self.create_app().app_context():
admin = self.get_user("admin")
dashboard = self.insert_dashboard(
- f"dashboard_report", "dashboard_report", [admin.id]
+ "dashboard_report",
+ "dashboard_report",
+ [admin.id], # noqa: F541
)
report_schedule = ReportSchedule(
type=ReportScheduleType.REPORT,
@@ -186,8 +189,6 @@ class TestDashboardApi(ApiOwnersTestCaseMixin, InsertChartMixin, SupersetTestCas
def test_get_dashboard_datasets_as_guest(self, is_guest_user, has_guest_access):
self.login(ADMIN_USERNAME)
uri = "api/v1/dashboard/world_health/datasets"
- is_guest_user = True
- has_guest_access = True
response = self.get_assert_metric(uri, "get_datasets")
self.assertEqual(response.status_code, 200)
data = json.loads(response.data.decode("utf-8"))
@@ -1708,7 +1709,7 @@ class TestDashboardApi(ApiOwnersTestCaseMixin, InsertChartMixin, SupersetTestCas
rv = self.get_assert_metric(uri, "export")
- headers = f"attachment; filename=dashboard_export_20220101T000000.zip"
+ headers = "attachment; filename=dashboard_export_20220101T000000.zip" # noqa: F541
assert rv.status_code == 200
assert rv.headers["Content-Disposition"] == headers
@@ -2017,7 +2018,7 @@ class TestDashboardApi(ApiOwnersTestCaseMixin, InsertChartMixin, SupersetTestCas
API: Test get filter related roles
"""
self.login(ADMIN_USERNAME)
- uri = f"api/v1/dashboard/related/roles"
+ uri = "api/v1/dashboard/related/roles" # noqa: F541
rv = self.client.get(uri)
assert rv.status_code == 200
@@ -2059,7 +2060,7 @@ class TestDashboardApi(ApiOwnersTestCaseMixin, InsertChartMixin, SupersetTestCas
"superset.views.filters.current_app.config",
{"EXTRA_RELATED_QUERY_FILTERS": {"role": _base_filter}},
):
- uri = f"api/v1/dashboard/related/roles"
+ uri = "api/v1/dashboard/related/roles" # noqa: F541
rv = self.client.get(uri)
assert rv.status_code == 200
response = json.loads(rv.data.decode("utf-8"))
@@ -2147,7 +2148,7 @@ class TestDashboardApi(ApiOwnersTestCaseMixin, InsertChartMixin, SupersetTestCas
self.assertEqual(data["count"], len(expected_models))
def test_gets_not_created_by_user_dashboards_filter(self):
- dashboard = self.insert_dashboard(f"title", f"slug", [])
+ dashboard = self.insert_dashboard("title", "slug", []) # noqa: F541
expected_models = (
db.session.query(Dashboard).filter(Dashboard.created_by_fk.is_(None)).all()
)
diff --git a/tests/integration_tests/dashboards/base_case.py b/tests/integration_tests/dashboards/base_case.py
index 3600ec877..8b9b8e95e 100644
--- a/tests/integration_tests/dashboards/base_case.py
+++ b/tests/integration_tests/dashboards/base_case.py
@@ -20,9 +20,9 @@ from flask import Response
from superset import app, security_manager
from tests.integration_tests.base_tests import SupersetTestCase
-from tests.integration_tests.dashboards.consts import *
+from tests.integration_tests.dashboards.consts import * # noqa: F403
from tests.integration_tests.dashboards.dashboard_test_utils import (
- build_save_dash_parts,
+ build_save_dash_parts, # noqa: F401
)
from tests.integration_tests.dashboards.superset_factory_util import (
delete_all_inserted_objects,
@@ -31,31 +31,31 @@ from tests.integration_tests.dashboards.superset_factory_util import (
class DashboardTestCase(SupersetTestCase):
def get_dashboard_via_api_by_id(self, dashboard_id: int) -> Response:
- uri = DASHBOARD_API_URL_FORMAT.format(dashboard_id)
+ uri = DASHBOARD_API_URL_FORMAT.format(dashboard_id) # noqa: F405
return self.get_assert_metric(uri, "get")
def get_dashboard_view_response(self, dashboard_to_access) -> Response:
return self.client.get(dashboard_to_access.url)
def get_dashboard_api_response(self, dashboard_to_access) -> Response:
- return self.client.get(DASHBOARD_API_URL_FORMAT.format(dashboard_to_access.id))
+ return self.client.get(DASHBOARD_API_URL_FORMAT.format(dashboard_to_access.id)) # noqa: F405
def get_dashboards_list_response(self) -> Response:
- return self.client.get(GET_DASHBOARDS_LIST_VIEW)
+ return self.client.get(GET_DASHBOARDS_LIST_VIEW) # noqa: F405
def get_dashboards_api_response(self) -> Response:
- return self.client.get(DASHBOARDS_API_URL)
+ return self.client.get(DASHBOARDS_API_URL) # noqa: F405
def delete_dashboard_via_view(self, dashboard_id: int) -> Response:
- delete_dashboard_url = DELETE_DASHBOARD_VIEW_URL_FORMAT.format(dashboard_id)
+ delete_dashboard_url = DELETE_DASHBOARD_VIEW_URL_FORMAT.format(dashboard_id) # noqa: F405
return self.get_resp(delete_dashboard_url, {})
def delete_dashboard_via_api(self, dashboard_id):
- uri = DASHBOARD_API_URL_FORMAT.format(dashboard_id)
+ uri = DASHBOARD_API_URL_FORMAT.format(dashboard_id) # noqa: F405
return self.delete_assert_metric(uri, "delete")
def bulk_delete_dashboard_via_api(self, dashboard_ids):
- uri = DASHBOARDS_API_URL_WITH_QUERY_FORMAT.format(prison.dumps(dashboard_ids))
+ uri = DASHBOARDS_API_URL_WITH_QUERY_FORMAT.format(prison.dumps(dashboard_ids)) # noqa: F405
return self.delete_assert_metric(uri, "bulk_delete")
def delete_dashboard(self, dashboard_id: int) -> Response:
diff --git a/tests/integration_tests/dashboards/commands_tests.py b/tests/integration_tests/dashboards/commands_tests.py
index 94473a2d4..4c199372f 100644
--- a/tests/integration_tests/dashboards/commands_tests.py
+++ b/tests/integration_tests/dashboards/commands_tests.py
@@ -16,7 +16,7 @@
# under the License.
import itertools
import json
-from unittest.mock import MagicMock, patch
+from unittest.mock import MagicMock, patch # noqa: F401
import pytest
import yaml
@@ -47,8 +47,8 @@ from tests.integration_tests.fixtures.importexport import (
dataset_metadata_config,
)
from tests.integration_tests.fixtures.world_bank_dashboard import (
- load_world_bank_dashboard_with_slices,
- load_world_bank_data,
+ load_world_bank_dashboard_with_slices, # noqa: F401
+ load_world_bank_data, # noqa: F401
)
diff --git a/tests/integration_tests/dashboards/dao_tests.py b/tests/integration_tests/dashboards/dao_tests.py
index 9638ee104..e030a49e2 100644
--- a/tests/integration_tests/dashboards/dao_tests.py
+++ b/tests/integration_tests/dashboards/dao_tests.py
@@ -21,14 +21,14 @@ import time
from unittest.mock import patch
import pytest
-import tests.integration_tests.test_app # pylint: disable=unused-import
+import tests.integration_tests.test_app # pylint: disable=unused-import # noqa: F401
from superset import db, security_manager
from superset.daos.dashboard import DashboardDAO
from superset.models.dashboard import Dashboard
from tests.integration_tests.base_tests import SupersetTestCase
from tests.integration_tests.fixtures.world_bank_dashboard import (
- load_world_bank_dashboard_with_slices,
- load_world_bank_data,
+ load_world_bank_dashboard_with_slices, # noqa: F401
+ load_world_bank_data, # noqa: F401
)
diff --git a/tests/integration_tests/dashboards/filter_state/api_tests.py b/tests/integration_tests/dashboards/filter_state/api_tests.py
index 4dd02bfb6..4aa3ae01c 100644
--- a/tests/integration_tests/dashboards/filter_state/api_tests.py
+++ b/tests/integration_tests/dashboards/filter_state/api_tests.py
@@ -15,24 +15,24 @@
# specific language governing permissions and limitations
# under the License.
import json
-from unittest.mock import patch
+from unittest.mock import patch # noqa: F401
import pytest
from flask.ctx import AppContext
from flask_appbuilder.security.sqla.models import User
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import Session # noqa: F401
from superset import db
-from superset.commands.dashboard.exceptions import DashboardAccessDeniedError
+from superset.commands.dashboard.exceptions import DashboardAccessDeniedError # noqa: F401
from superset.commands.temporary_cache.entry import Entry
from superset.extensions import cache_manager
from superset.models.dashboard import Dashboard
from superset.temporary_cache.utils import cache_key
from tests.integration_tests.fixtures.world_bank_dashboard import (
- load_world_bank_dashboard_with_slices,
- load_world_bank_data,
+ load_world_bank_dashboard_with_slices, # noqa: F401
+ load_world_bank_data, # noqa: F401
)
-from tests.integration_tests.test_app import app
+from tests.integration_tests.test_app import app # noqa: F401
KEY = "test-key"
INITIAL_VALUE = json.dumps({"test": "initial value"})
@@ -40,7 +40,7 @@ UPDATED_VALUE = json.dumps({"test": "updated value"})
@pytest.fixture
-def dashboard_id(app_context: AppContext, load_world_bank_dashboard_with_slices) -> int:
+def dashboard_id(app_context: AppContext, load_world_bank_dashboard_with_slices) -> int: # noqa: F811
dashboard = db.session.query(Dashboard).filter_by(slug="world_health").one()
return dashboard.id
diff --git a/tests/integration_tests/dashboards/permalink/api_tests.py b/tests/integration_tests/dashboards/permalink/api_tests.py
index bfa20fd8a..c48b95d70 100644
--- a/tests/integration_tests/dashboards/permalink/api_tests.py
+++ b/tests/integration_tests/dashboards/permalink/api_tests.py
@@ -15,24 +15,24 @@
# specific language governing permissions and limitations
# under the License.
from collections.abc import Iterator
-from unittest.mock import patch
+from unittest.mock import patch # noqa: F401
from uuid import uuid3
import pytest
-from flask_appbuilder.security.sqla.models import User
-from sqlalchemy.orm import Session
+from flask_appbuilder.security.sqla.models import User # noqa: F401
+from sqlalchemy.orm import Session # noqa: F401
from superset import db
-from superset.commands.dashboard.exceptions import DashboardAccessDeniedError
+from superset.commands.dashboard.exceptions import DashboardAccessDeniedError # noqa: F401
from superset.key_value.models import KeyValueEntry
from superset.key_value.types import KeyValueResource
from superset.key_value.utils import decode_permalink_id
from superset.models.dashboard import Dashboard
from tests.integration_tests.fixtures.world_bank_dashboard import (
- load_world_bank_dashboard_with_slices,
- load_world_bank_data,
+ load_world_bank_dashboard_with_slices, # noqa: F401
+ load_world_bank_data, # noqa: F401
)
-from tests.integration_tests.test_app import app
+from tests.integration_tests.test_app import app # noqa: F401
STATE = {
"dataMask": {"FILTER_1": "foo"},
@@ -41,7 +41,7 @@ STATE = {
@pytest.fixture
-def dashboard_id(load_world_bank_dashboard_with_slices) -> int:
+def dashboard_id(load_world_bank_dashboard_with_slices) -> int: # noqa: F811
dashboard = db.session.query(Dashboard).filter_by(slug="world_health").one()
return dashboard.id
diff --git a/tests/integration_tests/dashboards/security/base_case.py b/tests/integration_tests/dashboards/security/base_case.py
index ddb0c119e..b52260508 100644
--- a/tests/integration_tests/dashboards/security/base_case.py
+++ b/tests/integration_tests/dashboards/security/base_case.py
@@ -16,8 +16,7 @@
# under the License.
from typing import Optional
-import pytest
-from flask import escape, Response
+from flask import Response
from superset.models.dashboard import Dashboard
from tests.integration_tests.dashboards.base_case import DashboardTestCase
diff --git a/tests/integration_tests/dashboards/security/security_dataset_tests.py b/tests/integration_tests/dashboards/security/security_dataset_tests.py
index 1ca0b0bd7..998c3db41 100644
--- a/tests/integration_tests/dashboards/security/security_dataset_tests.py
+++ b/tests/integration_tests/dashboards/security/security_dataset_tests.py
@@ -15,64 +15,69 @@
# specific language governing permissions and limitations
# under the License.
"""Unit tests for Superset"""
+
import json
import prison
import pytest
-from flask import escape
+from flask import escape # noqa: F401
from superset import app
from superset.daos.dashboard import DashboardDAO
from tests.integration_tests.constants import ADMIN_USERNAME, GAMMA_USERNAME
from tests.integration_tests.dashboards.base_case import DashboardTestCase
-from tests.integration_tests.dashboards.consts import *
-from tests.integration_tests.dashboards.dashboard_test_utils import *
-from tests.integration_tests.dashboards.superset_factory_util import *
+from tests.integration_tests.dashboards.consts import * # noqa: F403
+from tests.integration_tests.dashboards.dashboard_test_utils import * # noqa: F403
+from tests.integration_tests.dashboards.superset_factory_util import * # noqa: F403
from tests.integration_tests.fixtures.energy_dashboard import (
- load_energy_table_data,
- load_energy_table_with_slice,
+ load_energy_table_data, # noqa: F401
+ load_energy_table_with_slice, # noqa: F401
)
class TestDashboardDatasetSecurity(DashboardTestCase):
@pytest.fixture
def load_dashboard(self):
- table = db.session.query(SqlaTable).filter_by(table_name="energy_usage").one()
- # get a slice from the allowed table
- slice = db.session.query(Slice).filter_by(slice_name="Energy Sankey").one()
+ with app.app_context():
+ table = (
+ db.session.query(SqlaTable).filter_by(table_name="energy_usage").one() # noqa: F405
+ )
+ # get a slice from the allowed table
+ slice = db.session.query(Slice).filter_by(slice_name="Energy Sankey").one() # noqa: F405
- self.grant_public_access_to_table(table)
+ self.grant_public_access_to_table(table)
- pytest.hidden_dash_slug = f"hidden_dash_{random_slug()}"
- pytest.published_dash_slug = f"published_dash_{random_slug()}"
+ pytest.hidden_dash_slug = f"hidden_dash_{random_slug()}" # noqa: F405
+ pytest.published_dash_slug = f"published_dash_{random_slug()}" # noqa: F405
- # Create a published and hidden dashboard and add them to the database
- published_dash = Dashboard()
- published_dash.dashboard_title = "Published Dashboard"
- published_dash.slug = pytest.published_dash_slug
- published_dash.slices = [slice]
- published_dash.published = True
+ # Create a published and hidden dashboard and add them to the database
+ published_dash = Dashboard() # noqa: F405
+ published_dash.dashboard_title = "Published Dashboard"
+ published_dash.slug = pytest.published_dash_slug
+ published_dash.slices = [slice]
+ published_dash.published = True
- hidden_dash = Dashboard()
- hidden_dash.dashboard_title = "Hidden Dashboard"
- hidden_dash.slug = pytest.hidden_dash_slug
- hidden_dash.slices = [slice]
- hidden_dash.published = False
+ hidden_dash = Dashboard() # noqa: F405
+ hidden_dash.dashboard_title = "Hidden Dashboard"
+ hidden_dash.slug = pytest.hidden_dash_slug
+ hidden_dash.slices = [slice]
+ hidden_dash.published = False
- db.session.add(published_dash)
- db.session.add(hidden_dash)
- yield db.session.commit()
+ db.session.add(published_dash) # noqa: F405
+ db.session.add(hidden_dash) # noqa: F405
+ yield db.session.commit() # noqa: F405
- self.revoke_public_access_to_table(table)
- db.session.delete(published_dash)
- db.session.delete(hidden_dash)
- db.session.commit()
+ self.revoke_public_access_to_table(table)
+ db.session.delete(published_dash) # noqa: F405
+ db.session.delete(hidden_dash) # noqa: F405
+ db.session.commit() # noqa: F405
def test_dashboard_access__admin_can_access_all(self):
# arrange
self.login(ADMIN_USERNAME)
dashboard_title_by_url = {
- dash.url: dash.dashboard_title for dash in get_all_dashboards()
+ dash.url: dash.dashboard_title
+ for dash in get_all_dashboards() # noqa: F405
}
# act
@@ -87,14 +92,14 @@ class TestDashboardDatasetSecurity(DashboardTestCase):
def test_get_dashboards__users_are_dashboards_owners(self):
# arrange
username = "gamma"
- user = security_manager.find_user(username)
- my_owned_dashboard = create_dashboard_to_db(
+ user = security_manager.find_user(username) # noqa: F405
+ my_owned_dashboard = create_dashboard_to_db( # noqa: F405
dashboard_title="My Dashboard",
published=False,
owners=[user],
)
- not_my_owned_dashboard = create_dashboard_to_db(
+ not_my_owned_dashboard = create_dashboard_to_db( # noqa: F405
dashboard_title="Not My Dashboard",
published=False,
)
@@ -102,7 +107,7 @@ class TestDashboardDatasetSecurity(DashboardTestCase):
self.login(user.username)
# act
- get_dashboards_response = self.get_resp(DASHBOARDS_API_URL)
+ get_dashboards_response = self.get_resp(DASHBOARDS_API_URL) # noqa: F405
# assert
self.assertIn(my_owned_dashboard.url, get_dashboards_response)
@@ -110,29 +115,29 @@ class TestDashboardDatasetSecurity(DashboardTestCase):
def test_get_dashboards__owners_can_view_empty_dashboard(self):
# arrange
- dash = create_dashboard_to_db("Empty Dashboard", slug="empty_dashboard")
+ dash = create_dashboard_to_db("Empty Dashboard", slug="empty_dashboard") # noqa: F405
dashboard_url = dash.url
- gamma_user = security_manager.find_user("gamma")
+ gamma_user = security_manager.find_user("gamma") # noqa: F405
self.login(gamma_user.username)
# act
- get_dashboards_response = self.get_resp(DASHBOARDS_API_URL)
+ get_dashboards_response = self.get_resp(DASHBOARDS_API_URL) # noqa: F405
# assert
self.assertNotIn(dashboard_url, get_dashboards_response)
def test_get_dashboards__user_can_not_view_unpublished_dash(self):
# arrange
- admin_user = security_manager.find_user(ADMIN_USERNAME)
- gamma_user = security_manager.find_user(GAMMA_USERNAME)
- admin_and_draft_dashboard = create_dashboard_to_db(
+ admin_user = security_manager.find_user(ADMIN_USERNAME) # noqa: F405
+ gamma_user = security_manager.find_user(GAMMA_USERNAME) # noqa: F405
+ admin_and_draft_dashboard = create_dashboard_to_db( # noqa: F405
dashboard_title="admin_owned_unpublished_dash", owners=[admin_user]
)
self.login(gamma_user.username)
# act - list dashboards as a gamma user
- get_dashboards_response_as_gamma = self.get_resp(DASHBOARDS_API_URL)
+ get_dashboards_response_as_gamma = self.get_resp(DASHBOARDS_API_URL) # noqa: F405
# assert
self.assertNotIn(
@@ -142,21 +147,21 @@ class TestDashboardDatasetSecurity(DashboardTestCase):
@pytest.mark.usefixtures("load_energy_table_with_slice", "load_dashboard")
def test_get_dashboards__users_can_view_permitted_dashboard(self):
# arrange
- username = random_str()
- new_role = f"role_{random_str()}"
+ username = random_str() # noqa: F405
+ new_role = f"role_{random_str()}" # noqa: F405
self.create_user_with_roles(username, [new_role], should_create_roles=True)
- accessed_table = get_sql_table_by_name("energy_usage")
+ accessed_table = get_sql_table_by_name("energy_usage") # noqa: F405
self.grant_role_access_to_table(accessed_table, new_role)
# get a slice from the allowed table
- slice_to_add_to_dashboards = get_slice_by_name("Energy Sankey")
+ slice_to_add_to_dashboards = get_slice_by_name("Energy Sankey") # noqa: F405
# Create a published and hidden dashboard and add them to the database
- first_dash = create_dashboard_to_db(
+ first_dash = create_dashboard_to_db( # noqa: F405
dashboard_title="Published Dashboard",
published=True,
slices=[slice_to_add_to_dashboards],
)
- second_dash = create_dashboard_to_db(
+ second_dash = create_dashboard_to_db( # noqa: F405
dashboard_title="Hidden Dashboard",
published=True,
slices=[slice_to_add_to_dashboards],
@@ -165,7 +170,7 @@ class TestDashboardDatasetSecurity(DashboardTestCase):
try:
self.login(username)
# act
- get_dashboards_response = self.get_resp(DASHBOARDS_API_URL)
+ get_dashboards_response = self.get_resp(DASHBOARDS_API_URL) # noqa: F405
# assert
self.assertIn(second_dash.url, get_dashboards_response)
@@ -178,14 +183,14 @@ class TestDashboardDatasetSecurity(DashboardTestCase):
Dashboard API: Test get dashboards no data access
"""
admin = self.get_user("admin")
- title = f"title{random_str()}"
- dashboard = create_dashboard_to_db(title, "slug1", owners=[admin])
+ title = f"title{random_str()}" # noqa: F405
+ dashboard = create_dashboard_to_db(title, "slug1", owners=[admin]) # noqa: F405
self.login(GAMMA_USERNAME)
arguments = {
"filters": [{"col": "dashboard_title", "opr": "sw", "value": title[0:8]}]
}
- uri = DASHBOARDS_API_URL_WITH_QUERY_FORMAT.format(prison.dumps(arguments))
+ uri = DASHBOARDS_API_URL_WITH_QUERY_FORMAT.format(prison.dumps(arguments)) # noqa: F405
rv = self.client.get(uri)
self.assert200(rv)
data = json.loads(rv.data.decode("utf-8"))
diff --git a/tests/integration_tests/dashboards/security/security_rbac_tests.py b/tests/integration_tests/dashboards/security/security_rbac_tests.py
index 820df3c40..5e10a6af0 100644
--- a/tests/integration_tests/dashboards/security/security_rbac_tests.py
+++ b/tests/integration_tests/dashboards/security/security_rbac_tests.py
@@ -15,9 +15,10 @@
# specific language governing permissions and limitations
# under the License.
"""Unit tests for Superset"""
+
import json
from unittest import mock
-from unittest.mock import patch
+from unittest.mock import patch # noqa: F401
import pytest
@@ -30,7 +31,7 @@ from tests.integration_tests.constants import (
GAMMA_SQLLAB_USERNAME,
GAMMA_USERNAME,
)
-from tests.integration_tests.dashboards.dashboard_test_utils import *
+from tests.integration_tests.dashboards.dashboard_test_utils import * # noqa: F403
from tests.integration_tests.dashboards.security.base_case import (
BaseTestDashboardSecurity,
)
@@ -41,14 +42,14 @@ from tests.integration_tests.dashboards.superset_factory_util import (
create_slice_to_db,
)
from tests.integration_tests.fixtures.birth_names_dashboard import (
- load_birth_names_dashboard_with_slices,
- load_birth_names_data,
+ load_birth_names_dashboard_with_slices, # noqa: F401
+ load_birth_names_data, # noqa: F401
)
-from tests.integration_tests.fixtures.public_role import public_role_like_gamma
+from tests.integration_tests.fixtures.public_role import public_role_like_gamma # noqa: F401
from tests.integration_tests.fixtures.query_context import get_query_context
from tests.integration_tests.fixtures.world_bank_dashboard import (
- load_world_bank_dashboard_with_slices,
- load_world_bank_data,
+ load_world_bank_dashboard_with_slices, # noqa: F401
+ load_world_bank_data, # noqa: F401
)
CHART_DATA_URI = "api/v1/chart/data"
@@ -74,8 +75,8 @@ class TestDashboardRoleBasedSecurity(BaseTestDashboardSecurity):
def test_get_dashboard_view__owner_can_access(self):
# arrange
- username = random_str()
- new_role = f"role_{random_str()}"
+ username = random_str() # noqa: F405
+ new_role = f"role_{random_str()}" # noqa: F405
owner = self.create_user_with_roles(
username, [new_role], should_create_roles=True
)
@@ -92,11 +93,11 @@ class TestDashboardRoleBasedSecurity(BaseTestDashboardSecurity):
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_get_dashboard_view__user_can_not_access_without_permission(self):
- username = random_str()
- new_role = f"role_{random_str()}"
+ username = random_str() # noqa: F405
+ new_role = f"role_{random_str()}" # noqa: F405
self.create_user_with_roles(username, [new_role], should_create_roles=True)
slice = (
- db.session.query(Slice)
+ db.session.query(Slice) # noqa: F405
.filter_by(slice_name="Girl Name Cloud")
.one_or_none()
)
@@ -116,10 +117,10 @@ class TestDashboardRoleBasedSecurity(BaseTestDashboardSecurity):
):
# arrange
dashboard_to_access = create_dashboard_to_db(published=False)
- username = random_str()
- new_role = f"role_{random_str()}"
+ username = random_str() # noqa: F405
+ new_role = f"role_{random_str()}" # noqa: F405
self.create_user_with_roles(username, [new_role], should_create_roles=True)
- grant_access_to_dashboard(dashboard_to_access, new_role)
+ grant_access_to_dashboard(dashboard_to_access, new_role) # noqa: F405
self.login(username)
# act
@@ -129,7 +130,7 @@ class TestDashboardRoleBasedSecurity(BaseTestDashboardSecurity):
assert response.status_code == 302
# post
- revoke_access_to_dashboard(dashboard_to_access, new_role)
+ revoke_access_to_dashboard(dashboard_to_access, new_role) # noqa: F405
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_get_dashboard_view__user_no_access_regular_rbac(self):
@@ -137,7 +138,7 @@ class TestDashboardRoleBasedSecurity(BaseTestDashboardSecurity):
return
slice = (
- db.session.query(Slice)
+ db.session.query(Slice) # noqa: F405
.filter_by(slice_name="Girl Name Cloud")
.one_or_none()
)
@@ -151,8 +152,8 @@ class TestDashboardRoleBasedSecurity(BaseTestDashboardSecurity):
request_payload = get_query_context("birth_names")
rv = self.post_assert_metric(CHART_DATA_URI, request_payload, "data")
assert rv.status_code == 403
- db.session.delete(dashboard)
- db.session.commit()
+ db.session.delete(dashboard) # noqa: F405
+ db.session.commit() # noqa: F405
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_get_dashboard_view__user_access_regular_rbac(self):
@@ -160,7 +161,7 @@ class TestDashboardRoleBasedSecurity(BaseTestDashboardSecurity):
return
slice = (
- db.session.query(Slice)
+ db.session.query(Slice) # noqa: F405
.filter_by(slice_name="Girl Name Cloud")
.one_or_none()
)
@@ -174,8 +175,8 @@ class TestDashboardRoleBasedSecurity(BaseTestDashboardSecurity):
request_payload = get_query_context("birth_names")
rv = self.post_assert_metric(CHART_DATA_URI, request_payload, "data")
assert rv.status_code == 200
- db.session.delete(dashboard)
- db.session.commit()
+ db.session.delete(dashboard) # noqa: F405
+ db.session.commit() # noqa: F405
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_get_dashboard_view__user_access_with_dashboard_permission(self):
@@ -183,18 +184,18 @@ class TestDashboardRoleBasedSecurity(BaseTestDashboardSecurity):
return
# arrange
- username = random_str()
- new_role = f"role_{random_str()}"
+ username = random_str() # noqa: F405
+ new_role = f"role_{random_str()}" # noqa: F405
self.create_user_with_roles(username, [new_role], should_create_roles=True)
slice = (
- db.session.query(Slice)
+ db.session.query(Slice) # noqa: F405
.filter_by(slice_name="Girl Name Cloud")
.one_or_none()
)
dashboard_to_access = create_dashboard_to_db(published=True, slices=[slice])
self.login(username)
- grant_access_to_dashboard(dashboard_to_access, new_role)
+ grant_access_to_dashboard(dashboard_to_access, new_role) # noqa: F405
# act
response = self.get_dashboard_view_response(dashboard_to_access)
@@ -207,12 +208,12 @@ class TestDashboardRoleBasedSecurity(BaseTestDashboardSecurity):
self.assertEqual(rv.status_code, 403)
# post
- revoke_access_to_dashboard(dashboard_to_access, new_role)
+ revoke_access_to_dashboard(dashboard_to_access, new_role) # noqa: F405
@pytest.mark.usefixtures("public_role_like_gamma")
def test_get_dashboard_view__public_user_can_not_access_without_permission(self):
dashboard_to_access = create_dashboard_to_db(published=True)
- grant_access_to_dashboard(dashboard_to_access, "Alpha")
+ grant_access_to_dashboard(dashboard_to_access, "Alpha") # noqa: F405
# act
response = self.get_dashboard_view_response(dashboard_to_access)
@@ -226,7 +227,7 @@ class TestDashboardRoleBasedSecurity(BaseTestDashboardSecurity):
):
# arrange
dashboard_to_access = create_dashboard_to_db(published=False)
- grant_access_to_dashboard(dashboard_to_access, "Public")
+ grant_access_to_dashboard(dashboard_to_access, "Public") # noqa: F405
# act
response = self.get_dashboard_view_response(dashboard_to_access)
@@ -234,7 +235,7 @@ class TestDashboardRoleBasedSecurity(BaseTestDashboardSecurity):
assert response.status_code == 302
# post
- revoke_access_to_dashboard(dashboard_to_access, "Public")
+ revoke_access_to_dashboard(dashboard_to_access, "Public") # noqa: F405
@pytest.mark.usefixtures("public_role_like_gamma")
def test_get_dashboard_view__public_user_access_with_dashboard_permission(self):
@@ -242,7 +243,7 @@ class TestDashboardRoleBasedSecurity(BaseTestDashboardSecurity):
dashboard_to_access = create_dashboard_to_db(
published=True, slices=[create_slice_to_db()]
)
- grant_access_to_dashboard(dashboard_to_access, "Public")
+ grant_access_to_dashboard(dashboard_to_access, "Public") # noqa: F405
# act
response = self.get_dashboard_view_response(dashboard_to_access)
@@ -251,11 +252,11 @@ class TestDashboardRoleBasedSecurity(BaseTestDashboardSecurity):
self.assert200(response)
# post
- revoke_access_to_dashboard(dashboard_to_access, "Public")
+ revoke_access_to_dashboard(dashboard_to_access, "Public") # noqa: F405
def _create_sample_dashboards_with_owner_access(self):
- username = random_str()
- new_role = f"role_{random_str()}"
+ username = random_str() # noqa: F405
+ new_role = f"role_{random_str()}" # noqa: F405
owner = self.create_user_with_roles(
username, [new_role], should_create_roles=True
)
@@ -276,8 +277,8 @@ class TestDashboardRoleBasedSecurity(BaseTestDashboardSecurity):
return username, not_owned_dashboards, owned_dashboards
def _create_sample_only_published_dashboard_with_roles(self):
- username = random_str()
- new_role = f"role_{random_str()}"
+ username = random_str() # noqa: F405
+ new_role = f"role_{random_str()}" # noqa: F405
self.create_user_with_roles(username, [new_role], should_create_roles=True)
published_dashboards = [
create_dashboard_to_db(published=True),
@@ -288,7 +289,7 @@ class TestDashboardRoleBasedSecurity(BaseTestDashboardSecurity):
create_dashboard_to_db(published=False),
]
for dash in published_dashboards + draft_dashboards:
- grant_access_to_dashboard(dash, new_role)
+ grant_access_to_dashboard(dash, new_role) # noqa: F405
return username, new_role, draft_dashboards, published_dashboards
def test_get_dashboards_api__admin_get_all_dashboards(self):
@@ -296,7 +297,7 @@ class TestDashboardRoleBasedSecurity(BaseTestDashboardSecurity):
create_dashboard_to_db(
owners=[], slices=[create_slice_to_db()], published=False
)
- dashboard_counts = count_dashboards()
+ dashboard_counts = count_dashboards() # noqa: F405
self.login(ADMIN_USERNAME)
@@ -325,8 +326,8 @@ class TestDashboardRoleBasedSecurity(BaseTestDashboardSecurity):
)
def test_get_dashboards_api__user_without_any_permissions_get_empty_list(self):
- username = random_str()
- new_role = f"role_{random_str()}"
+ username = random_str() # noqa: F405
+ new_role = f"role_{random_str()}" # noqa: F405
self.create_user_with_roles(username, [new_role], should_create_roles=True)
create_dashboard_to_db(published=True)
self.login(username)
@@ -360,7 +361,7 @@ class TestDashboardRoleBasedSecurity(BaseTestDashboardSecurity):
# post
for dash in published_dashboards + draft_dashboards:
- revoke_access_to_dashboard(dash, new_role)
+ revoke_access_to_dashboard(dash, new_role) # noqa: F405
@pytest.mark.usefixtures("public_role_like_gamma")
def test_get_dashboards_api__public_user_without_any_permissions_get_empty_list(
@@ -389,7 +390,7 @@ class TestDashboardRoleBasedSecurity(BaseTestDashboardSecurity):
]
for dash in published_dashboards + draft_dashboards:
- grant_access_to_dashboard(dash, "Public")
+ grant_access_to_dashboard(dash, "Public") # noqa: F405
# act
response = self.get_dashboards_api_response()
@@ -404,7 +405,7 @@ class TestDashboardRoleBasedSecurity(BaseTestDashboardSecurity):
# post
for dash in published_dashboards + draft_dashboards:
- revoke_access_to_dashboard(dash, "Public")
+ revoke_access_to_dashboard(dash, "Public") # noqa: F405
def test_cannot_get_draft_dashboard_without_roles_by_uuid(self):
"""
@@ -446,19 +447,19 @@ class TestDashboardRoleBasedSecurity(BaseTestDashboardSecurity):
rv = self.client.get(uri)
assert rv.status_code == 403
# rollback changes
- db.session.delete(dashboard)
- db.session.commit()
+ db.session.delete(dashboard) # noqa: F405
+ db.session.commit() # noqa: F405
@with_feature_flags(DASHBOARD_RBAC=True)
@pytest.mark.usefixtures("load_world_bank_dashboard_with_slices")
def test_copy_dashboard_via_api(self):
- source = db.session.query(Dashboard).filter_by(slug="world_health").first()
+ source = db.session.query(Dashboard).filter_by(slug="world_health").first() # noqa: F405
source.roles = [self.get_role("Gamma")]
if not (published := source.published):
source.published = True # Required per the DashboardAccessFilter for RBAC.
- db.session.commit()
+ db.session.commit() # noqa: F405
uri = f"api/v1/dashboard/{source.id}/copy/"
@@ -486,23 +487,23 @@ class TestDashboardRoleBasedSecurity(BaseTestDashboardSecurity):
response = json.loads(rv.data.decode("utf-8"))
target = (
- db.session.query(Dashboard)
- .filter(Dashboard.id == response["result"]["id"])
+ db.session.query(Dashboard) # noqa: F405
+ .filter(Dashboard.id == response["result"]["id"]) # noqa: F405
.one()
)
- db.session.delete(target)
+ db.session.delete(target) # noqa: F405
source.roles = []
if not published:
source.published = False
- db.session.commit()
+ db.session.commit() # noqa: F405
@with_feature_flags(DASHBOARD_RBAC=True)
@pytest.mark.usefixtures("load_world_bank_dashboard_with_slices")
def test_copy_dashboard_via_dao(self):
- source = db.session.query(Dashboard).filter_by(slug="world_health").first()
+ source = db.session.query(Dashboard).filter_by(slug="world_health").first() # noqa: F405
data = {
"dashboard_title": "copied dash",
@@ -517,12 +518,12 @@ class TestDashboardRoleBasedSecurity(BaseTestDashboardSecurity):
),
}
- with override_user(security_manager.find_user("gamma")):
+ with override_user(security_manager.find_user("gamma")): # noqa: F405
with pytest.raises(DashboardForbiddenError):
DashboardDAO.copy_dashboard(source, data)
- with override_user(security_manager.find_user("admin")):
+ with override_user(security_manager.find_user("admin")): # noqa: F405
target = DashboardDAO.copy_dashboard(source, data)
- db.session.delete(target)
+ db.session.delete(target) # noqa: F405
- db.session.commit()
+ db.session.commit() # noqa: F405
diff --git a/tests/integration_tests/databases/api_tests.py b/tests/integration_tests/databases/api_tests.py
index 8181db9b2..016c89798 100644
--- a/tests/integration_tests/databases/api_tests.py
+++ b/tests/integration_tests/databases/api_tests.py
@@ -16,6 +16,7 @@
# under the License.
# isort:skip_file
"""Unit tests for Superset"""
+
import dataclasses
import json
from collections import defaultdict
@@ -30,15 +31,15 @@ import yaml
from unittest.mock import Mock
-from sqlalchemy.engine.url import make_url
+from sqlalchemy.engine.url import make_url # noqa: F401
from sqlalchemy.exc import DBAPIError
from sqlalchemy.sql import func
from superset import db, security_manager
-from superset.commands.database.ssh_tunnel.exceptions import SSHTunnelDatabasePortError
+from superset.commands.database.ssh_tunnel.exceptions import SSHTunnelDatabasePortError # noqa: F401
from superset.connectors.sqla.models import SqlaTable
from superset.databases.ssh_tunnel.models import SSHTunnel
-from superset.databases.utils import make_url_safe
+from superset.databases.utils import make_url_safe # noqa: F401
from superset.db_engine_specs.mysql import MySQLEngineSpec
from superset.db_engine_specs.postgres import PostgresEngineSpec
from superset.db_engine_specs.redshift import RedshiftEngineSpec
@@ -52,16 +53,16 @@ from superset.utils.database import get_example_database, get_main_database
from tests.integration_tests.base_tests import SupersetTestCase
from tests.integration_tests.constants import ADMIN_USERNAME, GAMMA_USERNAME
from tests.integration_tests.fixtures.birth_names_dashboard import (
- load_birth_names_dashboard_with_slices,
- load_birth_names_data,
+ load_birth_names_dashboard_with_slices, # noqa: F401
+ load_birth_names_data, # noqa: F401
)
from tests.integration_tests.fixtures.energy_dashboard import (
- load_energy_table_with_slice,
- load_energy_table_data,
+ load_energy_table_with_slice, # noqa: F401
+ load_energy_table_data, # noqa: F401
)
from tests.integration_tests.fixtures.world_bank_dashboard import (
- load_world_bank_dashboard_with_slices,
- load_world_bank_data,
+ load_world_bank_dashboard_with_slices, # noqa: F401
+ load_world_bank_data, # noqa: F401
)
from tests.integration_tests.fixtures.importexport import (
database_config,
@@ -75,8 +76,8 @@ from tests.integration_tests.fixtures.importexport import (
database_with_ssh_tunnel_config_private_pass_only,
)
from tests.integration_tests.fixtures.unicode_dashboard import (
- load_unicode_dashboard_with_position,
- load_unicode_data,
+ load_unicode_dashboard_with_position, # noqa: F401
+ load_unicode_data, # noqa: F401
)
from tests.integration_tests.test_app import app
@@ -2496,9 +2497,9 @@ class TestDatabaseApi(SupersetTestCase):
uri = "api/v1/database/import/"
masked_database_config = database_config.copy()
- masked_database_config[
- "sqlalchemy_uri"
- ] = "postgresql://username:XXXXXXXXXX@host:12345/db"
+ masked_database_config["sqlalchemy_uri"] = (
+ "postgresql://username:XXXXXXXXXX@host:12345/db"
+ )
buf = BytesIO()
with ZipFile(buf, "w") as bundle:
@@ -2553,9 +2554,9 @@ class TestDatabaseApi(SupersetTestCase):
uri = "api/v1/database/import/"
masked_database_config = database_config.copy()
- masked_database_config[
- "sqlalchemy_uri"
- ] = "vertica+vertica_python://hackathon:XXXXXXXXXX@host:5433/dbname?ssl=1"
+ masked_database_config["sqlalchemy_uri"] = (
+ "vertica+vertica_python://hackathon:XXXXXXXXXX@host:5433/dbname?ssl=1"
+ )
buf = BytesIO()
with ZipFile(buf, "w") as bundle:
@@ -4039,7 +4040,7 @@ class TestDatabaseApi(SupersetTestCase):
expected_names = [db.database_name for db in dbs]
expected_names.sort()
- uri = f"api/v1/database/"
+ uri = "api/v1/database/" # noqa: F541
# Get the list of databases without filter in the config
rv = self.client.get(uri)
data = json.loads(rv.data.decode("utf-8"))
@@ -4058,7 +4059,7 @@ class TestDatabaseApi(SupersetTestCase):
"superset.views.filters.current_app.config",
{"EXTRA_DYNAMIC_QUERY_FILTERS": {"databases": base_filter_mock}},
):
- uri = f"api/v1/database/"
+ uri = "api/v1/database/" # noqa: F541
rv = self.client.get(uri)
data = json.loads(rv.data.decode("utf-8"))
# Only one database start with dyntest
diff --git a/tests/integration_tests/databases/commands/upload_test.py b/tests/integration_tests/databases/commands/upload_test.py
index 695e3e890..26379aa97 100644
--- a/tests/integration_tests/databases/commands/upload_test.py
+++ b/tests/integration_tests/databases/commands/upload_test.py
@@ -26,7 +26,6 @@ from superset import db, security_manager
from superset.commands.database.exceptions import (
DatabaseNotFoundError,
DatabaseSchemaUploadNotAllowed,
- DatabaseUploadFailed,
DatabaseUploadNotSupported,
)
from superset.commands.database.uploaders.base import UploadCommand
diff --git a/tests/integration_tests/databases/commands_tests.py b/tests/integration_tests/databases/commands_tests.py
index ecdf1b7a8..2388b38ff 100644
--- a/tests/integration_tests/databases/commands_tests.py
+++ b/tests/integration_tests/databases/commands_tests.py
@@ -22,14 +22,14 @@ import yaml
from func_timeout import FunctionTimedOut
from sqlalchemy.exc import DBAPIError
-from superset import db, event_logger, security_manager
+from superset import db, event_logger, security_manager # noqa: F401
from superset.commands.database.create import CreateDatabaseCommand
from superset.commands.database.exceptions import (
DatabaseInvalidError,
DatabaseNotFoundError,
DatabaseSecurityUnsafeError,
DatabaseTablesUnexpectedError,
- DatabaseTestConnectionDriverError,
+ DatabaseTestConnectionDriverError, # noqa: F401
DatabaseTestConnectionUnexpectedError,
)
from superset.commands.database.export import ExportDatabasesCommand
@@ -40,7 +40,7 @@ from superset.commands.database.validate import ValidateDatabaseParametersComman
from superset.commands.exceptions import CommandInvalidError
from superset.commands.importers.exceptions import IncorrectVersionError
from superset.connectors.sqla.models import SqlaTable
-from superset.databases.schemas import DatabaseTestConnectionSchema
+from superset.databases.schemas import DatabaseTestConnectionSchema # noqa: F401
from superset.databases.ssh_tunnel.models import SSHTunnel
from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
from superset.exceptions import (
@@ -54,12 +54,12 @@ from superset.utils.core import backend
from superset.utils.database import get_example_database
from tests.integration_tests.base_tests import SupersetTestCase
from tests.integration_tests.fixtures.birth_names_dashboard import (
- load_birth_names_dashboard_with_slices,
- load_birth_names_data,
+ load_birth_names_dashboard_with_slices, # noqa: F401
+ load_birth_names_data, # noqa: F401
)
from tests.integration_tests.fixtures.energy_dashboard import (
- load_energy_table_data,
- load_energy_table_with_slice,
+ load_energy_table_data, # noqa: F401
+ load_energy_table_with_slice, # noqa: F401
)
from tests.integration_tests.fixtures.importexport import (
database_config,
@@ -386,7 +386,6 @@ class TestExportDatabasesCommand(SupersetTestCase):
mock_g.user = security_manager.find_user("admin")
example_db = get_example_database()
- db_uuid = example_db.uuid
command = ExportDatabasesCommand([example_db.id], export_related=False)
contents = dict(command.run())
@@ -623,9 +622,9 @@ class TestImportDatabasesCommand(SupersetTestCase):
def test_import_v1_database_masked_password(self):
"""Test that database imports with masked passwords are rejected"""
masked_database_config = database_config.copy()
- masked_database_config[
- "sqlalchemy_uri"
- ] = "postgresql://username:XXXXXXXXXX@host:12345/db"
+ masked_database_config["sqlalchemy_uri"] = (
+ "postgresql://username:XXXXXXXXXX@host:12345/db"
+ )
contents = {
"metadata.yaml": yaml.safe_dump(database_metadata_config),
"databases/imported_database.yaml": yaml.safe_dump(masked_database_config),
diff --git a/tests/integration_tests/databases/ssh_tunnel/commands/commands_tests.py b/tests/integration_tests/databases/ssh_tunnel/commands/commands_tests.py
index f6e5ca9d0..9e1f33f39 100644
--- a/tests/integration_tests/databases/ssh_tunnel/commands/commands_tests.py
+++ b/tests/integration_tests/databases/ssh_tunnel/commands/commands_tests.py
@@ -15,15 +15,12 @@
# specific language governing permissions and limitations
# under the License.
from unittest import mock
-from unittest.mock import patch
import pytest
from superset import security_manager
-from superset.commands.database.ssh_tunnel.create import CreateSSHTunnelCommand
from superset.commands.database.ssh_tunnel.delete import DeleteSSHTunnelCommand
from superset.commands.database.ssh_tunnel.exceptions import (
- SSHTunnelInvalidError,
SSHTunnelNotFoundError,
)
from superset.commands.database.ssh_tunnel.update import UpdateSSHTunnelCommand
diff --git a/tests/integration_tests/datasets/api_tests.py b/tests/integration_tests/datasets/api_tests.py
index 3597bcdb0..8d24c2993 100644
--- a/tests/integration_tests/datasets/api_tests.py
+++ b/tests/integration_tests/datasets/api_tests.py
@@ -15,6 +15,7 @@
# specific language governing permissions and limitations
# under the License.
"""Unit tests for Superset"""
+
import json
import unittest
from io import BytesIO
@@ -29,7 +30,7 @@ from sqlalchemy import inspect
from sqlalchemy.orm import joinedload
from sqlalchemy.sql import func
-from superset import app
+from superset import app # noqa: F401
from superset.commands.dataset.exceptions import DatasetCreateFailedError
from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn
from superset.daos.exceptions import (
@@ -37,7 +38,7 @@ from superset.daos.exceptions import (
DAODeleteFailedError,
DAOUpdateFailedError,
)
-from superset.datasets.models import Dataset
+from superset.datasets.models import Dataset # noqa: F401
from superset.extensions import db, security_manager
from superset.models.core import Database
from superset.models.slice import Slice
@@ -45,19 +46,19 @@ from superset.utils.core import backend, get_example_default_schema
from superset.utils.database import get_example_database, get_main_database
from superset.utils.dict_import_export import export_to_dict
from tests.integration_tests.base_tests import SupersetTestCase
-from tests.integration_tests.conftest import CTAS_SCHEMA_NAME, with_feature_flags
+from tests.integration_tests.conftest import CTAS_SCHEMA_NAME, with_feature_flags # noqa: F401
from tests.integration_tests.constants import (
ADMIN_USERNAME,
ALPHA_USERNAME,
GAMMA_USERNAME,
)
from tests.integration_tests.fixtures.birth_names_dashboard import (
- load_birth_names_dashboard_with_slices,
- load_birth_names_data,
+ load_birth_names_dashboard_with_slices, # noqa: F401
+ load_birth_names_data, # noqa: F401
)
from tests.integration_tests.fixtures.energy_dashboard import (
- load_energy_table_data,
- load_energy_table_with_slice,
+ load_energy_table_data, # noqa: F401
+ load_energy_table_with_slice, # noqa: F401
)
from tests.integration_tests.fixtures.importexport import (
database_config,
@@ -961,7 +962,7 @@ class TestDatasetApi(SupersetTestCase):
"""
# Add default dataset
- main_db = get_main_database()
+ get_main_database()
dataset = self.insert_default_dataset()
prev_col_len = len(dataset.columns)
@@ -1530,7 +1531,7 @@ class TestDatasetApi(SupersetTestCase):
uri = f"api/v1/dataset/{dataset.id}/column/{column_id}"
rv = self.client.delete(uri)
assert rv.status_code == 200
- assert db.session.query(TableColumn).get(column_id) == None
+ assert db.session.query(TableColumn).get(column_id) is None
@pytest.mark.usefixtures("create_datasets")
def test_delete_dataset_column_not_found(self):
@@ -1602,7 +1603,7 @@ class TestDatasetApi(SupersetTestCase):
uri = f"api/v1/dataset/{dataset.id}/metric/{test_metric.id}"
rv = self.client.delete(uri)
assert rv.status_code == 200
- assert db.session.query(SqlMetric).get(test_metric.id) == None
+ assert db.session.query(SqlMetric).get(test_metric.id) is None
@pytest.mark.usefixtures("create_datasets")
def test_delete_dataset_metric_not_found(self):
@@ -2267,7 +2268,7 @@ class TestDatasetApi(SupersetTestCase):
dataset = self.get_fixture_virtual_datasets()[0]
self.login(ADMIN_USERNAME)
- uri = f"api/v1/dataset/duplicate"
+ uri = "api/v1/dataset/duplicate"
table_data = {"base_model_id": dataset.id, "table_name": "Dupe1"}
rv = self.post_assert_metric(uri, table_data, "duplicate")
assert rv.status_code == 201
@@ -2293,7 +2294,7 @@ class TestDatasetApi(SupersetTestCase):
dataset = self.get_fixture_datasets()[0]
self.login(ADMIN_USERNAME)
- uri = f"api/v1/dataset/duplicate"
+ uri = "api/v1/dataset/duplicate"
table_data = {"base_model_id": dataset.id, "table_name": "Dupe2"}
rv = self.post_assert_metric(uri, table_data, "duplicate")
assert rv.status_code == 422
@@ -2307,7 +2308,7 @@ class TestDatasetApi(SupersetTestCase):
dataset = self.get_fixture_virtual_datasets()[0]
self.login(ADMIN_USERNAME)
- uri = f"api/v1/dataset/duplicate"
+ uri = "api/v1/dataset/duplicate"
table_data = {
"base_model_id": dataset.id,
"table_name": "sql_virtual_dataset_2",
@@ -2321,7 +2322,7 @@ class TestDatasetApi(SupersetTestCase):
"""
self.login(ADMIN_USERNAME)
- uri = f"api/v1/dataset/duplicate"
+ uri = "api/v1/dataset/duplicate"
table_data = {
"base_model_id": -1,
"table_name": "Dupe3",
diff --git a/tests/integration_tests/datasets/commands_tests.py b/tests/integration_tests/datasets/commands_tests.py
index 806346693..405ea52c4 100644
--- a/tests/integration_tests/datasets/commands_tests.py
+++ b/tests/integration_tests/datasets/commands_tests.py
@@ -42,12 +42,12 @@ from superset.utils.core import get_example_default_schema, override_user
from superset.utils.database import get_example_database
from tests.integration_tests.base_tests import SupersetTestCase
from tests.integration_tests.fixtures.birth_names_dashboard import (
- load_birth_names_dashboard_with_slices,
- load_birth_names_data,
+ load_birth_names_dashboard_with_slices, # noqa: F401
+ load_birth_names_data, # noqa: F401
)
from tests.integration_tests.fixtures.energy_dashboard import (
- load_energy_table_data,
- load_energy_table_with_slice,
+ load_energy_table_data, # noqa: F401
+ load_energy_table_with_slice, # noqa: F401
)
from tests.integration_tests.fixtures.importexport import (
database_config,
@@ -58,8 +58,8 @@ from tests.integration_tests.fixtures.importexport import (
dataset_ui_export,
)
from tests.integration_tests.fixtures.world_bank_dashboard import (
- load_world_bank_dashboard_with_slices,
- load_world_bank_data,
+ load_world_bank_dashboard_with_slices, # noqa: F401
+ load_world_bank_data, # noqa: F401
)
diff --git a/tests/integration_tests/datasource/api_tests.py b/tests/integration_tests/datasource/api_tests.py
index 0bf7edddb..044aead80 100644
--- a/tests/integration_tests/datasource/api_tests.py
+++ b/tests/integration_tests/datasource/api_tests.py
@@ -153,7 +153,7 @@ class TestDatasourceApi(SupersetTestCase):
self.login(ADMIN_USERNAME)
table = self.get_virtual_dataset()
table.normalize_columns = True
- rv = self.client.get(f"api/v1/datasource/table/{table.id}/column/col2/values/")
+ self.client.get(f"api/v1/datasource/table/{table.id}/column/col2/values/")
values_for_column_mock.assert_called_with(
column_name="col2",
limit=10000,
@@ -166,7 +166,7 @@ class TestDatasourceApi(SupersetTestCase):
self.login(ADMIN_USERNAME)
table = self.get_virtual_dataset()
table.normalize_columns = True
- rv = self.client.get(f"api/v1/datasource/table/{table.id}/column/col2/values/")
+ self.client.get(f"api/v1/datasource/table/{table.id}/column/col2/values/")
denormalize_name_mock.assert_not_called()
@pytest.mark.usefixtures("app_context", "virtual_dataset")
@@ -175,7 +175,7 @@ class TestDatasourceApi(SupersetTestCase):
self.login(ADMIN_USERNAME)
table = self.get_virtual_dataset()
table.normalize_columns = False
- rv = self.client.get(f"api/v1/datasource/table/{table.id}/column/col2/values/")
+ self.client.get(f"api/v1/datasource/table/{table.id}/column/col2/values/")
values_for_column_mock.assert_called_with(
column_name="col2",
limit=10000,
@@ -188,5 +188,5 @@ class TestDatasourceApi(SupersetTestCase):
self.login(ADMIN_USERNAME)
table = self.get_virtual_dataset()
table.normalize_columns = False
- rv = self.client.get(f"api/v1/datasource/table/{table.id}/column/col2/values/")
+ self.client.get(f"api/v1/datasource/table/{table.id}/column/col2/values/")
denormalize_name_mock.assert_called_with(ANY, "col2")
diff --git a/tests/integration_tests/datasource_tests.py b/tests/integration_tests/datasource_tests.py
index 34da3df35..e9a688899 100644
--- a/tests/integration_tests/datasource_tests.py
+++ b/tests/integration_tests/datasource_tests.py
@@ -15,6 +15,7 @@
# specific language governing permissions and limitations
# under the License.
"""Unit tests for Superset"""
+
import json
from contextlib import contextmanager
from datetime import datetime, timedelta
@@ -26,18 +27,18 @@ import pytest
from superset import app, db
from superset.commands.dataset.exceptions import DatasetNotFoundError
from superset.common.utils.query_cache_manager import QueryCacheManager
-from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn
+from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn # noqa: F401
from superset.constants import CacheRegion
from superset.daos.exceptions import DatasourceNotFound, DatasourceTypeNotSupportedError
from superset.exceptions import SupersetGenericDBErrorException
from superset.models.core import Database
-from superset.utils.core import backend, get_example_default_schema
-from superset.utils.database import get_example_database, get_main_database
+from superset.utils.core import backend, get_example_default_schema # noqa: F401
+from superset.utils.database import get_example_database, get_main_database # noqa: F401
from tests.integration_tests.base_tests import db_insert_temp_object, SupersetTestCase
from tests.integration_tests.constants import ADMIN_USERNAME
from tests.integration_tests.fixtures.birth_names_dashboard import (
- load_birth_names_dashboard_with_slices,
- load_birth_names_data,
+ load_birth_names_dashboard_with_slices, # noqa: F401
+ load_birth_names_data, # noqa: F401
)
from tests.integration_tests.fixtures.datasource import get_datasource_post
@@ -83,7 +84,7 @@ class TestDatasource(SupersetTestCase):
self.login(ADMIN_USERNAME)
database = get_example_database()
- sql = f"SELECT DATE() as default_dttm, DATE() as additional_dttm, 1 as metric;"
+ sql = "SELECT DATE() as default_dttm, DATE() as additional_dttm, 1 as metric;"
if database.backend == "sqlite":
pass
elif database.backend in ["postgresql", "mysql"]:
diff --git a/tests/integration_tests/db_engine_specs/base_engine_spec_tests.py b/tests/integration_tests/db_engine_specs/base_engine_spec_tests.py
index ababce38e..d7498dc4f 100644
--- a/tests/integration_tests/db_engine_specs/base_engine_spec_tests.py
+++ b/tests/integration_tests/db_engine_specs/base_engine_spec_tests.py
@@ -36,12 +36,12 @@ from tests.integration_tests.db_engine_specs.base_tests import TestDbEngineSpec
from tests.integration_tests.test_app import app
from ..fixtures.birth_names_dashboard import (
- load_birth_names_dashboard_with_slices,
- load_birth_names_data,
+ load_birth_names_dashboard_with_slices, # noqa: F401
+ load_birth_names_data, # noqa: F401
)
from ..fixtures.energy_dashboard import (
- load_energy_table_data,
- load_energy_table_with_slice,
+ load_energy_table_data, # noqa: F401
+ load_energy_table_with_slice, # noqa: F401
)
from ..fixtures.pyodbcRow import Row
@@ -333,8 +333,8 @@ def test_time_grain_denylist():
with app.app_context():
time_grain_functions = SqliteEngineSpec.get_time_grain_expressions()
- assert not "PT1M" in time_grain_functions
- assert not "SQLITE_NONEXISTENT_GRAIN" in time_grain_functions
+ assert "PT1M" not in time_grain_functions
+ assert "SQLITE_NONEXISTENT_GRAIN" not in time_grain_functions
app.config = config
diff --git a/tests/integration_tests/db_engine_specs/base_tests.py b/tests/integration_tests/db_engine_specs/base_tests.py
index 2d4f72c4f..c30c8a0f1 100644
--- a/tests/integration_tests/db_engine_specs/base_tests.py
+++ b/tests/integration_tests/db_engine_specs/base_tests.py
@@ -16,7 +16,7 @@
# under the License.
# isort:skip_file
-from tests.integration_tests.test_app import app
+from tests.integration_tests.test_app import app # noqa: F401
from tests.integration_tests.base_tests import SupersetTestCase
from superset.db_engine_specs.base import BaseEngineSpec
from superset.models.core import Database
diff --git a/tests/integration_tests/db_engine_specs/bigquery_tests.py b/tests/integration_tests/db_engine_specs/bigquery_tests.py
index c012376b3..ce184685d 100644
--- a/tests/integration_tests/db_engine_specs/bigquery_tests.py
+++ b/tests/integration_tests/db_engine_specs/bigquery_tests.py
@@ -27,8 +27,8 @@ from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
from superset.sql_parse import Table
from tests.integration_tests.db_engine_specs.base_tests import TestDbEngineSpec
from tests.integration_tests.fixtures.birth_names_dashboard import (
- load_birth_names_dashboard_with_slices,
- load_birth_names_data,
+ load_birth_names_dashboard_with_slices, # noqa: F401
+ load_birth_names_data, # noqa: F401
)
diff --git a/tests/integration_tests/db_engine_specs/hive_tests.py b/tests/integration_tests/db_engine_specs/hive_tests.py
index d4b2e14d5..39d2c30fd 100644
--- a/tests/integration_tests/db_engine_specs/hive_tests.py
+++ b/tests/integration_tests/db_engine_specs/hive_tests.py
@@ -31,18 +31,14 @@ def test_0_progress():
log = """
17/02/07 18:26:27 INFO log.PerfLogger:
17/02/07 18:26:27 INFO log.PerfLogger:
- """.split(
- "\n"
- )
+ """.split("\n")
assert HiveEngineSpec.progress(log) == 0
def test_number_of_jobs_progress():
log = """
17/02/07 19:15:55 INFO ql.Driver: Total jobs = 2
- """.split(
- "\n"
- )
+ """.split("\n")
assert HiveEngineSpec.progress(log) == 0
@@ -50,9 +46,7 @@ def test_job_1_launched_progress():
log = """
17/02/07 19:15:55 INFO ql.Driver: Total jobs = 2
17/02/07 19:15:55 INFO ql.Driver: Launching Job 1 out of 2
- """.split(
- "\n"
- )
+ """.split("\n")
assert HiveEngineSpec.progress(log) == 0
@@ -61,9 +55,7 @@ def test_job_1_launched_stage_1():
17/02/07 19:15:55 INFO ql.Driver: Total jobs = 2
17/02/07 19:15:55 INFO ql.Driver: Launching Job 1 out of 2
17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 0%, reduce = 0%
- """.split(
- "\n"
- )
+ """.split("\n")
assert HiveEngineSpec.progress(log) == 0
@@ -73,9 +65,7 @@ def test_job_1_launched_stage_1_map_40_progress(): # pylint: disable=invalid-na
17/02/07 19:15:55 INFO ql.Driver: Launching Job 1 out of 2
17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 0%, reduce = 0%
17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 40%, reduce = 0%
- """.split(
- "\n"
- )
+ """.split("\n")
assert HiveEngineSpec.progress(log) == 10
@@ -86,9 +76,7 @@ def test_job_1_launched_stage_1_map_80_reduce_40_progress(): # pylint: disable=
17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 0%, reduce = 0%
17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 40%, reduce = 0%
17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 80%, reduce = 40%
- """.split(
- "\n"
- )
+ """.split("\n")
assert HiveEngineSpec.progress(log) == 30
@@ -101,9 +89,7 @@ def test_job_1_launched_stage_2_stages_progress(): # pylint: disable=invalid-na
17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 80%, reduce = 40%
17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-2 map = 0%, reduce = 0%
17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 100%, reduce = 0%
- """.split(
- "\n"
- )
+ """.split("\n")
assert HiveEngineSpec.progress(log) == 12
@@ -115,9 +101,7 @@ def test_job_2_launched_stage_2_stages_progress(): # pylint: disable=invalid-na
17/02/07 19:15:55 INFO ql.Driver: Launching Job 2 out of 2
17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 0%, reduce = 0%
17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 40%, reduce = 0%
- """.split(
- "\n"
- )
+ """.split("\n")
assert HiveEngineSpec.progress(log) == 60
@@ -187,7 +171,7 @@ def test_df_to_sql_if_exists_fail_with_schema(mock_g):
@mock.patch("superset.db_engine_specs.hive.upload_to_s3")
def test_df_to_sql_if_exists_replace(mock_upload_to_s3, mock_g):
config = app.config.copy()
- app.config["CSV_TO_HIVE_UPLOAD_DIRECTORY_FUNC"]: lambda *args: ""
+ app.config["CSV_TO_HIVE_UPLOAD_DIRECTORY_FUNC"] = lambda *args: ""
mock_upload_to_s3.return_value = "mock-location"
mock_g.user = True
mock_database = mock.MagicMock()
@@ -214,7 +198,7 @@ def test_df_to_sql_if_exists_replace(mock_upload_to_s3, mock_g):
@mock.patch("superset.db_engine_specs.hive.upload_to_s3")
def test_df_to_sql_if_exists_replace_with_schema(mock_upload_to_s3, mock_g):
config = app.config.copy()
- app.config["CSV_TO_HIVE_UPLOAD_DIRECTORY_FUNC"]: lambda *args: ""
+ app.config["CSV_TO_HIVE_UPLOAD_DIRECTORY_FUNC"] = lambda *args: ""
mock_upload_to_s3.return_value = "mock-location"
mock_g.user = True
mock_database = mock.MagicMock()
@@ -301,7 +285,7 @@ def test_upload_to_s3_success(client):
with app.app_context():
location = upload_to_s3("filename", "prefix", Table("table"))
- assert f"s3a://bucket/prefix/table" == location
+ assert "s3a://bucket/prefix/table" == location
app.config = config
diff --git a/tests/integration_tests/db_engine_specs/presto_tests.py b/tests/integration_tests/db_engine_specs/presto_tests.py
index 81a9e6110..02669a162 100644
--- a/tests/integration_tests/db_engine_specs/presto_tests.py
+++ b/tests/integration_tests/db_engine_specs/presto_tests.py
@@ -20,7 +20,7 @@ from unittest import mock, skipUnless
import pandas as pd
from flask.ctx import AppContext
-from sqlalchemy import types
+from sqlalchemy import types # noqa: F401
from sqlalchemy.sql import select
from superset.db_engine_specs.presto import PrestoEngineSpec
diff --git a/tests/integration_tests/dict_import_export_tests.py b/tests/integration_tests/dict_import_export_tests.py
index b4dddff09..ff25d1b40 100644
--- a/tests/integration_tests/dict_import_export_tests.py
+++ b/tests/integration_tests/dict_import_export_tests.py
@@ -16,6 +16,7 @@
# under the License.
# isort:skip_file
"""Unit tests for Superset"""
+
import json
import unittest
from uuid import uuid4
diff --git a/tests/integration_tests/email_tests.py b/tests/integration_tests/email_tests.py
index 7c7cc1683..c8bc13706 100644
--- a/tests/integration_tests/email_tests.py
+++ b/tests/integration_tests/email_tests.py
@@ -15,6 +15,7 @@
# specific language governing permissions and limitations
# under the License.
"""Unit tests for email service in Superset"""
+
import logging
import ssl
import tempfile
diff --git a/tests/integration_tests/embedded/api_tests.py b/tests/integration_tests/embedded/api_tests.py
index 02880fbf6..533f1311d 100644
--- a/tests/integration_tests/embedded/api_tests.py
+++ b/tests/integration_tests/embedded/api_tests.py
@@ -16,6 +16,7 @@
# under the License.
# isort:skip_file
"""Tests for security api methods"""
+
from unittest import mock
import pytest
@@ -26,8 +27,8 @@ from superset.models.dashboard import Dashboard
from tests.integration_tests.base_tests import SupersetTestCase
from tests.integration_tests.constants import ADMIN_USERNAME
from tests.integration_tests.fixtures.birth_names_dashboard import (
- load_birth_names_dashboard_with_slices,
- load_birth_names_data,
+ load_birth_names_dashboard_with_slices, # noqa: F401
+ load_birth_names_data, # noqa: F401
)
diff --git a/tests/integration_tests/embedded/dao_tests.py b/tests/integration_tests/embedded/dao_tests.py
index ca96354ba..e1f72feb8 100644
--- a/tests/integration_tests/embedded/dao_tests.py
+++ b/tests/integration_tests/embedded/dao_tests.py
@@ -17,14 +17,14 @@
# isort:skip_file
import pytest
-import tests.integration_tests.test_app # pylint: disable=unused-import
+import tests.integration_tests.test_app # pylint: disable=unused-import # noqa: F401
from superset import db
from superset.daos.dashboard import EmbeddedDashboardDAO
from superset.models.dashboard import Dashboard
from tests.integration_tests.base_tests import SupersetTestCase
from tests.integration_tests.fixtures.world_bank_dashboard import (
- load_world_bank_dashboard_with_slices,
- load_world_bank_data,
+ load_world_bank_dashboard_with_slices, # noqa: F401
+ load_world_bank_data, # noqa: F401
)
diff --git a/tests/integration_tests/embedded/test_view.py b/tests/integration_tests/embedded/test_view.py
index 1b3248883..7fcfcdba9 100644
--- a/tests/integration_tests/embedded/test_view.py
+++ b/tests/integration_tests/embedded/test_view.py
@@ -25,10 +25,10 @@ from superset import db
from superset.daos.dashboard import EmbeddedDashboardDAO
from superset.models.dashboard import Dashboard
from tests.integration_tests.fixtures.birth_names_dashboard import (
- load_birth_names_dashboard_with_slices,
- load_birth_names_data,
+ load_birth_names_dashboard_with_slices, # noqa: F401
+ load_birth_names_data, # noqa: F401
)
-from tests.integration_tests.fixtures.client import client
+from tests.integration_tests.fixtures.client import client # noqa: F401
if TYPE_CHECKING:
from typing import Any
@@ -41,7 +41,7 @@ if TYPE_CHECKING:
"superset.extensions.feature_flag_manager._feature_flags",
EMBEDDED_SUPERSET=True,
)
-def test_get_embedded_dashboard(client: FlaskClient[Any]):
+def test_get_embedded_dashboard(client: FlaskClient[Any]): # noqa: F811
dash = db.session.query(Dashboard).filter_by(slug="births").first()
embedded = EmbeddedDashboardDAO.upsert(dash, [])
uri = f"embedded/{embedded.uuid}"
@@ -54,7 +54,7 @@ def test_get_embedded_dashboard(client: FlaskClient[Any]):
"superset.extensions.feature_flag_manager._feature_flags",
EMBEDDED_SUPERSET=True,
)
-def test_get_embedded_dashboard_referrer_not_allowed(client: FlaskClient[Any]):
+def test_get_embedded_dashboard_referrer_not_allowed(client: FlaskClient[Any]): # noqa: F811
dash = db.session.query(Dashboard).filter_by(slug="births").first()
embedded = EmbeddedDashboardDAO.upsert(dash, ["test.example.com"])
uri = f"embedded/{embedded.uuid}"
@@ -66,7 +66,7 @@ def test_get_embedded_dashboard_referrer_not_allowed(client: FlaskClient[Any]):
"superset.extensions.feature_flag_manager._feature_flags",
EMBEDDED_SUPERSET=True,
)
-def test_get_embedded_dashboard_non_found(client: FlaskClient[Any]):
- uri = f"embedded/bad-uuid"
+def test_get_embedded_dashboard_non_found(client: FlaskClient[Any]): # noqa: F811
+ uri = "embedded/bad-uuid"
response = client.get(uri)
assert response.status_code == 404
diff --git a/tests/integration_tests/event_logger_tests.py b/tests/integration_tests/event_logger_tests.py
index 98c3ea922..62a5759da 100644
--- a/tests/integration_tests/event_logger_tests.py
+++ b/tests/integration_tests/event_logger_tests.py
@@ -21,7 +21,7 @@ from datetime import timedelta
from typing import Any, Optional
from unittest.mock import patch
-from flask import current_app
+from flask import current_app # noqa: F401
from freezegun import freeze_time
from superset import security_manager
@@ -231,4 +231,4 @@ class TestEventLogger(unittest.TestCase):
payload_override={"engine": "sqlite"},
)
- assert logger.records[0]["user_id"] == None
+ assert logger.records[0]["user_id"] is None
diff --git a/tests/integration_tests/explore/api_tests.py b/tests/integration_tests/explore/api_tests.py
index 6d33f1c91..dc410b23f 100644
--- a/tests/integration_tests/explore/api_tests.py
+++ b/tests/integration_tests/explore/api_tests.py
@@ -19,7 +19,7 @@ from unittest.mock import patch
import pytest
from flask_appbuilder.security.sqla.models import User
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import Session # noqa: F401
from superset import db
from superset.commands.explore.form_data.state import TemporaryExploreState
@@ -28,8 +28,8 @@ from superset.explore.exceptions import DatasetAccessDeniedError
from superset.extensions import cache_manager
from superset.models.slice import Slice
from tests.integration_tests.fixtures.world_bank_dashboard import (
- load_world_bank_dashboard_with_slices,
- load_world_bank_data,
+ load_world_bank_dashboard_with_slices, # noqa: F401
+ load_world_bank_data, # noqa: F401
)
from tests.integration_tests.test_app import app
@@ -38,22 +38,22 @@ FORM_DATA = {"test": "test value"}
@pytest.fixture
-def chart_id(load_world_bank_dashboard_with_slices) -> int:
- with app.app_context() as ctx:
+def chart_id(load_world_bank_dashboard_with_slices) -> int: # noqa: F811
+ with app.app_context():
chart = db.session.query(Slice).filter_by(slice_name="World's Population").one()
return chart.id
@pytest.fixture
def admin_id() -> int:
- with app.app_context() as ctx:
+ with app.app_context():
admin = db.session.query(User).filter_by(username="admin").one()
return admin.id
@pytest.fixture
def dataset() -> int:
- with app.app_context() as ctx:
+ with app.app_context():
dataset = (
db.session.query(SqlaTable)
.filter_by(table_name="wb_health_population")
@@ -79,9 +79,9 @@ def assert_dataset(result, dataset_id):
dataset = result["dataset"]
assert dataset["id"] == dataset_id
assert dataset["datasource_name"] == "wb_health_population"
- assert dataset["is_sqllab_view"] == False
+ assert dataset["is_sqllab_view"] is False
assert dataset["main_dttm_col"] == "year"
- assert dataset["sql"] == None
+ assert dataset["sql"] is None
assert dataset["type"] == "table"
assert dataset["uid"] == f"{dataset_id}__table"
@@ -90,7 +90,7 @@ def assert_dataset(result, dataset_id):
def assert_slice(result, chart_id, dataset_id):
slice = result["slice"]
assert slice["edit_url"] == f"/chart/edit/{chart_id}"
- assert slice["is_managed_externally"] == False
+ assert slice["is_managed_externally"] is False
assert slice["slice_id"] == chart_id
assert slice["slice_name"] == "World's Population"
assert slice["form_data"]["datasource"] == f"{dataset_id}__table"
@@ -98,14 +98,14 @@ def assert_slice(result, chart_id, dataset_id):
def test_no_params_provided(test_client, login_as_admin):
- resp = test_client.get(f"api/v1/explore/")
+ resp = test_client.get("api/v1/explore/")
assert resp.status_code == 200
data = json.loads(resp.data.decode("utf-8"))
result = data.get("result")
assert result["dataset"]["name"] == "[Missing Dataset]"
assert result["form_data"]["datasource"] == "None__table"
- assert result["message"] == None
- assert result["slice"] == None
+ assert result["message"] is None
+ assert result["slice"] is None
def test_get_from_cache(test_client, login_as_admin, dataset):
@@ -118,8 +118,8 @@ def test_get_from_cache(test_client, login_as_admin, dataset):
assert_dataset(result, dataset.id)
assert result["form_data"]["datasource"] == f"{dataset.id}__table"
assert result["form_data"]["test"] == "test value"
- assert result["message"] == None
- assert result["slice"] == None
+ assert result["message"] is None
+ assert result["slice"] is None
def test_get_from_cache_unknown_key_chart_id(
@@ -155,7 +155,7 @@ def test_get_from_cache_unknown_key_dataset(test_client, login_as_admin, dataset
result["message"]
== "Form data not found in cache, reverting to dataset metadata."
)
- assert result["slice"] == None
+ assert result["slice"] is None
def test_get_from_cache_unknown_key_no_extra_parameters(test_client, login_as_admin):
@@ -166,8 +166,8 @@ def test_get_from_cache_unknown_key_no_extra_parameters(test_client, login_as_ad
result = data.get("result")
assert result["dataset"]["name"] == "[Missing Dataset]"
assert result["form_data"]["datasource"] == "None__table"
- assert result["message"] == None
- assert result["slice"] == None
+ assert result["message"] is None
+ assert result["slice"] is None
def test_get_from_permalink(test_client, login_as_admin, chart_id, dataset):
@@ -176,7 +176,7 @@ def test_get_from_permalink(test_client, login_as_admin, chart_id, dataset):
"datasource": f"{dataset.id}__{dataset.type}",
**FORM_DATA,
}
- resp = test_client.post(f"api/v1/explore/permalink", json={"formData": form_data})
+ resp = test_client.post("api/v1/explore/permalink", json={"formData": form_data})
data = json.loads(resp.data.decode("utf-8"))
permalink_key = data["key"]
resp = test_client.get(f"api/v1/explore/?permalink_key={permalink_key}")
@@ -186,8 +186,8 @@ def test_get_from_permalink(test_client, login_as_admin, chart_id, dataset):
assert_dataset(result, dataset.id)
assert result["form_data"]["datasource"] == f"{dataset.id}__table"
assert result["form_data"]["test"] == "test value"
- assert result["message"] == None
- assert result["slice"] == None
+ assert result["message"] is None
+ assert result["slice"] is None
def test_get_from_permalink_unknown_key(test_client, login_as_admin):
diff --git a/tests/integration_tests/explore/form_data/api_tests.py b/tests/integration_tests/explore/form_data/api_tests.py
index 9187e4621..82bd7854b 100644
--- a/tests/integration_tests/explore/form_data/api_tests.py
+++ b/tests/integration_tests/explore/form_data/api_tests.py
@@ -19,7 +19,7 @@ from unittest.mock import patch
import pytest
from flask_appbuilder.security.sqla.models import User
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import Session # noqa: F401
from superset import db
from superset.commands.dataset.exceptions import DatasetAccessDeniedError
@@ -29,8 +29,8 @@ from superset.extensions import cache_manager
from superset.models.slice import Slice
from superset.utils.core import DatasourceType
from tests.integration_tests.fixtures.world_bank_dashboard import (
- load_world_bank_dashboard_with_slices,
- load_world_bank_data,
+ load_world_bank_dashboard_with_slices, # noqa: F401
+ load_world_bank_data, # noqa: F401
)
from tests.integration_tests.test_app import app
@@ -40,22 +40,22 @@ UPDATED_FORM_DATA = json.dumps({"test": "updated value"})
@pytest.fixture
-def chart_id(load_world_bank_dashboard_with_slices) -> int:
- with app.app_context() as ctx:
+def chart_id(load_world_bank_dashboard_with_slices) -> int: # noqa: F811
+ with app.app_context() as ctx: # noqa: F841
chart = db.session.query(Slice).filter_by(slice_name="World's Population").one()
return chart.id
@pytest.fixture
def admin_id() -> int:
- with app.app_context() as ctx:
+ with app.app_context() as ctx: # noqa: F841
admin = db.session.query(User).filter_by(username="admin").one()
return admin.id
@pytest.fixture
def datasource() -> int:
- with app.app_context() as ctx:
+ with app.app_context() as ctx: # noqa: F841
dataset = (
db.session.query(SqlaTable)
.filter_by(table_name="wb_health_population")
@@ -351,7 +351,7 @@ def test_put_not_owner(test_client, login_as, chart_id: int, datasource: SqlaTab
def test_get_key_not_found(test_client, login_as_admin):
- resp = test_client.get(f"api/v1/explore/form_data/unknown-key")
+ resp = test_client.get("api/v1/explore/form_data/unknown-key")
assert resp.status_code == 404
diff --git a/tests/integration_tests/explore/form_data/commands_tests.py b/tests/integration_tests/explore/form_data/commands_tests.py
index 293a2c556..b9ad73301 100644
--- a/tests/integration_tests/explore/form_data/commands_tests.py
+++ b/tests/integration_tests/explore/form_data/commands_tests.py
@@ -20,7 +20,7 @@ from unittest.mock import patch
import pytest
-from superset import app, db, security, security_manager
+from superset import app, db, security_manager
from superset.commands.exceptions import DatasourceTypeInvalidError
from superset.commands.explore.form_data.create import CreateFormDataCommand
from superset.commands.explore.form_data.delete import DeleteFormDataCommand
@@ -326,7 +326,7 @@ class TestCreateFormDataCommand(SupersetTestCase):
delete_command = DeleteFormDataCommand(delete_args)
response = delete_command.run()
- assert response == True
+ assert response is True
@patch("superset.security.manager.g")
@pytest.mark.usefixtures("create_dataset", "create_slice", "create_query")
@@ -343,4 +343,4 @@ class TestCreateFormDataCommand(SupersetTestCase):
delete_command = DeleteFormDataCommand(delete_args)
response = delete_command.run()
- assert response == False
+ assert response is False
diff --git a/tests/integration_tests/explore/permalink/api_tests.py b/tests/integration_tests/explore/permalink/api_tests.py
index a171504cc..004061989 100644
--- a/tests/integration_tests/explore/permalink/api_tests.py
+++ b/tests/integration_tests/explore/permalink/api_tests.py
@@ -20,7 +20,7 @@ from typing import Any
from uuid import uuid3
import pytest
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import Session # noqa: F401
from superset import db
from superset.explore.permalink.schemas import ExplorePermalinkSchema
@@ -30,14 +30,14 @@ from superset.key_value.utils import decode_permalink_id, encode_permalink_key
from superset.models.slice import Slice
from superset.utils.core import DatasourceType
from tests.integration_tests.fixtures.world_bank_dashboard import (
- load_world_bank_dashboard_with_slices,
- load_world_bank_data,
+ load_world_bank_dashboard_with_slices, # noqa: F401
+ load_world_bank_data, # noqa: F401
)
-from tests.integration_tests.test_app import app
+from tests.integration_tests.test_app import app # noqa: F401
@pytest.fixture
-def chart(app_context, load_world_bank_dashboard_with_slices) -> Slice:
+def chart(app_context, load_world_bank_dashboard_with_slices) -> Slice: # noqa: F811
chart = db.session.query(Slice).filter_by(slice_name="World's Population").one()
return chart
@@ -70,7 +70,7 @@ def permalink_salt() -> Iterator[str]:
def test_post(
form_data: dict[str, Any], permalink_salt: str, test_client, login_as_admin
):
- resp = test_client.post(f"api/v1/explore/permalink", json={"formData": form_data})
+ resp = test_client.post("api/v1/explore/permalink", json={"formData": form_data})
assert resp.status_code == 201
data = json.loads(resp.data.decode("utf-8"))
key = data["key"]
@@ -83,7 +83,7 @@ def test_post(
def test_post_access_denied(form_data, test_client, login_as):
login_as("gamma")
- resp = test_client.post(f"api/v1/explore/permalink", json={"formData": form_data})
+ resp = test_client.post("api/v1/explore/permalink", json={"formData": form_data})
assert resp.status_code == 403
@@ -120,14 +120,14 @@ def test_get_missing_chart(
def test_post_invalid_schema(test_client, login_as_admin) -> None:
- resp = test_client.post(f"api/v1/explore/permalink", json={"abc": 123})
+ resp = test_client.post("api/v1/explore/permalink", json={"abc": 123})
assert resp.status_code == 400
def test_get(
form_data: dict[str, Any], permalink_salt: str, test_client, login_as_admin
) -> None:
- resp = test_client.post(f"api/v1/explore/permalink", json={"formData": form_data})
+ resp = test_client.post("api/v1/explore/permalink", json={"formData": form_data})
data = json.loads(resp.data.decode("utf-8"))
key = data["key"]
resp = test_client.get(f"api/v1/explore/permalink/{key}")
diff --git a/tests/integration_tests/explore/permalink/commands_tests.py b/tests/integration_tests/explore/permalink/commands_tests.py
index f499591aa..4993e33f1 100644
--- a/tests/integration_tests/explore/permalink/commands_tests.py
+++ b/tests/integration_tests/explore/permalink/commands_tests.py
@@ -19,13 +19,10 @@ from unittest.mock import patch
import pytest
-from superset import app, db, security, security_manager
-from superset.commands.exceptions import DatasourceTypeInvalidError
-from superset.commands.explore.form_data.parameters import CommandParameters
+from superset import app, db, security_manager
from superset.commands.explore.permalink.create import CreateExplorePermalinkCommand
from superset.commands.explore.permalink.get import GetExplorePermalinkCommand
from superset.connectors.sqla.models import SqlaTable
-from superset.key_value.utils import decode_permalink_id
from superset.models.slice import Slice
from superset.models.sql_lab import Query
from superset.utils.core import DatasourceType, get_example_default_schema
diff --git a/tests/integration_tests/extensions/metastore_cache_test.py b/tests/integration_tests/extensions/metastore_cache_test.py
index eeb7279fe..2909e978f 100644
--- a/tests/integration_tests/extensions/metastore_cache_test.py
+++ b/tests/integration_tests/extensions/metastore_cache_test.py
@@ -65,7 +65,7 @@ def test_caching_flow(app_context: AppContext, cache: SupersetMetastoreCache) ->
assert cache.get(SECOND_KEY) == SECOND_VALUE
assert cache.add(FIRST_KEY, FIRST_KEY_UPDATED_VALUE) is False
assert cache.get(FIRST_KEY) == FIRST_KEY_INITIAL_VALUE
- assert cache.set(FIRST_KEY, FIRST_KEY_UPDATED_VALUE) == True
+ assert cache.set(FIRST_KEY, FIRST_KEY_UPDATED_VALUE) is True
assert cache.get(FIRST_KEY) == FIRST_KEY_UPDATED_VALUE
cache.delete(FIRST_KEY)
assert cache.has(FIRST_KEY) is False
diff --git a/tests/integration_tests/fixtures/__init__.py b/tests/integration_tests/fixtures/__init__.py
index 0e06be352..b32b68731 100644
--- a/tests/integration_tests/fixtures/__init__.py
+++ b/tests/integration_tests/fixtures/__init__.py
@@ -16,16 +16,16 @@
# under the License.
from .birth_names_dashboard import (
- load_birth_names_dashboard_with_slices,
- load_birth_names_dashboard_with_slices_module_scope,
+ load_birth_names_dashboard_with_slices, # noqa: F401
+ load_birth_names_dashboard_with_slices_module_scope, # noqa: F401
)
-from .energy_dashboard import load_energy_table_data, load_energy_table_with_slice
-from .public_role import public_role_like_gamma, public_role_like_test_role
+from .energy_dashboard import load_energy_table_data, load_energy_table_with_slice # noqa: F401
+from .public_role import public_role_like_gamma, public_role_like_test_role # noqa: F401
from .unicode_dashboard import (
- load_unicode_dashboard_with_position,
- load_unicode_dashboard_with_slice,
+ load_unicode_dashboard_with_position, # noqa: F401
+ load_unicode_dashboard_with_slice, # noqa: F401
)
from .world_bank_dashboard import (
- load_world_bank_dashboard_with_slices,
- load_world_bank_dashboard_with_slices_module_scope,
+ load_world_bank_dashboard_with_slices, # noqa: F401
+ load_world_bank_dashboard_with_slices_module_scope, # noqa: F401
)
diff --git a/tests/integration_tests/fixtures/datasource.py b/tests/integration_tests/fixtures/datasource.py
index fc0b73bde..e91ac6727 100644
--- a/tests/integration_tests/fixtures/datasource.py
+++ b/tests/integration_tests/fixtures/datasource.py
@@ -15,6 +15,7 @@
# specific language governing permissions and limitations
# under the License.
"""Fixtures for test_datasource.py"""
+
from collections.abc import Generator
from typing import Any
@@ -22,13 +23,13 @@ import pytest
from sqlalchemy import Column, create_engine, Date, Integer, MetaData, String, Table
from sqlalchemy.ext.declarative import declarative_base
-from superset.columns.models import Column as Sl_Column
+from superset.columns.models import Column as Sl_Column # noqa: F401
from superset.connectors.sqla.models import SqlaTable, TableColumn
from superset.extensions import db
from superset.models.core import Database
-from superset.tables.models import Table as Sl_Table
+from superset.tables.models import Table as Sl_Table # noqa: F401
from superset.utils.core import get_example_default_schema
-from superset.utils.database import get_example_database
+from superset.utils.database import get_example_database # noqa: F401
from tests.integration_tests.test_app import app
diff --git a/tests/integration_tests/fixtures/users.py b/tests/integration_tests/fixtures/users.py
index 6cc228d51..dd3730df9 100644
--- a/tests/integration_tests/fixtures/users.py
+++ b/tests/integration_tests/fixtures/users.py
@@ -20,7 +20,6 @@ from flask_appbuilder.security.sqla.models import Role, User
from superset import db, security_manager
from tests.integration_tests.constants import GAMMA_SQLLAB_NO_DATA_USERNAME
-from tests.integration_tests.test_app import app
@pytest.fixture()
diff --git a/tests/integration_tests/import_export_tests.py b/tests/integration_tests/import_export_tests.py
index 95c6afad9..8f7352fac 100644
--- a/tests/integration_tests/import_export_tests.py
+++ b/tests/integration_tests/import_export_tests.py
@@ -16,11 +16,12 @@
# under the License.
# isort:skip_file
"""Unit tests for Superset"""
+
import json
import unittest
from tests.integration_tests.fixtures.birth_names_dashboard import (
- load_birth_names_dashboard_with_slices,
- load_birth_names_data,
+ load_birth_names_dashboard_with_slices, # noqa: F401
+ load_birth_names_data, # noqa: F401
)
import pytest
@@ -28,8 +29,8 @@ from flask import g
from sqlalchemy.orm.session import make_transient
from tests.integration_tests.fixtures.energy_dashboard import (
- load_energy_table_with_slice,
- load_energy_table_data,
+ load_energy_table_with_slice, # noqa: F401
+ load_energy_table_data, # noqa: F401
)
from tests.integration_tests.test_app import app
from superset.commands.dashboard.importers.v0 import decode_dashboards
@@ -46,8 +47,8 @@ from superset.utils.database import get_example_database
from tests.integration_tests.base_tests import SupersetTestCase
from tests.integration_tests.constants import ADMIN_USERNAME
from tests.integration_tests.fixtures.world_bank_dashboard import (
- load_world_bank_dashboard_with_slices,
- load_world_bank_data,
+ load_world_bank_dashboard_with_slices, # noqa: F401
+ load_world_bank_data, # noqa: F401
)
@@ -369,9 +370,7 @@ class TestImportExport(SupersetTestCase):
}}
}}
}}
- """.format(
- slc.id
- )
+ """.format(slc.id)
imported_dash_id = import_dashboard(dash_with_1_slice, import_time=1990)
imported_dash = self.get_dash(imported_dash_id)
@@ -577,9 +576,7 @@ class TestImportExport(SupersetTestCase):
}}
}}
}}
- """.format(
- slc.id
- )
+ """.format(slc.id)
return dash_with_1_slice
def test_import_table_no_metadata(self):
diff --git a/tests/integration_tests/key_value/commands/create_test.py b/tests/integration_tests/key_value/commands/create_test.py
index 494456fa0..c16a9da47 100644
--- a/tests/integration_tests/key_value/commands/create_test.py
+++ b/tests/integration_tests/key_value/commands/create_test.py
@@ -27,7 +27,7 @@ from superset.extensions import db
from superset.key_value.exceptions import KeyValueCreateFailedError
from superset.utils.core import override_user
from tests.integration_tests.key_value.commands.fixtures import (
- admin,
+ admin, # noqa: F401
JSON_CODEC,
JSON_VALUE,
PICKLE_CODEC,
@@ -36,7 +36,7 @@ from tests.integration_tests.key_value.commands.fixtures import (
)
-def test_create_id_entry(app_context: AppContext, admin: User) -> None:
+def test_create_id_entry(app_context: AppContext, admin: User) -> None: # noqa: F811
from superset.commands.key_value.create import CreateKeyValueCommand
from superset.key_value.models import KeyValueEntry
@@ -53,7 +53,7 @@ def test_create_id_entry(app_context: AppContext, admin: User) -> None:
db.session.commit()
-def test_create_uuid_entry(app_context: AppContext, admin: User) -> None:
+def test_create_uuid_entry(app_context: AppContext, admin: User) -> None: # noqa: F811
from superset.commands.key_value.create import CreateKeyValueCommand
from superset.key_value.models import KeyValueEntry
@@ -68,7 +68,7 @@ def test_create_uuid_entry(app_context: AppContext, admin: User) -> None:
db.session.commit()
-def test_create_fail_json_entry(app_context: AppContext, admin: User) -> None:
+def test_create_fail_json_entry(app_context: AppContext, admin: User) -> None: # noqa: F811
from superset.commands.key_value.create import CreateKeyValueCommand
with pytest.raises(KeyValueCreateFailedError):
@@ -79,7 +79,7 @@ def test_create_fail_json_entry(app_context: AppContext, admin: User) -> None:
).run()
-def test_create_pickle_entry(app_context: AppContext, admin: User) -> None:
+def test_create_pickle_entry(app_context: AppContext, admin: User) -> None: # noqa: F811
from superset.commands.key_value.create import CreateKeyValueCommand
from superset.key_value.models import KeyValueEntry
diff --git a/tests/integration_tests/key_value/commands/delete_test.py b/tests/integration_tests/key_value/commands/delete_test.py
index 706aab888..141547e56 100644
--- a/tests/integration_tests/key_value/commands/delete_test.py
+++ b/tests/integration_tests/key_value/commands/delete_test.py
@@ -26,7 +26,7 @@ from flask_appbuilder.security.sqla.models import User
from superset.extensions import db
from tests.integration_tests.key_value.commands.fixtures import (
- admin,
+ admin, # noqa: F401
JSON_VALUE,
RESOURCE,
)
@@ -55,7 +55,7 @@ def key_value_entry() -> KeyValueEntry:
def test_delete_id_entry(
app_context: AppContext,
- admin: User,
+ admin: User, # noqa: F811
key_value_entry: KeyValueEntry,
) -> None:
from superset.commands.key_value.delete import DeleteKeyValueCommand
@@ -65,7 +65,7 @@ def test_delete_id_entry(
def test_delete_uuid_entry(
app_context: AppContext,
- admin: User,
+ admin: User, # noqa: F811
key_value_entry: KeyValueEntry,
) -> None:
from superset.commands.key_value.delete import DeleteKeyValueCommand
@@ -75,7 +75,7 @@ def test_delete_uuid_entry(
def test_delete_entry_missing(
app_context: AppContext,
- admin: User,
+ admin: User, # noqa: F811
key_value_entry: KeyValueEntry,
) -> None:
from superset.commands.key_value.delete import DeleteKeyValueCommand
diff --git a/tests/integration_tests/key_value/commands/fixtures.py b/tests/integration_tests/key_value/commands/fixtures.py
index 6ba09c8a1..31bcaadea 100644
--- a/tests/integration_tests/key_value/commands/fixtures.py
+++ b/tests/integration_tests/key_value/commands/fixtures.py
@@ -24,7 +24,6 @@ from uuid import UUID
import pytest
from flask_appbuilder.security.sqla.models import User
-from sqlalchemy.orm import Session
from superset.extensions import db
from superset.key_value.types import (
@@ -65,6 +64,6 @@ def key_value_entry() -> Generator[KeyValueEntry, None, None]:
@pytest.fixture
def admin() -> User:
- with app.app_context() as ctx:
+ with app.app_context():
admin = db.session.query(User).filter_by(username="admin").one()
return admin
diff --git a/tests/integration_tests/key_value/commands/get_test.py b/tests/integration_tests/key_value/commands/get_test.py
index b14c64f75..bf5859dbb 100644
--- a/tests/integration_tests/key_value/commands/get_test.py
+++ b/tests/integration_tests/key_value/commands/get_test.py
@@ -28,7 +28,7 @@ from tests.integration_tests.key_value.commands.fixtures import (
ID_KEY,
JSON_CODEC,
JSON_VALUE,
- key_value_entry,
+ key_value_entry, # noqa: F401
RESOURCE,
UUID_KEY,
)
@@ -37,7 +37,7 @@ if TYPE_CHECKING:
from superset.key_value.models import KeyValueEntry
-def test_get_id_entry(app_context: AppContext, key_value_entry: KeyValueEntry) -> None:
+def test_get_id_entry(app_context: AppContext, key_value_entry: KeyValueEntry) -> None: # noqa: F811
from superset.commands.key_value.get import GetKeyValueCommand
value = GetKeyValueCommand(resource=RESOURCE, key=ID_KEY, codec=JSON_CODEC).run()
@@ -45,7 +45,8 @@ def test_get_id_entry(app_context: AppContext, key_value_entry: KeyValueEntry) -
def test_get_uuid_entry(
- app_context: AppContext, key_value_entry: KeyValueEntry
+ app_context: AppContext,
+ key_value_entry: KeyValueEntry, # noqa: F811
) -> None:
from superset.commands.key_value.get import GetKeyValueCommand
@@ -55,7 +56,7 @@ def test_get_uuid_entry(
def test_get_id_entry_missing(
app_context: AppContext,
- key_value_entry: KeyValueEntry,
+ key_value_entry: KeyValueEntry, # noqa: F811
) -> None:
from superset.commands.key_value.get import GetKeyValueCommand
diff --git a/tests/integration_tests/key_value/commands/update_test.py b/tests/integration_tests/key_value/commands/update_test.py
index 62d118b19..d8922ab2e 100644
--- a/tests/integration_tests/key_value/commands/update_test.py
+++ b/tests/integration_tests/key_value/commands/update_test.py
@@ -25,10 +25,10 @@ from flask_appbuilder.security.sqla.models import User
from superset.extensions import db
from superset.utils.core import override_user
from tests.integration_tests.key_value.commands.fixtures import (
- admin,
+ admin, # noqa: F401
ID_KEY,
JSON_CODEC,
- key_value_entry,
+ key_value_entry, # noqa: F401
RESOURCE,
UUID_KEY,
)
@@ -42,8 +42,8 @@ NEW_VALUE = "new value"
def test_update_id_entry(
app_context: AppContext,
- admin: User,
- key_value_entry: KeyValueEntry,
+ admin: User, # noqa: F811
+ key_value_entry: KeyValueEntry, # noqa: F811
) -> None:
from superset.commands.key_value.update import UpdateKeyValueCommand
from superset.key_value.models import KeyValueEntry
@@ -64,8 +64,8 @@ def test_update_id_entry(
def test_update_uuid_entry(
app_context: AppContext,
- admin: User,
- key_value_entry: KeyValueEntry,
+ admin: User, # noqa: F811
+ key_value_entry: KeyValueEntry, # noqa: F811
) -> None:
from superset.commands.key_value.update import UpdateKeyValueCommand
from superset.key_value.models import KeyValueEntry
@@ -84,7 +84,7 @@ def test_update_uuid_entry(
assert entry.changed_by_fk == admin.id
-def test_update_missing_entry(app_context: AppContext, admin: User) -> None:
+def test_update_missing_entry(app_context: AppContext, admin: User) -> None: # noqa: F811
from superset.commands.key_value.update import UpdateKeyValueCommand
with override_user(admin):
diff --git a/tests/integration_tests/key_value/commands/upsert_test.py b/tests/integration_tests/key_value/commands/upsert_test.py
index b23ddaee9..a652c2ce7 100644
--- a/tests/integration_tests/key_value/commands/upsert_test.py
+++ b/tests/integration_tests/key_value/commands/upsert_test.py
@@ -25,10 +25,10 @@ from flask_appbuilder.security.sqla.models import User
from superset.extensions import db
from superset.utils.core import override_user
from tests.integration_tests.key_value.commands.fixtures import (
- admin,
+ admin, # noqa: F401
ID_KEY,
JSON_CODEC,
- key_value_entry,
+ key_value_entry, # noqa: F401
RESOURCE,
UUID_KEY,
)
@@ -42,8 +42,8 @@ NEW_VALUE = "new value"
def test_upsert_id_entry(
app_context: AppContext,
- admin: User,
- key_value_entry: KeyValueEntry,
+ admin: User, # noqa: F811
+ key_value_entry: KeyValueEntry, # noqa: F811
) -> None:
from superset.commands.key_value.upsert import UpsertKeyValueCommand
from superset.key_value.models import KeyValueEntry
@@ -64,8 +64,8 @@ def test_upsert_id_entry(
def test_upsert_uuid_entry(
app_context: AppContext,
- admin: User,
- key_value_entry: KeyValueEntry,
+ admin: User, # noqa: F811
+ key_value_entry: KeyValueEntry, # noqa: F811
) -> None:
from superset.commands.key_value.upsert import UpsertKeyValueCommand
from superset.key_value.models import KeyValueEntry
@@ -84,7 +84,7 @@ def test_upsert_uuid_entry(
assert entry.changed_by_fk == admin.id
-def test_upsert_missing_entry(app_context: AppContext, admin: User) -> None:
+def test_upsert_missing_entry(app_context: AppContext, admin: User) -> None: # noqa: F811
from superset.commands.key_value.upsert import UpsertKeyValueCommand
from superset.key_value.models import KeyValueEntry
diff --git a/tests/integration_tests/log_api_tests.py b/tests/integration_tests/log_api_tests.py
index f09423262..cd55b6270 100644
--- a/tests/integration_tests/log_api_tests.py
+++ b/tests/integration_tests/log_api_tests.py
@@ -16,6 +16,7 @@
# under the License.
# isort:skip_file
"""Unit tests for Superset"""
+
from datetime import datetime, timedelta
import json
from typing import Optional
@@ -29,14 +30,14 @@ from superset import db
from superset.models.core import Log
from superset.views.log.api import LogRestApi
from tests.integration_tests.base_tests import SupersetTestCase
-from tests.integration_tests.conftest import with_feature_flags
+from tests.integration_tests.conftest import with_feature_flags # noqa: F401
from tests.integration_tests.constants import (
ADMIN_USERNAME,
ALPHA_USERNAME,
GAMMA_USERNAME,
)
from tests.integration_tests.dashboard_utils import create_dashboard
-from tests.integration_tests.test_app import app
+from tests.integration_tests.test_app import app # noqa: F401
EXPECTED_COLUMNS = [
"action",
@@ -173,7 +174,7 @@ class TestLogApi(SupersetTestCase):
log1 = self.insert_log("dashboard", admin_user, dashboard_id=dash.id)
log2 = self.insert_log("dashboard", admin_user, dashboard_id=dash.id)
- uri = f"api/v1/log/recent_activity/"
+ uri = "api/v1/log/recent_activity/"
rv = self.client.get(uri)
self.assertEqual(rv.status_code, 200)
response = json.loads(rv.data.decode("utf-8"))
diff --git a/tests/integration_tests/migrations/0769ef90fddd_fix_schema_perm_for_datasets__tests.py b/tests/integration_tests/migrations/0769ef90fddd_fix_schema_perm_for_datasets__tests.py
index 1b74a7f04..1e15e8711 100644
--- a/tests/integration_tests/migrations/0769ef90fddd_fix_schema_perm_for_datasets__tests.py
+++ b/tests/integration_tests/migrations/0769ef90fddd_fix_schema_perm_for_datasets__tests.py
@@ -23,8 +23,8 @@ from superset.connectors.sqla.models import SqlaTable
from superset.models.slice import Slice
from superset.utils.core import backend, get_example_default_schema
from tests.integration_tests.fixtures.birth_names_dashboard import (
- load_birth_names_dashboard_with_slices,
- load_birth_names_data,
+ load_birth_names_dashboard_with_slices, # noqa: F401
+ load_birth_names_data, # noqa: F401
)
migration_module = import_module(
diff --git a/tests/integration_tests/migrations/7e67aecbf3f1_chart_ds_constraint__tests.py b/tests/integration_tests/migrations/7e67aecbf3f1_chart_ds_constraint__tests.py
index a30741c0a..320c3bc21 100644
--- a/tests/integration_tests/migrations/7e67aecbf3f1_chart_ds_constraint__tests.py
+++ b/tests/integration_tests/migrations/7e67aecbf3f1_chart_ds_constraint__tests.py
@@ -53,4 +53,4 @@ def test_upgrade():
def test_upgrade_bad_json():
slc = Slice(datasource_type="query", params=json.dumps(sample_params))
- assert None == upgrade_slice(slc)
+ assert upgrade_slice(slc) is None
diff --git a/tests/integration_tests/migrations/87d38ad83218_migrate_can_view_and_drill_permission__tests.py b/tests/integration_tests/migrations/87d38ad83218_migrate_can_view_and_drill_permission__tests.py
index a2ccd5948..789b80db1 100644
--- a/tests/integration_tests/migrations/87d38ad83218_migrate_can_view_and_drill_permission__tests.py
+++ b/tests/integration_tests/migrations/87d38ad83218_migrate_can_view_and_drill_permission__tests.py
@@ -25,7 +25,6 @@ from superset.migrations.shared.security_converge import (
PermissionView,
ViewMenu,
)
-from tests.integration_tests.test_app import app
migration_module = import_module(
"superset.migrations.versions."
diff --git a/tests/integration_tests/migrations/ad07e4fdbaba_rm_time_range_endpoints_from_qc_3__test.py b/tests/integration_tests/migrations/ad07e4fdbaba_rm_time_range_endpoints_from_qc_3__test.py
index aa0679670..7d95b65de 100644
--- a/tests/integration_tests/migrations/ad07e4fdbaba_rm_time_range_endpoints_from_qc_3__test.py
+++ b/tests/integration_tests/migrations/ad07e4fdbaba_rm_time_range_endpoints_from_qc_3__test.py
@@ -133,4 +133,4 @@ def test_upgrade():
def test_upgrade_bad_json():
slc = Slice(slice_name="FOO", query_context="abc")
- assert None == upgrade_slice(slc)
+ assert upgrade_slice(slc) is None
diff --git a/tests/integration_tests/model_tests.py b/tests/integration_tests/model_tests.py
index b9cbd9332..7c3bc15c3 100644
--- a/tests/integration_tests/model_tests.py
+++ b/tests/integration_tests/model_tests.py
@@ -21,21 +21,21 @@ import unittest
from unittest import mock
from superset import security_manager
-from superset.connectors.sqla.models import SqlaTable
+from superset.connectors.sqla.models import SqlaTable # noqa: F401
from superset.exceptions import SupersetException
from superset.utils.core import override_user
from tests.integration_tests.fixtures.birth_names_dashboard import (
- load_birth_names_dashboard_with_slices,
- load_birth_names_data,
+ load_birth_names_dashboard_with_slices, # noqa: F401
+ load_birth_names_data, # noqa: F401
)
import pytest
from sqlalchemy.engine.url import make_url
-from sqlalchemy.types import DateTime
+from sqlalchemy.types import DateTime # noqa: F401
-import tests.integration_tests.test_app
+import tests.integration_tests.test_app # noqa: F401
from superset import app, db as metadata_db
-from superset.db_engine_specs.postgres import PostgresEngineSpec
+from superset.db_engine_specs.postgres import PostgresEngineSpec # noqa: F401
from superset.common.db_query_status import QueryStatus
from superset.models.core import Database
from superset.models.slice import Slice
@@ -43,8 +43,8 @@ from superset.utils.database import get_example_database
from .base_tests import SupersetTestCase
from .fixtures.energy_dashboard import (
- load_energy_table_with_slice,
- load_energy_table_data,
+ load_energy_table_with_slice, # noqa: F401
+ load_energy_table_data, # noqa: F401
)
diff --git a/tests/integration_tests/queries/api_tests.py b/tests/integration_tests/queries/api_tests.py
index 1987d2097..e38c48943 100644
--- a/tests/integration_tests/queries/api_tests.py
+++ b/tests/integration_tests/queries/api_tests.py
@@ -16,6 +16,7 @@
# under the License.
# isort:skip_file
"""Unit tests for Superset"""
+
from datetime import datetime, timedelta
from unittest import mock
import json
@@ -26,7 +27,7 @@ import pytest
import prison
from sqlalchemy.sql import func
-import tests.integration_tests.test_app
+import tests.integration_tests.test_app # noqa: F401
from superset import db, security_manager
from superset.common.db_query_status import QueryStatus
from superset.models.core import Database
diff --git a/tests/integration_tests/queries/saved_queries/api_tests.py b/tests/integration_tests/queries/saved_queries/api_tests.py
index 6b75f6790..6e5ed9abb 100644
--- a/tests/integration_tests/queries/saved_queries/api_tests.py
+++ b/tests/integration_tests/queries/saved_queries/api_tests.py
@@ -16,6 +16,7 @@
# under the License.
# isort:skip_file
"""Unit tests for Superset"""
+
import json
from datetime import datetime
from io import BytesIO
@@ -28,7 +29,6 @@ import prison
from freezegun import freeze_time
from sqlalchemy.sql import func, and_
-import tests.integration_tests.test_app
from superset import db
from superset.models.core import Database
from superset.models.core import FavStar
@@ -134,7 +134,7 @@ class TestSavedQueryApi(SupersetTestCase):
)
self.login(ADMIN_USERNAME)
- uri = f"api/v1/saved_query/"
+ uri = "api/v1/saved_query/"
rv = self.get_assert_metric(uri, "get_list")
assert rv.status_code == 200
data = json.loads(rv.data.decode("utf-8"))
@@ -166,7 +166,7 @@ class TestSavedQueryApi(SupersetTestCase):
)
self.login(user.username)
- uri = f"api/v1/saved_query/"
+ uri = "api/v1/saved_query/"
rv = self.get_assert_metric(uri, "get_list")
assert rv.status_code == 200
data = json.loads(rv.data.decode("utf-8"))
@@ -453,7 +453,7 @@ class TestSavedQueryApi(SupersetTestCase):
],
}
- uri = f"api/v1/saved_query/related/database"
+ uri = "api/v1/saved_query/related/database"
rv = self.client.get(uri)
assert rv.status_code == 200
data = json.loads(rv.data.decode("utf-8"))
@@ -464,7 +464,7 @@ class TestSavedQueryApi(SupersetTestCase):
SavedQuery API: Test related user not found
"""
self.login(ADMIN_USERNAME)
- uri = f"api/v1/saved_query/related/user"
+ uri = "api/v1/saved_query/related/user"
rv = self.client.get(uri)
assert rv.status_code == 404
@@ -479,7 +479,7 @@ class TestSavedQueryApi(SupersetTestCase):
)
self.login(ADMIN_USERNAME)
- uri = f"api/v1/saved_query/distinct/schema"
+ uri = "api/v1/saved_query/distinct/schema"
rv = self.client.get(uri)
assert rv.status_code == 200
data = json.loads(rv.data.decode("utf-8"))
@@ -497,7 +497,7 @@ class TestSavedQueryApi(SupersetTestCase):
SavedQuery API: Test related user not allowed
"""
self.login(ADMIN_USERNAME)
- uri = f"api/v1/saved_query/wrong"
+ uri = "api/v1/saved_query/wrong"
rv = self.client.get(uri)
assert rv.status_code == 405
@@ -554,7 +554,7 @@ class TestSavedQueryApi(SupersetTestCase):
"""
Saved Query API: Test create
"""
- admin = self.get_user("admin")
+ self.get_user("admin")
example_db = get_example_database()
post_data = {
@@ -566,7 +566,7 @@ class TestSavedQueryApi(SupersetTestCase):
}
self.login(ADMIN_USERNAME)
- uri = f"api/v1/saved_query/"
+ uri = "api/v1/saved_query/"
rv = self.client.post(uri, json=post_data)
data = json.loads(rv.data.decode("utf-8"))
assert rv.status_code == 201
diff --git a/tests/integration_tests/query_context_tests.py b/tests/integration_tests/query_context_tests.py
index 6ad3c30cd..3a14c37a4 100644
--- a/tests/integration_tests/query_context_tests.py
+++ b/tests/integration_tests/query_context_tests.py
@@ -44,8 +44,8 @@ from superset.utils.pandas_postprocessing.utils import FLAT_COLUMN_SEPARATOR
from tests.integration_tests.base_tests import SupersetTestCase
from tests.integration_tests.conftest import only_postgresql, only_sqlite
from tests.integration_tests.fixtures.birth_names_dashboard import (
- load_birth_names_dashboard_with_slices,
- load_birth_names_data,
+ load_birth_names_dashboard_with_slices, # noqa: F401
+ load_birth_names_data, # noqa: F401
)
from tests.integration_tests.fixtures.query_context import get_query_context
diff --git a/tests/integration_tests/reports/api_tests.py b/tests/integration_tests/reports/api_tests.py
index a58cb9ca0..1e1d91f77 100644
--- a/tests/integration_tests/reports/api_tests.py
+++ b/tests/integration_tests/reports/api_tests.py
@@ -16,6 +16,7 @@
# under the License.
# isort:skip_file
"""Unit tests for Superset"""
+
from datetime import datetime
import json
@@ -44,8 +45,8 @@ from tests.integration_tests.base_tests import SupersetTestCase
from tests.integration_tests.conftest import with_feature_flags
from tests.integration_tests.constants import ADMIN_USERNAME, GAMMA_USERNAME
from tests.integration_tests.fixtures.birth_names_dashboard import (
- load_birth_names_dashboard_with_slices,
- load_birth_names_data,
+ load_birth_names_dashboard_with_slices, # noqa: F401
+ load_birth_names_data, # noqa: F401
)
from tests.integration_tests.reports.utils import insert_report_schedule
@@ -311,7 +312,7 @@ class TestReportSchedulesApi(SupersetTestCase):
ReportSchedule API: Test info
"""
self.login(ADMIN_USERNAME)
- uri = f"api/v1/report/_info"
+ uri = "api/v1/report/_info" # noqa: F541
rv = self.get_assert_metric(uri, "info")
assert rv.status_code == 200
@@ -346,7 +347,7 @@ class TestReportSchedulesApi(SupersetTestCase):
ReportSchedule Api: Test get list report schedules
"""
self.login(ADMIN_USERNAME)
- uri = f"api/v1/report/"
+ uri = "api/v1/report/" # noqa: F541
rv = self.get_assert_metric(uri, "get_list")
expected_fields = [
@@ -425,7 +426,7 @@ class TestReportSchedulesApi(SupersetTestCase):
ReportSchedule Api: Test get list report schedules for different roles
"""
self.login(username)
- uri = f"api/v1/report/"
+ uri = "api/v1/report/" # noqa: F541
rv = self.get_assert_metric(uri, "get_list")
assert rv.status_code == 200
@@ -437,7 +438,7 @@ class TestReportSchedulesApi(SupersetTestCase):
ReportSchedule Api: Test get list report schedules for regular gamma user
"""
self.login(GAMMA_USERNAME)
- uri = f"api/v1/report/"
+ uri = "api/v1/report/" # noqa: F541
rv = self.client.get(uri)
assert rv.status_code == 403
@@ -915,9 +916,9 @@ class TestReportSchedulesApi(SupersetTestCase):
ReportSchedule Api: Test create report schedule with unsaved chart
"""
self.login(ADMIN_USERNAME)
- chart = db.session.query(Slice).first()
- dashboard = db.session.query(Dashboard).first()
- example_db = get_example_database()
+ db.session.query(Slice).first() # noqa: F841
+ db.session.query(Dashboard).first() # noqa: F841
+ get_example_database() # noqa: F841
report_schedule_data = {
"type": ReportScheduleType.REPORT,
@@ -944,9 +945,9 @@ class TestReportSchedulesApi(SupersetTestCase):
ReportSchedule Api: Test create report schedule with no dashboard id
"""
self.login(ADMIN_USERNAME)
- chart = db.session.query(Slice).first()
- dashboard = db.session.query(Dashboard).first()
- example_db = get_example_database()
+ db.session.query(Slice).first() # noqa: F841
+ db.session.query(Dashboard).first() # noqa: F841
+ get_example_database() # noqa: F841
report_schedule_data = {
"type": ReportScheduleType.REPORT,
"name": "name3",
@@ -972,8 +973,8 @@ class TestReportSchedulesApi(SupersetTestCase):
"""
self.login(ADMIN_USERNAME)
chart = db.session.query(Slice).first()
- dashboard = db.session.query(Dashboard).first()
- example_db = get_example_database()
+ db.session.query(Dashboard).first() # noqa: F841
+ get_example_database() # noqa: F841
report_schedule_data = {
"type": ReportScheduleType.REPORT,
"name": "name4",
@@ -1029,9 +1030,9 @@ class TestReportSchedulesApi(SupersetTestCase):
ReportSchedule Api: Test create multiple reports with the same creation method
"""
self.login(ADMIN_USERNAME)
- chart = db.session.query(Slice).first()
+ db.session.query(Slice).first() # noqa: F841
dashboard = db.session.query(Dashboard).first()
- example_db = get_example_database()
+ get_example_database() # noqa: F841
report_schedule_data = {
"type": ReportScheduleType.REPORT,
"name": "name4",
@@ -1475,7 +1476,7 @@ class TestReportSchedulesApi(SupersetTestCase):
"description": "Updated description",
}
uri = f"api/v1/report/{existing_report.id}"
- rv = self.put_assert_metric(uri, report_schedule_data, "put")
+ self.put_assert_metric(uri, report_schedule_data, "put") # noqa: F841
updated_report = (
db.session.query(ReportSchedule)
.filter(ReportSchedule.name == "name1")
@@ -1498,7 +1499,7 @@ class TestReportSchedulesApi(SupersetTestCase):
"owners": [],
}
uri = f"api/v1/report/{existing_report.id}"
- rv = self.put_assert_metric(uri, report_schedule_data, "put")
+ self.put_assert_metric(uri, report_schedule_data, "put") # noqa: F841
updated_report = (
db.session.query(ReportSchedule)
.filter(ReportSchedule.name == "name1")
@@ -1525,7 +1526,7 @@ class TestReportSchedulesApi(SupersetTestCase):
"owners": [],
}
uri = f"api/v1/report/{existing_report.id}"
- rv = self.put_assert_metric(uri, report_update_data, "put")
+ self.put_assert_metric(uri, report_update_data, "put")
updated_report = (
db.session.query(ReportSchedule)
.filter(ReportSchedule.name == "name1")
@@ -1538,7 +1539,7 @@ class TestReportSchedulesApi(SupersetTestCase):
"owners": [gamma.id],
}
uri = f"api/v1/report/{updated_report.id}"
- rv = self.put_assert_metric(uri, report_update_data, "put")
+ self.put_assert_metric(uri, report_update_data, "put") # noqa: F841
updated_report = (
db.session.query(ReportSchedule)
.filter(ReportSchedule.name == "name1")
diff --git a/tests/integration_tests/reports/commands/create_dashboard_report_tests.py b/tests/integration_tests/reports/commands/create_dashboard_report_tests.py
index a7f3001aa..c00b523e1 100644
--- a/tests/integration_tests/reports/commands/create_dashboard_report_tests.py
+++ b/tests/integration_tests/reports/commands/create_dashboard_report_tests.py
@@ -26,7 +26,7 @@ from superset.reports.models import (
ReportRecipientType,
ReportScheduleType,
)
-from tests.integration_tests.fixtures.tabbed_dashboard import tabbed_dashboard
+from tests.integration_tests.fixtures.tabbed_dashboard import tabbed_dashboard # noqa: F401
DASHBOARD_REPORT_SCHEDULE_DEFAULTS = {
"type": ReportScheduleType.REPORT,
@@ -45,7 +45,7 @@ DASHBOARD_REPORT_SCHEDULE_DEFAULTS = {
@pytest.mark.usefixtures("login_as_admin")
-def test_accept_valid_tab_ids(tabbed_dashboard: Dashboard) -> None:
+def test_accept_valid_tab_ids(tabbed_dashboard: Dashboard) -> None: # noqa: F811
report_schedule = CreateReportScheduleCommand(
{
**DASHBOARD_REPORT_SCHEDULE_DEFAULTS,
@@ -62,7 +62,7 @@ def test_accept_valid_tab_ids(tabbed_dashboard: Dashboard) -> None:
@pytest.mark.usefixtures("login_as_admin")
-def test_raise_exception_for_invalid_tab_ids(tabbed_dashboard: Dashboard) -> None:
+def test_raise_exception_for_invalid_tab_ids(tabbed_dashboard: Dashboard) -> None: # noqa: F811
with pytest.raises(ReportScheduleInvalidError) as exc_info:
CreateReportScheduleCommand(
{
diff --git a/tests/integration_tests/reports/commands/execute_dashboard_report_tests.py b/tests/integration_tests/reports/commands/execute_dashboard_report_tests.py
index 68150a9c3..a416da437 100644
--- a/tests/integration_tests/reports/commands/execute_dashboard_report_tests.py
+++ b/tests/integration_tests/reports/commands/execute_dashboard_report_tests.py
@@ -24,7 +24,7 @@ from superset.commands.dashboard.permalink.create import CreateDashboardPermalin
from superset.commands.report.execute import AsyncExecuteReportScheduleCommand
from superset.models.dashboard import Dashboard
from superset.reports.models import ReportSourceFormat
-from tests.integration_tests.fixtures.tabbed_dashboard import tabbed_dashboard
+from tests.integration_tests.fixtures.tabbed_dashboard import tabbed_dashboard # noqa: F401
from tests.integration_tests.reports.utils import create_dashboard_report
@@ -39,7 +39,7 @@ def test_report_for_dashboard_with_tabs(
create_dashboard_permalink_mock: MagicMock,
dashboard_screenshot_mock: MagicMock,
send_email_smtp_mock: MagicMock,
- tabbed_dashboard: Dashboard,
+ tabbed_dashboard: Dashboard, # noqa: F811
) -> None:
create_dashboard_permalink_mock.return_value = "permalink"
dashboard_screenshot_mock.get_screenshot.return_value = b"test-image"
@@ -77,7 +77,7 @@ def test_report_with_header_data(
create_dashboard_permalink_mock: MagicMock,
dashboard_screenshot_mock: MagicMock,
send_email_smtp_mock: MagicMock,
- tabbed_dashboard: Dashboard,
+ tabbed_dashboard: Dashboard, # noqa: F811
) -> None:
create_dashboard_permalink_mock.return_value = "permalink"
dashboard_screenshot_mock.get_screenshot.return_value = b"test-image"
diff --git a/tests/integration_tests/reports/commands_tests.py b/tests/integration_tests/reports/commands_tests.py
index 6fde3d236..fd271b84d 100644
--- a/tests/integration_tests/reports/commands_tests.py
+++ b/tests/integration_tests/reports/commands_tests.py
@@ -79,12 +79,12 @@ from superset.reports.notifications.exceptions import (
from superset.tasks.types import ExecutorType
from superset.utils.database import get_example_database
from tests.integration_tests.fixtures.birth_names_dashboard import (
- load_birth_names_dashboard_with_slices,
- load_birth_names_data,
+ load_birth_names_dashboard_with_slices, # noqa: F401
+ load_birth_names_data, # noqa: F401
)
from tests.integration_tests.fixtures.world_bank_dashboard import (
- load_world_bank_dashboard_with_slices_module_scope,
- load_world_bank_data,
+ load_world_bank_dashboard_with_slices_module_scope, # noqa: F401
+ load_world_bank_data, # noqa: F401
)
from tests.integration_tests.reports.utils import (
cleanup_report_schedule,
@@ -1398,7 +1398,7 @@ def test_alert_limit_is_applied(
create_alert_email_chart.database.db_engine_spec,
"fetch_data",
return_value=None,
- ) as fetch_data_mock:
+ ): # noqa: F841
AsyncExecuteReportScheduleCommand(
TEST_ID, create_alert_email_chart.id, datetime.utcnow()
).run()
@@ -1443,7 +1443,7 @@ def test_email_dashboard_report_fails_uncaught_exception(
and logs with uncaught exception
"""
# setup screenshot mock
- from smtplib import SMTPException
+ from smtplib import SMTPException # noqa: F401
screenshot_mock.return_value = SCREENSHOT_FILE
email_mock.side_effect = Exception("Uncaught exception")
@@ -1594,7 +1594,7 @@ def test_soft_timeout_alert(email_mock, create_alert_email_chart):
TEST_ID, create_alert_email_chart.id, datetime.utcnow()
).run()
- notification_targets = get_target_from_report_schedule(create_alert_email_chart)
+ get_target_from_report_schedule(create_alert_email_chart) # noqa: F841
# Assert the email smtp address, asserts a notification was sent with the error
assert email_mock.call_args[0][0] == DEFAULT_OWNER_EMAIL
@@ -1661,7 +1661,7 @@ def test_soft_timeout_csv(
TEST_ID, create_report_email_chart_with_csv.id, datetime.utcnow()
).run()
- notification_targets = get_target_from_report_schedule(
+ get_target_from_report_schedule( # noqa: F841
create_report_email_chart_with_csv
)
# Assert the email smtp address, asserts a notification was sent with the error
@@ -1701,7 +1701,7 @@ def test_generate_no_csv(
TEST_ID, create_report_email_chart_with_csv.id, datetime.utcnow()
).run()
- notification_targets = get_target_from_report_schedule(
+ get_target_from_report_schedule( # noqa: F841
create_report_email_chart_with_csv
)
# Assert the email smtp address, asserts a notification was sent with the error
@@ -1722,9 +1722,9 @@ def test_fail_screenshot(screenshot_mock, email_mock, create_report_email_chart)
"""
ExecuteReport Command: Test soft timeout on screenshot
"""
- from celery.exceptions import SoftTimeLimitExceeded
+ from celery.exceptions import SoftTimeLimitExceeded # noqa: F401
- from superset.commands.report.exceptions import AlertQueryTimeout
+ from superset.commands.report.exceptions import AlertQueryTimeout # noqa: F401
screenshot_mock.side_effect = Exception("Unexpected error")
with pytest.raises(ReportScheduleScreenshotFailedError):
@@ -1732,7 +1732,7 @@ def test_fail_screenshot(screenshot_mock, email_mock, create_report_email_chart)
TEST_ID, create_report_email_chart.id, datetime.utcnow()
).run()
- notification_targets = get_target_from_report_schedule(create_report_email_chart)
+ get_target_from_report_schedule(create_report_email_chart) # noqa: F841
# Assert the email smtp address, asserts a notification was sent with the error
assert email_mock.call_args[0][0] == DEFAULT_OWNER_EMAIL
diff --git a/tests/integration_tests/reports/utils.py b/tests/integration_tests/reports/utils.py
index 84db93aa5..8a7f1db4a 100644
--- a/tests/integration_tests/reports/utils.py
+++ b/tests/integration_tests/reports/utils.py
@@ -36,7 +36,7 @@ from superset.reports.models import (
ReportState,
)
from superset.utils.core import override_user
-from tests.integration_tests.test_app import app
+from tests.integration_tests.test_app import app # noqa: F401
from tests.integration_tests.utils import read_fixture
TEST_ID = str(uuid4())
diff --git a/tests/integration_tests/result_set_tests.py b/tests/integration_tests/result_set_tests.py
index 3e2b3656c..e58e16f07 100644
--- a/tests/integration_tests/result_set_tests.py
+++ b/tests/integration_tests/result_set_tests.py
@@ -17,7 +17,7 @@
# isort:skip_file
from datetime import datetime
-import tests.integration_tests.test_app
+import tests.integration_tests.test_app # noqa: F401
from superset.dataframe import df_to_records
from superset.db_engine_specs import BaseEngineSpec
from superset.result_set import dedup, SupersetResultSet
diff --git a/tests/integration_tests/security/analytics_db_safety_tests.py b/tests/integration_tests/security/analytics_db_safety_tests.py
index 3b686497a..7e36268e3 100644
--- a/tests/integration_tests/security/analytics_db_safety_tests.py
+++ b/tests/integration_tests/security/analytics_db_safety_tests.py
@@ -21,7 +21,6 @@ from sqlalchemy.engine.url import make_url
from superset.exceptions import SupersetSecurityException
from superset.security.analytics_db_safety import check_sqlalchemy_uri
-from tests.integration_tests.test_app import app
@pytest.mark.parametrize(
diff --git a/tests/integration_tests/security/api_tests.py b/tests/integration_tests/security/api_tests.py
index 9b96d7911..8d8f4334a 100644
--- a/tests/integration_tests/security/api_tests.py
+++ b/tests/integration_tests/security/api_tests.py
@@ -16,6 +16,7 @@
# under the License.
# isort:skip_file
"""Tests for security api methods"""
+
import json
import jwt
@@ -29,8 +30,8 @@ from superset.utils.urls import get_url_host
from tests.integration_tests.base_tests import SupersetTestCase
from tests.integration_tests.constants import ADMIN_USERNAME, GAMMA_USERNAME
from tests.integration_tests.fixtures.birth_names_dashboard import (
- load_birth_names_dashboard_with_slices,
- load_birth_names_data,
+ load_birth_names_dashboard_with_slices, # noqa: F401
+ load_birth_names_data, # noqa: F401
)
@@ -68,7 +69,7 @@ class TestSecurityCsrfApi(SupersetTestCase):
class TestSecurityGuestTokenApi(SupersetTestCase):
- uri = f"api/v1/security/guest_token/"
+ uri = "api/v1/security/guest_token/" # noqa: F541
def test_post_guest_token_unauthenticated(self):
"""
diff --git a/tests/integration_tests/security/guest_token_security_tests.py b/tests/integration_tests/security/guest_token_security_tests.py
index a6aed7d6f..1ccd6ed59 100644
--- a/tests/integration_tests/security/guest_token_security_tests.py
+++ b/tests/integration_tests/security/guest_token_security_tests.py
@@ -15,6 +15,7 @@
# specific language governing permissions and limitations
# under the License.
"""Unit tests for Superset"""
+
import json
from unittest.mock import Mock, patch
@@ -26,19 +27,19 @@ from superset.connectors.sqla.models import SqlaTable
from superset.daos.dashboard import EmbeddedDashboardDAO
from superset.exceptions import SupersetSecurityException
from superset.models.dashboard import Dashboard
-from superset.security.guest_token import GuestTokenResourceType
-from superset.sql_parse import Table
+from superset.security.guest_token import GuestTokenResourceType # noqa: F401
+from superset.sql_parse import Table # noqa: F401
from superset.utils.core import get_example_default_schema
from superset.utils.database import get_example_database
from tests.integration_tests.base_tests import SupersetTestCase
from tests.integration_tests.fixtures.birth_names_dashboard import (
- load_birth_names_dashboard_with_slices_class_scope,
- load_birth_names_data,
+ load_birth_names_dashboard_with_slices_class_scope, # noqa: F401
+ load_birth_names_data, # noqa: F401
)
from tests.integration_tests.fixtures.world_bank_dashboard import (
- load_world_bank_dashboard_with_slices,
- load_world_bank_dashboard_with_slices_class_scope,
- load_world_bank_data,
+ load_world_bank_dashboard_with_slices, # noqa: F401
+ load_world_bank_dashboard_with_slices_class_scope, # noqa: F401
+ load_world_bank_data, # noqa: F401
)
diff --git a/tests/integration_tests/security/migrate_roles_tests.py b/tests/integration_tests/security/migrate_roles_tests.py
index 4ab73a713..326509d08 100644
--- a/tests/integration_tests/security/migrate_roles_tests.py
+++ b/tests/integration_tests/security/migrate_roles_tests.py
@@ -15,9 +15,10 @@
# specific language governing permissions and limitations
# under the License.
"""Unit tests for alerting in Superset"""
+
import logging
from contextlib import contextmanager
-from unittest.mock import patch
+from unittest.mock import patch # noqa: F401
import pytest
from flask_appbuilder.security.sqla.models import Role
diff --git a/tests/integration_tests/security/row_level_security_tests.py b/tests/integration_tests/security/row_level_security_tests.py
index 69eaacb9f..48a95c54d 100644
--- a/tests/integration_tests/security/row_level_security_tests.py
+++ b/tests/integration_tests/security/row_level_security_tests.py
@@ -24,29 +24,29 @@ from flask import g
import json
import prison
-from superset import db, security_manager, app
+from superset import db, security_manager, app # noqa: F401
from superset.connectors.sqla.models import RowLevelSecurityFilter, SqlaTable
from superset.security.guest_token import (
GuestTokenResourceType,
GuestUser,
)
-from flask_babel import lazy_gettext as _
+from flask_babel import lazy_gettext as _ # noqa: F401
from flask_appbuilder.models.sqla import filters
from tests.integration_tests.base_tests import SupersetTestCase
-from tests.integration_tests.conftest import with_config
+from tests.integration_tests.conftest import with_config # noqa: F401
from tests.integration_tests.constants import ADMIN_USERNAME
from tests.integration_tests.fixtures.birth_names_dashboard import (
- load_birth_names_dashboard_with_slices,
- load_birth_names_data,
+ load_birth_names_dashboard_with_slices, # noqa: F401
+ load_birth_names_data, # noqa: F401
)
from tests.integration_tests.fixtures.energy_dashboard import (
- load_energy_table_with_slice,
- load_energy_table_data,
+ load_energy_table_with_slice, # noqa: F401
+ load_energy_table_data, # noqa: F401
)
from tests.integration_tests.fixtures.unicode_dashboard import (
- UNICODE_TBL_NAME,
- load_unicode_dashboard_with_slice,
- load_unicode_data,
+ UNICODE_TBL_NAME, # noqa: F401
+ load_unicode_dashboard_with_slice, # noqa: F401
+ load_unicode_data, # noqa: F401
)
@@ -472,7 +472,7 @@ class TestRowLevelSecurityUpdateAPI(SupersetTestCase):
"roles": [roles[1].id],
}
rv = self.client.put(f"/api/v1/rowlevelsecurity/{rls.id}", json=payload)
- status_code, data = rv.status_code, json.loads(rv.data.decode("utf-8"))
+ status_code, _data = rv.status_code, json.loads(rv.data.decode("utf-8")) # noqa: F841
self.assertEqual(status_code, 201)
@@ -608,7 +608,7 @@ class TestRowLevelSecurityWithRelatedAPI(SupersetTestCase):
"superset.views.filters.current_app.config",
{"EXTRA_RELATED_QUERY_FILTERS": {"role": _base_filter}},
):
- rv = self.client.get(f"/api/v1/rowlevelsecurity/related/roles")
+ rv = self.client.get("/api/v1/rowlevelsecurity/related/roles") # noqa: F541
assert rv.status_code == 200
response = json.loads(rv.data.decode("utf-8"))
response_roles = [result["text"] for result in response["result"]]
diff --git a/tests/integration_tests/security_tests.py b/tests/integration_tests/security_tests.py
index 02e60a927..e93964316 100644
--- a/tests/integration_tests/security_tests.py
+++ b/tests/integration_tests/security_tests.py
@@ -30,7 +30,7 @@ import pytest
from flask import current_app
from flask_appbuilder.security.sqla.models import Role
-from superset.daos.datasource import DatasourceDAO
+from superset.daos.datasource import DatasourceDAO # noqa: F401
from superset.models.dashboard import Dashboard
from superset import app, appbuilder, db, security_manager, viz
from superset.connectors.sqla.models import SqlaTable
@@ -52,16 +52,16 @@ from tests.integration_tests.base_tests import SupersetTestCase
from tests.integration_tests.constants import GAMMA_USERNAME
from tests.integration_tests.conftest import with_feature_flags
from tests.integration_tests.fixtures.public_role import (
- public_role_like_gamma,
- public_role_like_test_role,
+ public_role_like_gamma, # noqa: F401
+ public_role_like_test_role, # noqa: F401
)
from tests.integration_tests.fixtures.birth_names_dashboard import (
- load_birth_names_dashboard_with_slices,
- load_birth_names_data,
+ load_birth_names_dashboard_with_slices, # noqa: F401
+ load_birth_names_data, # noqa: F401
)
from tests.integration_tests.fixtures.world_bank_dashboard import (
- load_world_bank_dashboard_with_slices,
- load_world_bank_data,
+ load_world_bank_dashboard_with_slices, # noqa: F401
+ load_world_bank_data, # noqa: F401
)
NEW_SECURITY_CONVERGE_VIEWS = (
@@ -898,12 +898,12 @@ class TestRolePermission(SupersetTestCase):
db.session.query(SqlaTable).filter_by(table_name="tmp_table1").one()
)
self.assertEqual(changed_table1.perm, f"[tmp_db2].[tmp_table1](id:{table1.id})")
- self.assertEqual(changed_table1.schema_perm, f"[tmp_db2].[tmp_schema]")
+ self.assertEqual(changed_table1.schema_perm, f"[tmp_db2].[tmp_schema]") # noqa: F541
# Test Chart permission changed
slice1 = db.session.query(Slice).filter_by(slice_name="tmp_slice1").one()
self.assertEqual(slice1.perm, f"[tmp_db2].[tmp_table1](id:{table1.id})")
- self.assertEqual(slice1.schema_perm, f"[tmp_db2].[tmp_schema]")
+ self.assertEqual(slice1.schema_perm, f"[tmp_db2].[tmp_schema]") # noqa: F541
# cleanup
db.session.delete(slice1)
@@ -956,12 +956,12 @@ class TestRolePermission(SupersetTestCase):
db.session.query(SqlaTable).filter_by(table_name="tmp_table1").one()
)
self.assertEqual(changed_table1.perm, f"[tmp_db1].[tmp_table1](id:{table1.id})")
- self.assertEqual(changed_table1.schema_perm, f"[tmp_db1].[tmp_schema_changed]")
+ self.assertEqual(changed_table1.schema_perm, f"[tmp_db1].[tmp_schema_changed]") # noqa: F541
# Test Chart schema permission changed
slice1 = db.session.query(Slice).filter_by(slice_name="tmp_slice1").one()
self.assertEqual(slice1.perm, f"[tmp_db1].[tmp_table1](id:{table1.id})")
- self.assertEqual(slice1.schema_perm, f"[tmp_db1].[tmp_schema_changed]")
+ self.assertEqual(slice1.schema_perm, f"[tmp_db1].[tmp_schema_changed]") # noqa: F541
# cleanup
db.session.delete(slice1)
@@ -1069,12 +1069,12 @@ class TestRolePermission(SupersetTestCase):
self.assertEqual(
changed_table1.perm, f"[tmp_db2].[tmp_table1_changed](id:{table1.id})"
)
- self.assertEqual(changed_table1.schema_perm, f"[tmp_db2].[tmp_schema]")
+ self.assertEqual(changed_table1.schema_perm, f"[tmp_db2].[tmp_schema]") # noqa: F541
# Test Chart permission changed
slice1 = db.session.query(Slice).filter_by(slice_name="tmp_slice1").one()
self.assertEqual(slice1.perm, f"[tmp_db2].[tmp_table1_changed](id:{table1.id})")
- self.assertEqual(slice1.schema_perm, f"[tmp_db2].[tmp_schema]")
+ self.assertEqual(slice1.schema_perm, f"[tmp_db2].[tmp_schema]") # noqa: F541
# cleanup
db.session.delete(slice1)
@@ -2095,7 +2095,7 @@ class TestGuestTokens(SupersetTestCase):
now = time.time()
user = {"username": "test_guest"}
resources = [{"some": "resource"}]
- aud = get_url_host()
+ aud = get_url_host() # noqa: F841
claims = {
"user": user,
diff --git a/tests/integration_tests/sql_lab/api_tests.py b/tests/integration_tests/sql_lab/api_tests.py
index 92837d42b..0c55dc332 100644
--- a/tests/integration_tests/sql_lab/api_tests.py
+++ b/tests/integration_tests/sql_lab/api_tests.py
@@ -16,6 +16,7 @@
# under the License.
# isort:skip_file
"""Unit tests for Superset"""
+
import datetime
import json
import random
@@ -25,15 +26,15 @@ import io
import pytest
import prison
-from sqlalchemy.sql import func
+from sqlalchemy.sql import func # noqa: F401
from unittest import mock
from flask_appbuilder.security.sqla.models import Role
from tests.integration_tests.test_app import app
from superset import db, sql_lab
from superset.common.db_query_status import QueryStatus
-from superset.models.core import Database
-from superset.utils.database import get_example_database, get_main_database
+from superset.models.core import Database # noqa: F401
+from superset.utils.database import get_example_database, get_main_database # noqa: F401
from superset.utils import core as utils
from superset.models.sql_lab import Query
@@ -42,8 +43,8 @@ from tests.integration_tests.constants import (
ADMIN_USERNAME,
GAMMA_SQLLAB_NO_DATA_USERNAME,
)
-from tests.integration_tests.fixtures.birth_names_dashboard import load_birth_names_data
-from tests.integration_tests.fixtures.users import create_gamma_sqllab_no_data
+from tests.integration_tests.fixtures.birth_names_dashboard import load_birth_names_data # noqa: F401
+from tests.integration_tests.fixtures.users import create_gamma_sqllab_no_data # noqa: F401
QUERIES_FIXTURE_COUNT = 10
@@ -65,7 +66,7 @@ class TestSqlLabApi(SupersetTestCase):
assert resp.status_code == 200
data = json.loads(resp.data.decode("utf-8"))
result = data.get("result")
- assert result["active_tab"] == None
+ assert result["active_tab"] is None # noqa: E711
assert result["tab_state_ids"] == []
self.assertEqual(len(result["databases"]), 0)
@@ -94,7 +95,7 @@ class TestSqlLabApi(SupersetTestCase):
assert resp.status_code == 200
data = json.loads(resp.data.decode("utf-8"))
result = data.get("result")
- assert result["active_tab"] == None
+ assert result["active_tab"] is None # noqa: E711
assert result["tab_state_ids"] == []
@pytest.mark.usefixtures("load_birth_names_data")
@@ -195,7 +196,7 @@ class TestSqlLabApi(SupersetTestCase):
"unauth_user1",
"password",
"Dummy Role",
- email=f"unauth_user1@superset.org",
+ email="unauth_user1@superset.org", # noqa: F541
)
self.login(username="unauth_user1", password="password")
rv = self.client.get("/api/v1/sqllab/")
diff --git a/tests/integration_tests/sql_lab/commands_tests.py b/tests/integration_tests/sql_lab/commands_tests.py
index 11eb5de0c..d18c7dbad 100644
--- a/tests/integration_tests/sql_lab/commands_tests.py
+++ b/tests/integration_tests/sql_lab/commands_tests.py
@@ -31,7 +31,7 @@ from superset.exceptions import (
SupersetSecurityException,
SupersetTimeoutException,
)
-from superset.models.core import Database
+from superset.models.core import Database # noqa: F401
from superset.models.sql_lab import Query
from superset.sqllab.limiting_factor import LimitingFactor
from superset.sqllab.schemas import EstimateQueryCostSchema
diff --git a/tests/integration_tests/sql_validator_tests.py b/tests/integration_tests/sql_validator_tests.py
index 850cc9ada..12cb53058 100644
--- a/tests/integration_tests/sql_validator_tests.py
+++ b/tests/integration_tests/sql_validator_tests.py
@@ -16,6 +16,7 @@
# under the License.
# isort:skip_file
"""Unit tests for Sql Lab"""
+
import unittest
from unittest.mock import MagicMock, patch
diff --git a/tests/integration_tests/sqla_models_tests.py b/tests/integration_tests/sqla_models_tests.py
index 23bdbe496..b6131f32b 100644
--- a/tests/integration_tests/sqla_models_tests.py
+++ b/tests/integration_tests/sqla_models_tests.py
@@ -34,7 +34,7 @@ from superset.connectors.sqla.models import SqlaTable, TableColumn, SqlMetric
from superset.constants import EMPTY_STRING, NULL_STRING
from superset.db_engine_specs.bigquery import BigQueryEngineSpec
from superset.db_engine_specs.druid import DruidEngineSpec
-from superset.exceptions import QueryObjectValidationError, SupersetSecurityException
+from superset.exceptions import QueryObjectValidationError, SupersetSecurityException # noqa: F401
from superset.models.core import Database
from superset.utils.core import (
AdhocMetricExpressionType,
@@ -43,10 +43,9 @@ from superset.utils.core import (
)
from superset.utils.database import get_example_database
from tests.integration_tests.fixtures.birth_names_dashboard import (
- load_birth_names_dashboard_with_slices,
- load_birth_names_data,
+ load_birth_names_dashboard_with_slices, # noqa: F401
+ load_birth_names_data, # noqa: F401
)
-from tests.integration_tests.test_app import app
from .base_tests import SupersetTestCase
from .conftest import only_postgresql
diff --git a/tests/integration_tests/sqllab_tests.py b/tests/integration_tests/sqllab_tests.py
index ccc76a039..942653de1 100644
--- a/tests/integration_tests/sqllab_tests.py
+++ b/tests/integration_tests/sqllab_tests.py
@@ -16,6 +16,7 @@
# under the License.
# isort:skip_file
"""Unit tests for Sql Lab"""
+
import json
from datetime import datetime
from textwrap import dedent
@@ -28,7 +29,7 @@ import prison
from freezegun import freeze_time
from superset import db, security_manager
-from superset.connectors.sqla.models import SqlaTable
+from superset.connectors.sqla.models import SqlaTable # noqa: F401
from superset.db_engine_specs import BaseEngineSpec
from superset.db_engine_specs.hive import HiveEngineSpec
from superset.db_engine_specs.presto import PrestoEngineSpec
@@ -45,7 +46,7 @@ from superset.sql_lab import (
from superset.sql_parse import CtasMethod
from superset.utils.core import (
backend,
- datetime_to_epoch,
+ datetime_to_epoch, # noqa: F401
)
from superset.utils.database import get_example_database, get_main_database
@@ -58,10 +59,10 @@ from tests.integration_tests.constants import (
GAMMA_USERNAME,
)
from tests.integration_tests.fixtures.birth_names_dashboard import (
- load_birth_names_dashboard_with_slices,
- load_birth_names_data,
+ load_birth_names_dashboard_with_slices, # noqa: F401
+ load_birth_names_data, # noqa: F401
)
-from tests.integration_tests.fixtures.users import create_gamma_sqllab_no_data
+from tests.integration_tests.fixtures.users import create_gamma_sqllab_no_data # noqa: F401
QUERY_1 = "SELECT * FROM birth_names LIMIT 1"
QUERY_2 = "SELECT * FROM NO_TABLE"
@@ -217,7 +218,7 @@ class TestSqlLab(SupersetTestCase):
f"SELECT * FROM admin_database.{tmp_table_name}"
).fetchall()
names_count = engine.execute(
- f"SELECT COUNT(*) FROM birth_names"
+ f"SELECT COUNT(*) FROM birth_names" # noqa: F541
).first()
self.assertEqual(
names_count[0], len(data)
@@ -644,7 +645,7 @@ class TestSqlLab(SupersetTestCase):
SELECT /*+ hint */ @value AS foo;
"""
)
- mock_db = mock.MagicMock()
+ mock_db = mock.MagicMock() # noqa: F841
mock_query = mock.MagicMock()
mock_query.database.allow_run_async = False
mock_cursor = mock.MagicMock()
@@ -748,7 +749,7 @@ class TestSqlLab(SupersetTestCase):
SELECT /*+ hint */ @value AS foo;
"""
)
- mock_db = mock.MagicMock()
+ mock_db = mock.MagicMock() # noqa: F841
mock_query = mock.MagicMock()
mock_query.database.allow_run_async = False
mock_cursor = mock.MagicMock()
diff --git a/tests/integration_tests/stats_logger_tests.py b/tests/integration_tests/stats_logger_tests.py
index adf7cc1db..284cf415f 100644
--- a/tests/integration_tests/stats_logger_tests.py
+++ b/tests/integration_tests/stats_logger_tests.py
@@ -15,6 +15,7 @@
# specific language governing permissions and limitations
# under the License.
"""Unit tests for Superset"""
+
from unittest import TestCase
from unittest.mock import Mock, patch
diff --git a/tests/integration_tests/strategy_tests.py b/tests/integration_tests/strategy_tests.py
index 2f2fef226..07aa7f5b7 100644
--- a/tests/integration_tests/strategy_tests.py
+++ b/tests/integration_tests/strategy_tests.py
@@ -16,19 +16,20 @@
# under the License.
# isort:skip_file
"""Unit tests for Superset cache warmup"""
-from unittest.mock import MagicMock
+
+from unittest.mock import MagicMock # noqa: F401
from tests.integration_tests.fixtures.birth_names_dashboard import (
- load_birth_names_dashboard_with_slices,
- load_birth_names_data,
+ load_birth_names_dashboard_with_slices, # noqa: F401
+ load_birth_names_data, # noqa: F401
)
-from sqlalchemy import String, Date, Float
+from sqlalchemy import String, Date, Float # noqa: F401
import pytest
-import pandas as pd
+import pandas as pd # noqa: F401
-from superset.models.slice import Slice
-from superset.utils.database import get_example_database
+from superset.models.slice import Slice # noqa: F401
+from superset.utils.database import get_example_database # noqa: F401
from superset import db
@@ -38,18 +39,18 @@ from superset.tasks.cache import (
DashboardTagsStrategy,
TopNDashboardsStrategy,
)
-from superset.utils.urls import get_url_host
+from superset.utils.urls import get_url_host # noqa: F401
from tests.integration_tests.base_tests import SupersetTestCase
from tests.integration_tests.constants import ADMIN_USERNAME
from tests.integration_tests.dashboard_utils import (
- create_dashboard,
- create_slice,
- create_table_metadata,
+ create_dashboard, # noqa: F401
+ create_slice, # noqa: F401
+ create_table_metadata, # noqa: F401
)
from tests.integration_tests.fixtures.unicode_dashboard import (
- load_unicode_dashboard_with_slice,
- load_unicode_data,
+ load_unicode_dashboard_with_slice, # noqa: F401
+ load_unicode_data, # noqa: F401
)
diff --git a/tests/integration_tests/superset_test_config.py b/tests/integration_tests/superset_test_config.py
index b0461e79f..5ef3e2aa0 100644
--- a/tests/integration_tests/superset_test_config.py
+++ b/tests/integration_tests/superset_test_config.py
@@ -20,7 +20,8 @@ import math
from copy import copy
from datetime import timedelta
-from superset.config import *
+from superset.config import * # noqa: F403
+from superset.config import DATA_DIR
from tests.integration_tests.superset_test_custom_template_processors import (
CustomPrestoTemplateProcessor,
)
@@ -33,25 +34,26 @@ logging.getLogger("sqlalchemy.engine.Engine").setLevel(logging.WARNING)
SECRET_KEY = "dummy_secret_key_for_test_to_silence_warnings"
AUTH_USER_REGISTRATION_ROLE = "alpha"
-SQLALCHEMY_DATABASE_URI = "sqlite:///" + os.path.join(
- DATA_DIR, "unittests.integration_tests.db"
+SQLALCHEMY_DATABASE_URI = "sqlite:///" + os.path.join( # noqa: F405
+ DATA_DIR,
+ "unittests.integration_tests.db", # noqa: F405
)
DEBUG = False
SILENCE_FAB = False
# Allowing SQLALCHEMY_DATABASE_URI and SQLALCHEMY_EXAMPLES_URI to be defined as an env vars for
# continuous integration
-if "SUPERSET__SQLALCHEMY_DATABASE_URI" in os.environ:
- SQLALCHEMY_DATABASE_URI = os.environ["SUPERSET__SQLALCHEMY_DATABASE_URI"]
+if "SUPERSET__SQLALCHEMY_DATABASE_URI" in os.environ: # noqa: F405
+ SQLALCHEMY_DATABASE_URI = os.environ["SUPERSET__SQLALCHEMY_DATABASE_URI"] # noqa: F405
SQLALCHEMY_EXAMPLES_URI = SQLALCHEMY_DATABASE_URI
-if "SUPERSET__SQLALCHEMY_EXAMPLES_URI" in os.environ:
- SQLALCHEMY_EXAMPLES_URI = os.environ["SUPERSET__SQLALCHEMY_EXAMPLES_URI"]
+if "SUPERSET__SQLALCHEMY_EXAMPLES_URI" in os.environ: # noqa: F405
+ SQLALCHEMY_EXAMPLES_URI = os.environ["SUPERSET__SQLALCHEMY_EXAMPLES_URI"] # noqa: F405
-if "UPLOAD_FOLDER" in os.environ:
- UPLOAD_FOLDER = os.environ["UPLOAD_FOLDER"]
+if "UPLOAD_FOLDER" in os.environ: # noqa: F405
+ UPLOAD_FOLDER = os.environ["UPLOAD_FOLDER"] # noqa: F405
if "sqlite" in SQLALCHEMY_DATABASE_URI:
- logger.warning(
+ logger.warning( # noqa: F405
"SQLite Database support for metadata databases will be "
"removed in a future version of Superset."
)
@@ -63,7 +65,7 @@ HIVE_POLL_INTERVAL = 0.1
SQL_MAX_ROW = 50000
SQLLAB_CTAS_NO_LIMIT = True # SQL_MAX_ROW will not take effect for the CTA queries
FEATURE_FLAGS = {
- **FEATURE_FLAGS,
+ **FEATURE_FLAGS, # noqa: F405
"foo": "bar",
"KV_STORE": True,
"SHARE_QUERIES_VIA_KV_STORE": True,
@@ -90,11 +92,11 @@ FAB_ROLES = {"TestRole": [["Security", "menu_access"], ["List Users", "menu_acce
AUTH_ROLE_PUBLIC = "Public"
EMAIL_NOTIFICATIONS = False
-REDIS_HOST = os.environ.get("REDIS_HOST", "localhost")
-REDIS_PORT = os.environ.get("REDIS_PORT", "6379")
-REDIS_CELERY_DB = os.environ.get("REDIS_CELERY_DB", 2)
-REDIS_RESULTS_DB = os.environ.get("REDIS_RESULTS_DB", 3)
-REDIS_CACHE_DB = os.environ.get("REDIS_CACHE_DB", 4)
+REDIS_HOST = os.environ.get("REDIS_HOST", "localhost") # noqa: F405
+REDIS_PORT = os.environ.get("REDIS_PORT", "6379") # noqa: F405
+REDIS_CELERY_DB = os.environ.get("REDIS_CELERY_DB", 2) # noqa: F405
+REDIS_RESULTS_DB = os.environ.get("REDIS_RESULTS_DB", 3) # noqa: F405
+REDIS_CACHE_DB = os.environ.get("REDIS_CACHE_DB", 4) # noqa: F405
RATELIMIT_ENABLED = False
diff --git a/tests/integration_tests/superset_test_config_thumbnails.py b/tests/integration_tests/superset_test_config_thumbnails.py
index e1b17998c..a7c0368ff 100644
--- a/tests/integration_tests/superset_test_config_thumbnails.py
+++ b/tests/integration_tests/superset_test_config_thumbnails.py
@@ -17,22 +17,24 @@
# type: ignore
from copy import copy
-from superset.config import *
+from superset.config import * # noqa: F403
+from superset.config import DATA_DIR
SECRET_KEY = "dummy_secret_key_for_test_to_silence_warnings"
AUTH_USER_REGISTRATION_ROLE = "alpha"
-SQLALCHEMY_DATABASE_URI = "sqlite:///" + os.path.join(
- DATA_DIR, "unittests.integration_tests.db"
+SQLALCHEMY_DATABASE_URI = "sqlite:///" + os.path.join( # noqa: F405
+ DATA_DIR,
+ "unittests.integration_tests.db", # noqa: F405
)
DEBUG = True
# Allowing SQLALCHEMY_DATABASE_URI to be defined as an env var for
# continuous integration
-if "SUPERSET__SQLALCHEMY_DATABASE_URI" in os.environ:
- SQLALCHEMY_DATABASE_URI = os.environ["SUPERSET__SQLALCHEMY_DATABASE_URI"]
+if "SUPERSET__SQLALCHEMY_DATABASE_URI" in os.environ: # noqa: F405
+ SQLALCHEMY_DATABASE_URI = os.environ["SUPERSET__SQLALCHEMY_DATABASE_URI"] # noqa: F405
if "sqlite" in SQLALCHEMY_DATABASE_URI:
- logger.warning(
+ logger.warning( # noqa: F405
"SQLite Database support for metadata databases will be removed \
in a future version of Superset."
)
@@ -55,10 +57,10 @@ EMAIL_NOTIFICATIONS = False
CACHE_CONFIG = {"CACHE_TYPE": "SimpleCache"}
-REDIS_HOST = os.environ.get("REDIS_HOST", "localhost")
-REDIS_PORT = os.environ.get("REDIS_PORT", "6379")
-REDIS_CELERY_DB = os.environ.get("REDIS_CELERY_DB", 2)
-REDIS_RESULTS_DB = os.environ.get("REDIS_RESULTS_DB", 3)
+REDIS_HOST = os.environ.get("REDIS_HOST", "localhost") # noqa: F405
+REDIS_PORT = os.environ.get("REDIS_PORT", "6379") # noqa: F405
+REDIS_CELERY_DB = os.environ.get("REDIS_CELERY_DB", 2) # noqa: F405
+REDIS_RESULTS_DB = os.environ.get("REDIS_RESULTS_DB", 3) # noqa: F405
class CeleryConfig:
diff --git a/tests/integration_tests/superset_test_custom_template_processors.py b/tests/integration_tests/superset_test_custom_template_processors.py
index 124c73931..79ece8a11 100644
--- a/tests/integration_tests/superset_test_custom_template_processors.py
+++ b/tests/integration_tests/superset_test_custom_template_processors.py
@@ -18,7 +18,7 @@
import re
from datetime import datetime, timedelta
from functools import partial
-from typing import Any, Dict, SupportsInt
+from typing import Any, Dict, SupportsInt # noqa: F401
from superset.jinja_context import PrestoTemplateProcessor
diff --git a/tests/integration_tests/tagging_tests.py b/tests/integration_tests/tagging_tests.py
index 36fb8df3f..f3bec4e93 100644
--- a/tests/integration_tests/tagging_tests.py
+++ b/tests/integration_tests/tagging_tests.py
@@ -29,7 +29,7 @@ from superset.utils.core import DatasourceType
from superset.utils.database import get_main_database
from tests.integration_tests.base_tests import SupersetTestCase
from tests.integration_tests.conftest import with_feature_flags
-from tests.integration_tests.fixtures.tags import with_tagging_system_feature
+from tests.integration_tests.fixtures.tags import with_tagging_system_feature # noqa: F401
class TestTagging(SupersetTestCase):
diff --git a/tests/integration_tests/tags/api_tests.py b/tests/integration_tests/tags/api_tests.py
index 2b603edb4..b62ee11a2 100644
--- a/tests/integration_tests/tags/api_tests.py
+++ b/tests/integration_tests/tags/api_tests.py
@@ -16,43 +16,44 @@
# under the License.
# isort:skip_file
"""Unit tests for Superset"""
+
import json
import prison
from datetime import datetime
-from flask import g
+from flask import g # noqa: F401
import pytest
-import prison
+import prison # noqa: F811
from freezegun import freeze_time
from sqlalchemy.sql import func
-from sqlalchemy import and_
+from sqlalchemy import and_ # noqa: F401
from superset.models.dashboard import Dashboard
from superset.models.slice import Slice
-from superset.models.sql_lab import SavedQuery
-from superset.tags.models import user_favorite_tag_table
+from superset.models.sql_lab import SavedQuery # noqa: F401
+from superset.tags.models import user_favorite_tag_table # noqa: F401
from unittest.mock import patch
from urllib import parse
-import tests.integration_tests.test_app
-from superset import db, security_manager
-from superset.common.db_query_status import QueryStatus
-from superset.models.core import Database
-from superset.utils.database import get_example_database, get_main_database
+import tests.integration_tests.test_app # noqa: F401
+from superset import db, security_manager # noqa: F401
+from superset.common.db_query_status import QueryStatus # noqa: F401
+from superset.models.core import Database # noqa: F401
+from superset.utils.database import get_example_database, get_main_database # noqa: F401
from superset.tags.models import ObjectType, Tag, TagType, TaggedObject
from tests.integration_tests.constants import ADMIN_USERNAME, ALPHA_USERNAME
from tests.integration_tests.fixtures.birth_names_dashboard import (
- load_birth_names_dashboard_with_slices,
- load_birth_names_data,
+ load_birth_names_dashboard_with_slices, # noqa: F401
+ load_birth_names_data, # noqa: F401
)
from tests.integration_tests.fixtures.world_bank_dashboard import (
- load_world_bank_dashboard_with_slices,
- load_world_bank_data,
+ load_world_bank_dashboard_with_slices, # noqa: F401
+ load_world_bank_data, # noqa: F401
)
-from tests.integration_tests.fixtures.tags import with_tagging_system_feature
+from tests.integration_tests.fixtures.tags import with_tagging_system_feature # noqa: F401
from tests.integration_tests.base_tests import SupersetTestCase
from superset.daos.tag import TagDAO
-from superset.tags.models import ObjectType
+from superset.tags.models import ObjectType # noqa: F811
TAGS_FIXTURE_COUNT = 10
@@ -442,9 +443,9 @@ class TestTagApi(SupersetTestCase):
rv = self.client.post(uri, follow_redirects=True)
self.assertEqual(rv.status_code, 200)
- from sqlalchemy import and_
- from superset.tags.models import user_favorite_tag_table
- from flask import g
+ from sqlalchemy import and_ # noqa: F811
+ from superset.tags.models import user_favorite_tag_table # noqa: F811
+ from flask import g # noqa: F401, F811
association_row = (
db.session.query(user_favorite_tag_table)
@@ -479,7 +480,7 @@ class TestTagApi(SupersetTestCase):
@pytest.mark.usefixtures("create_tags")
def test_add_tag_not_found(self):
self.login(ADMIN_USERNAME)
- uri = f"api/v1/tag/123/favorites/"
+ uri = "api/v1/tag/123/favorites/" # noqa: F541
rv = self.client.post(uri, follow_redirects=True)
self.assertEqual(rv.status_code, 404)
@@ -487,7 +488,7 @@ class TestTagApi(SupersetTestCase):
@pytest.mark.usefixtures("create_tags")
def test_delete_favorite_tag_not_found(self):
self.login(ADMIN_USERNAME)
- uri = f"api/v1/tag/123/favorites/"
+ uri = "api/v1/tag/123/favorites/" # noqa: F541
rv = self.client.delete(uri, follow_redirects=True)
self.assertEqual(rv.status_code, 404)
@@ -497,7 +498,7 @@ class TestTagApi(SupersetTestCase):
def test_add_tag_user_not_found(self, flask_g):
self.login(ADMIN_USERNAME)
flask_g.user = None
- uri = f"api/v1/tag/123/favorites/"
+ uri = "api/v1/tag/123/favorites/" # noqa: F541
rv = self.client.post(uri, follow_redirects=True)
self.assertEqual(rv.status_code, 422)
@@ -507,7 +508,7 @@ class TestTagApi(SupersetTestCase):
def test_delete_favorite_tag_user_not_found(self, flask_g):
self.login(ADMIN_USERNAME)
flask_g.user = None
- uri = f"api/v1/tag/123/favorites/"
+ uri = "api/v1/tag/123/favorites/" # noqa: F541
rv = self.client.delete(uri, follow_redirects=True)
self.assertEqual(rv.status_code, 422)
@@ -515,7 +516,7 @@ class TestTagApi(SupersetTestCase):
@pytest.mark.usefixtures("load_world_bank_dashboard_with_slices")
def test_post_tag(self):
self.login(ADMIN_USERNAME)
- uri = f"api/v1/tag/"
+ uri = "api/v1/tag/" # noqa: F541
dashboard = (
db.session.query(Dashboard)
.filter(Dashboard.dashboard_title == "World Bank's Data")
@@ -527,7 +528,7 @@ class TestTagApi(SupersetTestCase):
)
self.assertEqual(rv.status_code, 201)
- user_id = self.get_user(username="admin").get_id()
+ self.get_user(username="admin").get_id() # noqa: F841
tag = (
db.session.query(Tag)
.filter(Tag.name == "my_tag", Tag.type == TagType.custom)
@@ -538,7 +539,7 @@ class TestTagApi(SupersetTestCase):
@pytest.mark.usefixtures("load_world_bank_dashboard_with_slices")
def test_post_tag_no_name_400(self):
self.login(ADMIN_USERNAME)
- uri = f"api/v1/tag/"
+ uri = "api/v1/tag/" # noqa: F541
dashboard = (
db.session.query(Dashboard)
.filter(Dashboard.dashboard_title == "World Bank's Data")
diff --git a/tests/integration_tests/tags/commands_tests.py b/tests/integration_tests/tags/commands_tests.py
index 3644c076e..4bed9fa0b 100644
--- a/tests/integration_tests/tags/commands_tests.py
+++ b/tests/integration_tests/tags/commands_tests.py
@@ -14,44 +14,44 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
-import itertools
-from unittest.mock import MagicMock, patch
+import itertools # noqa: F401
+from unittest.mock import MagicMock, patch # noqa: F401
import pytest
-import yaml
-from werkzeug.utils import secure_filename
+import yaml # noqa: F401
+from werkzeug.utils import secure_filename # noqa: F401
-from superset import db, security_manager
-from superset.commands.dashboard.exceptions import DashboardNotFoundError
+from superset import db, security_manager # noqa: F401
+from superset.commands.dashboard.exceptions import DashboardNotFoundError # noqa: F401
from superset.commands.dashboard.export import (
- append_charts,
- ExportDashboardsCommand,
- get_default_position,
+ append_charts, # noqa: F401
+ ExportDashboardsCommand, # noqa: F401
+ get_default_position, # noqa: F401
)
-from superset.commands.dashboard.importers import v0, v1
-from superset.commands.exceptions import CommandInvalidError
-from superset.commands.importers.exceptions import IncorrectVersionError
+from superset.commands.dashboard.importers import v0, v1 # noqa: F401
+from superset.commands.exceptions import CommandInvalidError # noqa: F401
+from superset.commands.importers.exceptions import IncorrectVersionError # noqa: F401
from superset.commands.tag.create import CreateCustomTagCommand
from superset.commands.tag.delete import DeleteTaggedObjectCommand, DeleteTagsCommand
-from superset.connectors.sqla.models import SqlaTable
-from superset.models.core import Database
+from superset.connectors.sqla.models import SqlaTable # noqa: F401
+from superset.models.core import Database # noqa: F401
from superset.models.dashboard import Dashboard
-from superset.models.slice import Slice
+from superset.models.slice import Slice # noqa: F401
from superset.tags.models import ObjectType, Tag, TaggedObject, TagType
from tests.integration_tests.base_tests import SupersetTestCase
from tests.integration_tests.fixtures.importexport import (
- chart_config,
- dashboard_config,
- dashboard_export,
- dashboard_metadata_config,
- database_config,
- dataset_config,
- dataset_metadata_config,
+ chart_config, # noqa: F401
+ dashboard_config, # noqa: F401
+ dashboard_export, # noqa: F401
+ dashboard_metadata_config, # noqa: F401
+ database_config, # noqa: F401
+ dataset_config, # noqa: F401
+ dataset_metadata_config, # noqa: F401
)
-from tests.integration_tests.fixtures.tags import with_tagging_system_feature
+from tests.integration_tests.fixtures.tags import with_tagging_system_feature # noqa: F401
from tests.integration_tests.fixtures.world_bank_dashboard import (
- load_world_bank_dashboard_with_slices,
- load_world_bank_data,
+ load_world_bank_dashboard_with_slices, # noqa: F401
+ load_world_bank_data, # noqa: F401
)
diff --git a/tests/integration_tests/tags/dao_tests.py b/tests/integration_tests/tags/dao_tests.py
index 38bb4d090..b06e22054 100644
--- a/tests/integration_tests/tags/dao_tests.py
+++ b/tests/integration_tests/tags/dao_tests.py
@@ -16,26 +16,26 @@
# under the License.
# isort:skip_file
from operator import and_
-from unittest.mock import patch
+from unittest.mock import patch # noqa: F401
import pytest
-from superset.daos.exceptions import DAOCreateFailedError, DAOException
+from superset.daos.exceptions import DAOCreateFailedError, DAOException # noqa: F401
from superset.models.slice import Slice
-from superset.models.sql_lab import SavedQuery
+from superset.models.sql_lab import SavedQuery # noqa: F401
from superset.daos.tag import TagDAO
-from superset.tags.exceptions import InvalidTagNameError
+from superset.tags.exceptions import InvalidTagNameError # noqa: F401
from superset.tags.models import ObjectType, Tag, TaggedObject
from tests.integration_tests.tags.api_tests import TAGS_FIXTURE_COUNT
-import tests.integration_tests.test_app # pylint: disable=unused-import
-from superset import db, security_manager
-from superset.daos.dashboard import DashboardDAO
+import tests.integration_tests.test_app # pylint: disable=unused-import # noqa: F401
+from superset import db, security_manager # noqa: F401
+from superset.daos.dashboard import DashboardDAO # noqa: F401
from superset.models.dashboard import Dashboard
from tests.integration_tests.base_tests import SupersetTestCase
from tests.integration_tests.fixtures.world_bank_dashboard import (
- load_world_bank_dashboard_with_slices,
- load_world_bank_data,
+ load_world_bank_dashboard_with_slices, # noqa: F401
+ load_world_bank_data, # noqa: F401
)
-from tests.integration_tests.fixtures.tags import with_tagging_system_feature
+from tests.integration_tests.fixtures.tags import with_tagging_system_feature # noqa: F401
class TestTagsDAO(SupersetTestCase):
diff --git a/tests/integration_tests/tasks/async_queries_tests.py b/tests/integration_tests/tasks/async_queries_tests.py
index 01880b7a6..bd487ff69 100644
--- a/tests/integration_tests/tasks/async_queries_tests.py
+++ b/tests/integration_tests/tasks/async_queries_tests.py
@@ -15,6 +15,7 @@
# specific language governing permissions and limitations
# under the License.
"""Unit tests for async query celery jobs in Superset"""
+
from unittest import mock
from uuid import uuid4
@@ -27,11 +28,11 @@ from superset.exceptions import SupersetException
from superset.extensions import async_query_manager, security_manager
from tests.integration_tests.base_tests import SupersetTestCase
from tests.integration_tests.fixtures.birth_names_dashboard import (
- load_birth_names_dashboard_with_slices,
- load_birth_names_data,
+ load_birth_names_dashboard_with_slices, # noqa: F401
+ load_birth_names_data, # noqa: F401
)
from tests.integration_tests.fixtures.query_context import get_query_context
-from tests.integration_tests.fixtures.tags import with_tagging_system_feature
+from tests.integration_tests.fixtures.tags import with_tagging_system_feature # noqa: F401
from tests.integration_tests.test_app import app
diff --git a/tests/integration_tests/test_jinja_context.py b/tests/integration_tests/test_jinja_context.py
index 6f776017f..8fa61841c 100644
--- a/tests/integration_tests/test_jinja_context.py
+++ b/tests/integration_tests/test_jinja_context.py
@@ -159,7 +159,7 @@ def test_custom_process_template(app_context: AppContext, mocker: MockFixture) -
tp = get_template_processor(database=database)
template = "SELECT '$DATE()'"
- assert tp.process_template(template) == f"SELECT '1970-01-01'"
+ assert tp.process_template(template) == f"SELECT '1970-01-01'" # noqa: F541
template = "SELECT '$DATE(1, 2)'"
assert tp.process_template(template) == "SELECT '1970-01-02'"
diff --git a/tests/integration_tests/thumbnails_tests.py b/tests/integration_tests/thumbnails_tests.py
index cfd2f9bca..623f69d88 100644
--- a/tests/integration_tests/thumbnails_tests.py
+++ b/tests/integration_tests/thumbnails_tests.py
@@ -39,8 +39,8 @@ from tests.integration_tests.base_tests import SupersetTestCase
from tests.integration_tests.conftest import with_feature_flags
from tests.integration_tests.constants import ADMIN_USERNAME, ALPHA_USERNAME
from tests.integration_tests.fixtures.birth_names_dashboard import (
- load_birth_names_dashboard_with_slices,
- load_birth_names_data,
+ load_birth_names_dashboard_with_slices, # noqa: F401
+ load_birth_names_data, # noqa: F401
)
from tests.integration_tests.test_app import app
@@ -64,7 +64,7 @@ class TestThumbnailsSeleniumLive(LiveServerTestCase):
"""
Thumbnails: Simple get async dashboard screenshot
"""
- with patch("superset.dashboards.api.DashboardRestApi.get") as mock_get:
+ with patch("superset.dashboards.api.DashboardRestApi.get") as mock_get: # noqa: F841
rv = self.client.get(DASHBOARD_URL)
resp = json.loads(rv.data.decode("utf-8"))
thumbnail_url = resp["result"][0]["thumbnail_url"]
@@ -237,14 +237,17 @@ class TestThumbnails(SupersetTestCase):
Thumbnails: Simple get async dashboard screenshot as selenium user
"""
self.login(ALPHA_USERNAME)
- with patch.dict(
- "superset.thumbnails.digest.current_app.config",
- {
- "THUMBNAIL_EXECUTE_AS": [ExecutorType.SELENIUM],
- },
- ), patch(
- "superset.thumbnails.digest._adjust_string_for_executor"
- ) as mock_adjust_string:
+ with (
+ patch.dict(
+ "superset.thumbnails.digest.current_app.config",
+ {
+ "THUMBNAIL_EXECUTE_AS": [ExecutorType.SELENIUM],
+ },
+ ),
+ patch(
+ "superset.thumbnails.digest._adjust_string_for_executor"
+ ) as mock_adjust_string,
+ ):
mock_adjust_string.return_value = self.digest_return_value
_, thumbnail_url = self._get_id_and_thumbnail_url(DASHBOARD_URL)
assert self.digest_hash in thumbnail_url
@@ -262,14 +265,17 @@ class TestThumbnails(SupersetTestCase):
"""
username = "alpha"
self.login(username)
- with patch.dict(
- "superset.thumbnails.digest.current_app.config",
- {
- "THUMBNAIL_EXECUTE_AS": [ExecutorType.CURRENT_USER],
- },
- ), patch(
- "superset.thumbnails.digest._adjust_string_for_executor"
- ) as mock_adjust_string:
+ with (
+ patch.dict(
+ "superset.thumbnails.digest.current_app.config",
+ {
+ "THUMBNAIL_EXECUTE_AS": [ExecutorType.CURRENT_USER],
+ },
+ ),
+ patch(
+ "superset.thumbnails.digest._adjust_string_for_executor"
+ ) as mock_adjust_string,
+ ):
mock_adjust_string.return_value = self.digest_return_value
_, thumbnail_url = self._get_id_and_thumbnail_url(DASHBOARD_URL)
assert self.digest_hash in thumbnail_url
@@ -309,14 +315,17 @@ class TestThumbnails(SupersetTestCase):
Thumbnails: Simple get async chart screenshot as selenium user
"""
self.login(ADMIN_USERNAME)
- with patch.dict(
- "superset.thumbnails.digest.current_app.config",
- {
- "THUMBNAIL_EXECUTE_AS": [ExecutorType.SELENIUM],
- },
- ), patch(
- "superset.thumbnails.digest._adjust_string_for_executor"
- ) as mock_adjust_string:
+ with (
+ patch.dict(
+ "superset.thumbnails.digest.current_app.config",
+ {
+ "THUMBNAIL_EXECUTE_AS": [ExecutorType.SELENIUM],
+ },
+ ),
+ patch(
+ "superset.thumbnails.digest._adjust_string_for_executor"
+ ) as mock_adjust_string,
+ ):
mock_adjust_string.return_value = self.digest_return_value
_, thumbnail_url = self._get_id_and_thumbnail_url(CHART_URL)
assert self.digest_hash in thumbnail_url
@@ -334,14 +343,17 @@ class TestThumbnails(SupersetTestCase):
"""
username = "alpha"
self.login(username)
- with patch.dict(
- "superset.thumbnails.digest.current_app.config",
- {
- "THUMBNAIL_EXECUTE_AS": [ExecutorType.CURRENT_USER],
- },
- ), patch(
- "superset.thumbnails.digest._adjust_string_for_executor"
- ) as mock_adjust_string:
+ with (
+ patch.dict(
+ "superset.thumbnails.digest.current_app.config",
+ {
+ "THUMBNAIL_EXECUTE_AS": [ExecutorType.CURRENT_USER],
+ },
+ ),
+ patch(
+ "superset.thumbnails.digest._adjust_string_for_executor"
+ ) as mock_adjust_string,
+ ):
mock_adjust_string.return_value = self.digest_return_value
_, thumbnail_url = self._get_id_and_thumbnail_url(CHART_URL)
assert self.digest_hash in thumbnail_url
diff --git a/tests/integration_tests/users/api_tests.py b/tests/integration_tests/users/api_tests.py
index 44711d96f..e4fc7ddd7 100644
--- a/tests/integration_tests/users/api_tests.py
+++ b/tests/integration_tests/users/api_tests.py
@@ -16,11 +16,12 @@
# under the License.
# type: ignore
"""Unit tests for Superset"""
+
import json
from unittest.mock import patch
from superset import security_manager
-from superset.utils import slack
+from superset.utils import slack # noqa: F401
from tests.integration_tests.base_tests import SupersetTestCase
from tests.integration_tests.conftest import with_config
from tests.integration_tests.constants import ADMIN_USERNAME
diff --git a/tests/integration_tests/utils/cache_manager_tests.py b/tests/integration_tests/utils/cache_manager_tests.py
index c5d4b390f..5133bf738 100644
--- a/tests/integration_tests/utils/cache_manager_tests.py
+++ b/tests/integration_tests/utils/cache_manager_tests.py
@@ -14,10 +14,10 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
-import pytest
+import pytest # noqa: F401
from superset.extensions import cache_manager
-from superset.utils.core import backend, DatasourceType
+from superset.utils.core import backend, DatasourceType # noqa: F401
from tests.integration_tests.base_tests import SupersetTestCase
@@ -46,4 +46,4 @@ class UtilsCacheManagerTests(SupersetTestCase):
}
def test_get_explore_form_data_cache_invalid_key(self):
- assert cache_manager.explore_form_data_cache.get("foo") == None
+ assert cache_manager.explore_form_data_cache.get("foo") is None # noqa: E711
diff --git a/tests/integration_tests/utils/core_tests.py b/tests/integration_tests/utils/core_tests.py
index 6954a0610..29b94d6d3 100644
--- a/tests/integration_tests/utils/core_tests.py
+++ b/tests/integration_tests/utils/core_tests.py
@@ -17,7 +17,6 @@
import pytest
from superset.utils.core import form_data_to_adhoc, simple_filter_to_adhoc
-from tests.integration_tests.test_app import app
def test_simple_filter_to_adhoc_generates_deterministic_values():
diff --git a/tests/integration_tests/utils/csv_tests.py b/tests/integration_tests/utils/csv_tests.py
index 38c1dd51a..bef7e23ca 100644
--- a/tests/integration_tests/utils/csv_tests.py
+++ b/tests/integration_tests/utils/csv_tests.py
@@ -18,7 +18,7 @@ import io
import pandas as pd
import pyarrow as pa
-import pytest
+import pytest # noqa: F401
from superset.utils import csv
diff --git a/tests/integration_tests/utils/encrypt_tests.py b/tests/integration_tests/utils/encrypt_tests.py
index 33a742cbd..0edca2d7f 100644
--- a/tests/integration_tests/utils/encrypt_tests.py
+++ b/tests/integration_tests/utils/encrypt_tests.py
@@ -40,9 +40,9 @@ class CustomEncFieldAdapter(AbstractEncryptedFieldAdapter):
class EncryptedFieldTest(SupersetTestCase):
def setUp(self) -> None:
- self.app.config[
- "SQLALCHEMY_ENCRYPTED_FIELD_TYPE_ADAPTER"
- ] = SQLAlchemyUtilsAdapter
+ self.app.config["SQLALCHEMY_ENCRYPTED_FIELD_TYPE_ADAPTER"] = (
+ SQLAlchemyUtilsAdapter
+ )
encrypted_field_factory.init_app(self.app)
super().setUp()
@@ -53,9 +53,9 @@ class EncryptedFieldTest(SupersetTestCase):
self.assertEqual(self.app.config["SECRET_KEY"], field.key)
def test_custom_adapter(self):
- self.app.config[
- "SQLALCHEMY_ENCRYPTED_FIELD_TYPE_ADAPTER"
- ] = CustomEncFieldAdapter
+ self.app.config["SQLALCHEMY_ENCRYPTED_FIELD_TYPE_ADAPTER"] = (
+ CustomEncFieldAdapter
+ )
encrypted_field_factory.init_app(self.app)
field = encrypted_field_factory.create(String(1024))
self.assertTrue(isinstance(field, StringEncryptedType))
diff --git a/tests/integration_tests/utils/hashing_tests.py b/tests/integration_tests/utils/hashing_tests.py
index 406d383d7..719d03ff2 100644
--- a/tests/integration_tests/utils/hashing_tests.py
+++ b/tests/integration_tests/utils/hashing_tests.py
@@ -18,7 +18,7 @@ import datetime
import math
from typing import Any
-import pytest
+import pytest # noqa: F401
from superset.utils.hashing import md5_sha_from_dict, md5_sha_from_str
diff --git a/tests/integration_tests/utils_tests.py b/tests/integration_tests/utils_tests.py
index 18e2ab801..6f33a14dc 100644
--- a/tests/integration_tests/utils_tests.py
+++ b/tests/integration_tests/utils_tests.py
@@ -22,28 +22,28 @@ import json
import os
import re
from typing import Any, Optional
-from unittest.mock import Mock, patch
+from unittest.mock import Mock, patch # noqa: F401
from superset.commands.database.exceptions import DatabaseInvalidError
from tests.integration_tests.fixtures.birth_names_dashboard import (
- load_birth_names_dashboard_with_slices,
- load_birth_names_data,
+ load_birth_names_dashboard_with_slices, # noqa: F401
+ load_birth_names_data, # noqa: F401
)
import numpy as np
import pandas as pd
import pytest
-from flask import Flask, g
+from flask import Flask, g # noqa: F401
import marshmallow
-from sqlalchemy.exc import ArgumentError
+from sqlalchemy.exc import ArgumentError # noqa: F401
-import tests.integration_tests.test_app
+import tests.integration_tests.test_app # noqa: F401
from superset import app, db, security_manager
from superset.constants import NO_TIME_RANGE
-from superset.exceptions import CertificateException, SupersetException
+from superset.exceptions import CertificateException, SupersetException # noqa: F401
from superset.models.core import Database, Log
-from superset.models.dashboard import Dashboard
-from superset.models.slice import Slice
+from superset.models.dashboard import Dashboard # noqa: F401
+from superset.models.slice import Slice # noqa: F401
from superset.utils.core import (
base_json_conv,
cast_to_num,
@@ -74,12 +74,12 @@ from superset.utils.core import (
from superset.utils.database import get_or_create_db
from superset.utils import schema
from superset.utils.hashing import md5_sha_from_str
-from superset.views.utils import build_extra_filters, get_form_data
+from superset.views.utils import build_extra_filters, get_form_data # noqa: F401
from tests.integration_tests.base_tests import SupersetTestCase
from tests.integration_tests.constants import ADMIN_USERNAME
from tests.integration_tests.fixtures.world_bank_dashboard import (
- load_world_bank_dashboard_with_slices,
- load_world_bank_data,
+ load_world_bank_dashboard_with_slices, # noqa: F401
+ load_world_bank_data, # noqa: F401
)
from .fixtures.certificates import ssl_certificate
@@ -901,7 +901,7 @@ class TestUtils(SupersetTestCase):
dashboard_id = 1
assert slc.viz is not None
- resp = self.get_json_resp(
+ resp = self.get_json_resp( # noqa: F841
f"/superset/explore_json/{slc.datasource_type}/{slc.datasource_id}/"
+ f'?form_data={{"slice_id": {slc.id}}}&dashboard_id={dashboard_id}',
{"form_data": json.dumps(slc.viz.form_data)},
diff --git a/tests/integration_tests/viz_tests.py b/tests/integration_tests/viz_tests.py
index 5c7a494d8..86f3853a4 100644
--- a/tests/integration_tests/viz_tests.py
+++ b/tests/integration_tests/viz_tests.py
@@ -24,7 +24,7 @@ import numpy as np
import pandas as pd
import pytest
-import tests.integration_tests.test_app
+import tests.integration_tests.test_app # noqa: F401
import superset.viz as viz
from superset import app
from superset.constants import NULL_STRING
@@ -117,7 +117,7 @@ class TestBaseViz(SupersetTestCase):
datasource.get_column = Mock(return_value=mock_dttm_col)
mock_dttm_col.python_date_format = "epoch_ms"
result = test_viz.get_df(query_obj)
- import logging
+ import logging # noqa: F401
logger.info(result)
pd.testing.assert_series_equal(
diff --git a/tests/unit_tests/annotation_layers/schema_tests.py b/tests/unit_tests/annotation_layers/schema_tests.py
index b8efcbb00..d84c09301 100644
--- a/tests/unit_tests/annotation_layers/schema_tests.py
+++ b/tests/unit_tests/annotation_layers/schema_tests.py
@@ -105,7 +105,7 @@ def test_annotation_post_schema_short_descr_null() -> None:
def test_annotation_post_schema_start_dttm_null() -> None:
with pytest.raises(ValidationError):
- result = AnnotationPostSchema().load(
+        AnnotationPostSchema().load(
{"short_descr": "foo", "start_dttm": None, "end_dttm": END_STR}
)
diff --git a/tests/unit_tests/charts/commands/importers/v1/import_test.py b/tests/unit_tests/charts/commands/importers/v1/import_test.py
index e6f6d0020..4e1a3703c 100644
--- a/tests/unit_tests/charts/commands/importers/v1/import_test.py
+++ b/tests/unit_tests/charts/commands/importers/v1/import_test.py
@@ -24,7 +24,7 @@ from flask_appbuilder.security.sqla.models import Role, User
from pytest_mock import MockFixture
from sqlalchemy.orm.session import Session
-from superset import db, security_manager
+from superset import security_manager
from superset.commands.chart.importers.v1.utils import import_chart
from superset.commands.exceptions import ImportFailedError
from superset.connectors.sqla.models import Database, SqlaTable
diff --git a/tests/unit_tests/charts/test_post_processing.py b/tests/unit_tests/charts/test_post_processing.py
index 9f8962f85..790c49451 100644
--- a/tests/unit_tests/charts/test_post_processing.py
+++ b/tests/unit_tests/charts/test_post_processing.py
@@ -19,7 +19,6 @@
import pandas as pd
import pytest
from flask_babel import lazy_gettext as _
-from numpy import True_
from sqlalchemy.orm.session import Session
from superset.charts.post_processing import apply_post_process, pivot_df, table
@@ -1391,7 +1390,7 @@ def test_apply_post_process_without_result_format():
with pytest.raises(Exception) as ex:
apply_post_process(result, form_data)
- assert ex.match("Result format foo not supported") == True
+    assert ex.match("Result format foo not supported") is True
def test_apply_post_process_json_format():
diff --git a/tests/unit_tests/commands/test_utils.py b/tests/unit_tests/commands/test_utils.py
index cb99ac37b..d60bf4791 100644
--- a/tests/unit_tests/commands/test_utils.py
+++ b/tests/unit_tests/commands/test_utils.py
@@ -17,7 +17,6 @@
from unittest.mock import MagicMock, patch
-import pytest
from superset.commands.utils import compute_owner_list, populate_owner_list, User
diff --git a/tests/unit_tests/common/test_query_object_factory.py b/tests/unit_tests/common/test_query_object_factory.py
index 590ace3f1..a67f2887d 100644
--- a/tests/unit_tests/common/test_query_object_factory.py
+++ b/tests/unit_tests/common/test_query_object_factory.py
@@ -15,9 +15,9 @@
# specific language governing permissions and limitations
# under the License.
from typing import Any, Optional
-from unittest.mock import Mock, patch
+from unittest.mock import Mock
-from pytest import fixture, mark
+from pytest import fixture
from superset.common.query_object_factory import QueryObjectFactory
from tests.common.query_context_generator import QueryContextGenerator
diff --git a/tests/unit_tests/dao/tag_test.py b/tests/unit_tests/dao/tag_test.py
index 652d3729b..d50e7d8a2 100644
--- a/tests/unit_tests/dao/tag_test.py
+++ b/tests/unit_tests/dao/tag_test.py
@@ -14,10 +14,8 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
-from collections.abc import Iterator
import pytest
-from sqlalchemy.orm.session import Session
def test_user_favorite_tag(mocker):
@@ -79,7 +77,7 @@ def test_remove_user_favorite_tag_no_user(mocker):
from superset.exceptions import MissingUserContextException
# Mock the behavior of TagDAO and g
- mock_session = mocker.patch("superset.daos.tag.db.session")
+    mocker.patch("superset.daos.tag.db.session")
mock_TagDAO = mocker.patch("superset.daos.tag.TagDAO")
mock_tag = mocker.MagicMock(users_favorited=[])
mock_TagDAO.find_by_id.return_value = mock_tag
@@ -94,7 +92,6 @@ def test_remove_user_favorite_tag_no_user(mocker):
def test_remove_user_favorite_tag_exc_raise(mocker):
from superset.daos.tag import TagDAO
- from superset.exceptions import MissingUserContextException
# Mock the behavior of TagDAO and g
mock_session = mocker.patch("superset.daos.tag.db.session")
@@ -102,7 +99,9 @@ def test_remove_user_favorite_tag_exc_raise(mocker):
mock_tag = mocker.MagicMock(users_favorited=[])
mock_TagDAO.find_by_id.return_value = mock_tag
- mock_g = mocker.patch("superset.daos.tag.g") # Replace with the actual path to g
+    mocker.patch(
+        "superset.daos.tag.g"
+    )  # Replace with the actual path to g
# Test that exception is raised when commit fails
mock_session.commit.side_effect = Exception("DB Error")
@@ -115,7 +114,7 @@ def test_user_favorite_tag_no_user(mocker):
from superset.exceptions import MissingUserContextException
# Mock the behavior of TagDAO and g
- mock_session = mocker.patch("superset.daos.tag.db.session")
+    mocker.patch("superset.daos.tag.db.session")
mock_TagDAO = mocker.patch("superset.daos.tag.TagDAO")
mock_tag = mocker.MagicMock(users_favorited=[])
mock_TagDAO.find_by_id.return_value = mock_tag
@@ -130,7 +129,6 @@ def test_user_favorite_tag_no_user(mocker):
def test_user_favorite_tag_exc_raise(mocker):
from superset.daos.tag import TagDAO
- from superset.exceptions import MissingUserContextException
# Mock the behavior of TagDAO and g
mock_session = mocker.patch("superset.daos.tag.db.session")
@@ -138,7 +136,9 @@ def test_user_favorite_tag_exc_raise(mocker):
mock_tag = mocker.MagicMock(users_favorited=[])
mock_TagDAO.find_by_id.return_value = mock_tag
- mock_g = mocker.patch("superset.daos.tag.g") # Replace with the actual path to g
+    mocker.patch(
+        "superset.daos.tag.g"
+    )  # Replace with the actual path to g
# Test that exception is raised when commit fails
mock_session.commit.side_effect = Exception("DB Error")
@@ -150,7 +150,6 @@ def test_create_tag_relationship(mocker):
from superset.daos.tag import TagDAO
from superset.tags.models import ( # Assuming these are defined in the same module
ObjectType,
- TaggedObject,
)
mock_session = mocker.patch("superset.daos.tag.db.session")
diff --git a/tests/unit_tests/dao/user_test.py b/tests/unit_tests/dao/user_test.py
index 3808be28c..a2a74a554 100644
--- a/tests/unit_tests/dao/user_test.py
+++ b/tests/unit_tests/dao/user_test.py
@@ -18,10 +18,9 @@ from unittest.mock import MagicMock
import pytest
from flask_appbuilder.security.sqla.models import User
-from sqlalchemy.orm import Query
from sqlalchemy.orm.exc import NoResultFound
-from superset.daos.user import db, UserDAO
+from superset.daos.user import UserDAO
from superset.models.user_attributes import UserAttribute
@@ -44,7 +43,7 @@ def test_get_by_id_found(mock_db_session):
mock_query.filter_by.return_value.one.return_value = mock_user
# Execute
- result = UserDAO.get_by_id(user_id)
+    UserDAO.get_by_id(user_id)
# Assert
mock_db_session.query.assert_called_with(User)
diff --git a/tests/unit_tests/dashboards/commands/importers/v1/import_test.py b/tests/unit_tests/dashboards/commands/importers/v1/import_test.py
index ac3d2a919..208b72343 100644
--- a/tests/unit_tests/dashboards/commands/importers/v1/import_test.py
+++ b/tests/unit_tests/dashboards/commands/importers/v1/import_test.py
@@ -24,7 +24,7 @@ from flask_appbuilder.security.sqla.models import Role, User
from pytest_mock import MockFixture
from sqlalchemy.orm.session import Session
-from superset import db, security_manager
+from superset import security_manager
from superset.commands.dashboard.importers.v1.utils import import_dashboard
from superset.commands.exceptions import ImportFailedError
from superset.models.dashboard import Dashboard
diff --git a/tests/unit_tests/databases/api_test.py b/tests/unit_tests/databases/api_test.py
index d27e5a873..2f3c11f9a 100644
--- a/tests/unit_tests/databases/api_test.py
+++ b/tests/unit_tests/databases/api_test.py
@@ -411,71 +411,72 @@ def test_delete_ssh_tunnel(
"""
Test that we can delete SSH Tunnel
"""
- from superset.daos.database import DatabaseDAO
- from superset.databases.api import DatabaseRestApi
- from superset.databases.ssh_tunnel.models import SSHTunnel
- from superset.models.core import Database
+ with app.app_context():
+ from superset.daos.database import DatabaseDAO
+ from superset.databases.api import DatabaseRestApi
+ from superset.databases.ssh_tunnel.models import SSHTunnel
+ from superset.models.core import Database
- DatabaseRestApi.datamodel.session = session
+ DatabaseRestApi.datamodel.session = session
- # create table for databases
- Database.metadata.create_all(session.get_bind()) # pylint: disable=no-member
+ # create table for databases
+ Database.metadata.create_all(session.get_bind()) # pylint: disable=no-member
- # Create our Database
- database = Database(
- database_name="my_database",
- sqlalchemy_uri="gsheets://",
- encrypted_extra=json.dumps(
- {
- "service_account_info": {
- "type": "service_account",
- "project_id": "black-sanctum-314419",
- "private_key_id": "259b0d419a8f840056158763ff54d8b08f7b8173",
- "private_key": "SECRET",
- "client_email": "google-spreadsheets-demo-servi@black-sanctum-314419.iam.gserviceaccount.com",
- "client_id": "SSH_TUNNEL_CREDENTIALS_CLIENT",
- "auth_uri": "https://accounts.google.com/o/oauth2/auth",
- "token_uri": "https://oauth2.googleapis.com/token",
- "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
- "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/google-spreadsheets-demo-servi%40black-sanctum-314419.iam.gserviceaccount.com",
- },
- }
- ),
- )
- db.session.add(database)
- db.session.commit()
+ # Create our Database
+ database = Database(
+ database_name="my_database",
+ sqlalchemy_uri="gsheets://",
+ encrypted_extra=json.dumps(
+ {
+ "service_account_info": {
+ "type": "service_account",
+ "project_id": "black-sanctum-314419",
+ "private_key_id": "259b0d419a8f840056158763ff54d8b08f7b8173",
+ "private_key": "SECRET",
+ "client_email": "google-spreadsheets-demo-servi@black-sanctum-314419.iam.gserviceaccount.com",
+ "client_id": "SSH_TUNNEL_CREDENTIALS_CLIENT",
+ "auth_uri": "https://accounts.google.com/o/oauth2/auth",
+ "token_uri": "https://oauth2.googleapis.com/token",
+ "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
+ "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/google-spreadsheets-demo-servi%40black-sanctum-314419.iam.gserviceaccount.com",
+ },
+ }
+ ),
+ )
+ db.session.add(database)
+ db.session.commit()
- # mock the lookup so that we don't need to include the driver
- mocker.patch("sqlalchemy.engine.URL.get_driver_name", return_value="gsheets")
- mocker.patch("superset.utils.log.DBEventLogger.log")
- mocker.patch(
- "superset.commands.database.ssh_tunnel.delete.is_feature_enabled",
- return_value=True,
- )
+ # mock the lookup so that we don't need to include the driver
+ mocker.patch("sqlalchemy.engine.URL.get_driver_name", return_value="gsheets")
+ mocker.patch("superset.utils.log.DBEventLogger.log")
+ mocker.patch(
+ "superset.commands.database.ssh_tunnel.delete.is_feature_enabled",
+ return_value=True,
+ )
- # Create our SSHTunnel
- tunnel = SSHTunnel(
- database_id=1,
- database=database,
- )
+ # Create our SSHTunnel
+ tunnel = SSHTunnel(
+ database_id=1,
+ database=database,
+ )
- db.session.add(tunnel)
- db.session.commit()
+ db.session.add(tunnel)
+ db.session.commit()
- # Get our recently created SSHTunnel
- response_tunnel = DatabaseDAO.get_ssh_tunnel(1)
- assert response_tunnel
- assert isinstance(response_tunnel, SSHTunnel)
- assert 1 == response_tunnel.database_id
+ # Get our recently created SSHTunnel
+ response_tunnel = DatabaseDAO.get_ssh_tunnel(1)
+ assert response_tunnel
+ assert isinstance(response_tunnel, SSHTunnel)
+ assert 1 == response_tunnel.database_id
- # Delete the recently created SSHTunnel
- response_delete_tunnel = client.delete(
- f"/api/v1/database/{database.id}/ssh_tunnel/"
- )
- assert response_delete_tunnel.json["message"] == "OK"
+ # Delete the recently created SSHTunnel
+ response_delete_tunnel = client.delete(
+ f"/api/v1/database/{database.id}/ssh_tunnel/"
+ )
+ assert response_delete_tunnel.json["message"] == "OK"
- response_tunnel = DatabaseDAO.get_ssh_tunnel(1)
- assert response_tunnel is None
+ response_tunnel = DatabaseDAO.get_ssh_tunnel(1)
+ assert response_tunnel is None
def test_delete_ssh_tunnel_not_found(
@@ -488,69 +489,70 @@ def test_delete_ssh_tunnel_not_found(
"""
Test that we cannot delete a tunnel that does not exist
"""
- from superset.daos.database import DatabaseDAO
- from superset.databases.api import DatabaseRestApi
- from superset.databases.ssh_tunnel.models import SSHTunnel
- from superset.models.core import Database
+ with app.app_context():
+ from superset.daos.database import DatabaseDAO
+ from superset.databases.api import DatabaseRestApi
+ from superset.databases.ssh_tunnel.models import SSHTunnel
+ from superset.models.core import Database
- DatabaseRestApi.datamodel.session = session
+ DatabaseRestApi.datamodel.session = session
- # create table for databases
- Database.metadata.create_all(session.get_bind()) # pylint: disable=no-member
+ # create table for databases
+ Database.metadata.create_all(session.get_bind()) # pylint: disable=no-member
- # Create our Database
- database = Database(
- database_name="my_database",
- sqlalchemy_uri="gsheets://",
- encrypted_extra=json.dumps(
- {
- "service_account_info": {
- "type": "service_account",
- "project_id": "black-sanctum-314419",
- "private_key_id": "259b0d419a8f840056158763ff54d8b08f7b8173",
- "private_key": "SECRET",
- "client_email": "google-spreadsheets-demo-servi@black-sanctum-314419.iam.gserviceaccount.com",
- "client_id": "SSH_TUNNEL_CREDENTIALS_CLIENT",
- "auth_uri": "https://accounts.google.com/o/oauth2/auth",
- "token_uri": "https://oauth2.googleapis.com/token",
- "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
- "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/google-spreadsheets-demo-servi%40black-sanctum-314419.iam.gserviceaccount.com",
- },
- }
- ),
- )
- db.session.add(database)
- db.session.commit()
+ # Create our Database
+ database = Database(
+ database_name="my_database",
+ sqlalchemy_uri="gsheets://",
+ encrypted_extra=json.dumps(
+ {
+ "service_account_info": {
+ "type": "service_account",
+ "project_id": "black-sanctum-314419",
+ "private_key_id": "259b0d419a8f840056158763ff54d8b08f7b8173",
+ "private_key": "SECRET",
+ "client_email": "google-spreadsheets-demo-servi@black-sanctum-314419.iam.gserviceaccount.com",
+ "client_id": "SSH_TUNNEL_CREDENTIALS_CLIENT",
+ "auth_uri": "https://accounts.google.com/o/oauth2/auth",
+ "token_uri": "https://oauth2.googleapis.com/token",
+ "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
+ "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/google-spreadsheets-demo-servi%40black-sanctum-314419.iam.gserviceaccount.com",
+ },
+ }
+ ),
+ )
+ db.session.add(database)
+ db.session.commit()
- # mock the lookup so that we don't need to include the driver
- mocker.patch("sqlalchemy.engine.URL.get_driver_name", return_value="gsheets")
- mocker.patch("superset.utils.log.DBEventLogger.log")
- mocker.patch(
- "superset.commands.database.ssh_tunnel.delete.is_feature_enabled",
- return_value=True,
- )
+ # mock the lookup so that we don't need to include the driver
+ mocker.patch("sqlalchemy.engine.URL.get_driver_name", return_value="gsheets")
+ mocker.patch("superset.utils.log.DBEventLogger.log")
+ mocker.patch(
+ "superset.commands.database.ssh_tunnel.delete.is_feature_enabled",
+ return_value=True,
+ )
- # Create our SSHTunnel
- tunnel = SSHTunnel(
- database_id=1,
- database=database,
- )
+ # Create our SSHTunnel
+ tunnel = SSHTunnel(
+ database_id=1,
+ database=database,
+ )
- db.session.add(tunnel)
- db.session.commit()
+ db.session.add(tunnel)
+ db.session.commit()
- # Delete the recently created SSHTunnel
- response_delete_tunnel = client.delete("/api/v1/database/2/ssh_tunnel/")
- assert response_delete_tunnel.json["message"] == "Not found"
+ # Delete the recently created SSHTunnel
+ response_delete_tunnel = client.delete("/api/v1/database/2/ssh_tunnel/")
+ assert response_delete_tunnel.json["message"] == "Not found"
- # Get our recently created SSHTunnel
- response_tunnel = DatabaseDAO.get_ssh_tunnel(1)
- assert response_tunnel
- assert isinstance(response_tunnel, SSHTunnel)
- assert 1 == response_tunnel.database_id
+ # Get our recently created SSHTunnel
+ response_tunnel = DatabaseDAO.get_ssh_tunnel(1)
+ assert response_tunnel
+ assert isinstance(response_tunnel, SSHTunnel)
+ assert 1 == response_tunnel.database_id
- response_tunnel = DatabaseDAO.get_ssh_tunnel(2)
- assert response_tunnel is None
+ response_tunnel = DatabaseDAO.get_ssh_tunnel(2)
+ assert response_tunnel is None
def test_apply_dynamic_database_filter(
@@ -566,87 +568,87 @@ def test_apply_dynamic_database_filter(
defining a filter function and patching the config to get
the filtered results.
"""
- from superset.daos.database import DatabaseDAO
- from superset.databases.api import DatabaseRestApi
- from superset.databases.ssh_tunnel.models import SSHTunnel
- from superset.models.core import Database
-
- DatabaseRestApi.datamodel.session = session
-
- # create table for databases
- Database.metadata.create_all(session.get_bind()) # pylint: disable=no-member
-
- # Create our First Database
- database = Database(
- database_name="first-database",
- sqlalchemy_uri="gsheets://",
- encrypted_extra=json.dumps(
- {
- "metadata_params": {},
- "engine_params": {},
- "metadata_cache_timeout": {},
- "schemas_allowed_for_file_upload": [],
- }
- ),
- )
- db.session.add(database)
- db.session.commit()
-
- # Create our Second Database
- database = Database(
- database_name="second-database",
- sqlalchemy_uri="gsheets://",
- encrypted_extra=json.dumps(
- {
- "metadata_params": {},
- "engine_params": {},
- "metadata_cache_timeout": {},
- "schemas_allowed_for_file_upload": [],
- }
- ),
- )
- db.session.add(database)
- db.session.commit()
-
- # mock the lookup so that we don't need to include the driver
- mocker.patch("sqlalchemy.engine.URL.get_driver_name", return_value="gsheets")
- mocker.patch("superset.utils.log.DBEventLogger.log")
- mocker.patch(
- "superset.commands.database.ssh_tunnel.delete.is_feature_enabled",
- return_value=False,
- )
-
- def _base_filter(query):
+ with app.app_context():
+ from superset.daos.database import DatabaseDAO
+ from superset.databases.api import DatabaseRestApi
from superset.models.core import Database
- return query.filter(Database.database_name.startswith("second"))
+ DatabaseRestApi.datamodel.session = session
- # Create a mock object
- base_filter_mock = Mock(side_effect=_base_filter)
+ # create table for databases
+ Database.metadata.create_all(session.get_bind()) # pylint: disable=no-member
- # Get our recently created Databases
- response_databases = DatabaseDAO.find_all()
- assert response_databases
- expected_db_names = ["first-database", "second-database"]
- actual_db_names = [db.database_name for db in response_databases]
- assert actual_db_names == expected_db_names
+ # Create our First Database
+ database = Database(
+ database_name="first-database",
+ sqlalchemy_uri="gsheets://",
+ encrypted_extra=json.dumps(
+ {
+ "metadata_params": {},
+ "engine_params": {},
+ "metadata_cache_timeout": {},
+ "schemas_allowed_for_file_upload": [],
+ }
+ ),
+ )
+ db.session.add(database)
+ db.session.commit()
- # Ensure that the filter has not been called because it's not in our config
- assert base_filter_mock.call_count == 0
+ # Create our Second Database
+ database = Database(
+ database_name="second-database",
+ sqlalchemy_uri="gsheets://",
+ encrypted_extra=json.dumps(
+ {
+ "metadata_params": {},
+ "engine_params": {},
+ "metadata_cache_timeout": {},
+ "schemas_allowed_for_file_upload": [],
+ }
+ ),
+ )
+ db.session.add(database)
+ db.session.commit()
- original_config = current_app.config.copy()
- original_config["EXTRA_DYNAMIC_QUERY_FILTERS"] = {"databases": base_filter_mock}
+ # mock the lookup so that we don't need to include the driver
+ mocker.patch("sqlalchemy.engine.URL.get_driver_name", return_value="gsheets")
+ mocker.patch("superset.utils.log.DBEventLogger.log")
+ mocker.patch(
+ "superset.commands.database.ssh_tunnel.delete.is_feature_enabled",
+ return_value=False,
+ )
- mocker.patch("superset.views.filters.current_app.config", new=original_config)
- # Get filtered list
- response_databases = DatabaseDAO.find_all()
- assert response_databases
- expected_db_names = ["second-database"]
- actual_db_names = [db.database_name for db in response_databases]
- assert actual_db_names == expected_db_names
+ def _base_filter(query):
+ from superset.models.core import Database
- # Ensure that the filter has been called once
- assert base_filter_mock.call_count == 1
+ return query.filter(Database.database_name.startswith("second"))
+
+ # Create a mock object
+ base_filter_mock = Mock(side_effect=_base_filter)
+
+ # Get our recently created Databases
+ response_databases = DatabaseDAO.find_all()
+ assert response_databases
+ expected_db_names = ["first-database", "second-database"]
+ actual_db_names = [db.database_name for db in response_databases]
+ assert actual_db_names == expected_db_names
+
+ # Ensure that the filter has not been called because it's not in our config
+ assert base_filter_mock.call_count == 0
+
+ original_config = current_app.config.copy()
+ original_config["EXTRA_DYNAMIC_QUERY_FILTERS"] = {"databases": base_filter_mock}
+
+ mocker.patch("superset.views.filters.current_app.config", new=original_config)
+ # Get filtered list
+ response_databases = DatabaseDAO.find_all()
+ assert response_databases
+ expected_db_names = ["second-database"]
+ actual_db_names = [db.database_name for db in response_databases]
+ assert actual_db_names == expected_db_names
+
+ # Ensure that the filter has been called once
+ assert base_filter_mock.call_count == 1
def test_oauth2_happy_path(
@@ -934,7 +936,7 @@ def test_csv_upload(
reader_mock = mocker.patch.object(CSVReader, "__init__")
reader_mock.return_value = None
response = client.post(
- f"/api/v1/database/1/csv_upload/",
+ "/api/v1/database/1/csv_upload/",
data=payload,
content_type="multipart/form-data",
)
@@ -1071,7 +1073,7 @@ def test_csv_upload_validation(
_ = mocker.patch.object(UploadCommand, "run")
response = client.post(
- f"/api/v1/database/1/csv_upload/",
+ "/api/v1/database/1/csv_upload/",
data=payload,
content_type="multipart/form-data",
)
@@ -1090,7 +1092,7 @@ def test_csv_upload_file_size_validation(
_ = mocker.patch.object(UploadCommand, "run")
current_app.config["CSV_UPLOAD_MAX_SIZE"] = 5
response = client.post(
- f"/api/v1/database/1/csv_upload/",
+ "/api/v1/database/1/csv_upload/",
data={
"file": (create_csv_file(), "out.csv"),
"table_name": "table1",
@@ -1131,7 +1133,7 @@ def test_csv_upload_file_extension_invalid(
"""
_ = mocker.patch.object(UploadCommand, "run")
response = client.post(
- f"/api/v1/database/1/csv_upload/",
+ "/api/v1/database/1/csv_upload/",
data={
"file": (create_csv_file(), filename),
"table_name": "table1",
@@ -1167,7 +1169,7 @@ def test_csv_upload_file_extension_valid(
"""
_ = mocker.patch.object(UploadCommand, "run")
response = client.post(
- f"/api/v1/database/1/csv_upload/",
+ "/api/v1/database/1/csv_upload/",
data={
"file": (create_csv_file(), filename),
"table_name": "table1",
@@ -1276,7 +1278,7 @@ def test_excel_upload(
reader_mock = mocker.patch.object(ExcelReader, "__init__")
reader_mock.return_value = None
response = client.post(
- f"/api/v1/database/1/excel_upload/",
+ "/api/v1/database/1/excel_upload/",
data=payload,
content_type="multipart/form-data",
)
@@ -1368,7 +1370,7 @@ def test_excel_upload_validation(
_ = mocker.patch.object(UploadCommand, "run")
response = client.post(
- f"/api/v1/database/1/excel_upload/",
+ "/api/v1/database/1/excel_upload/",
data=payload,
content_type="multipart/form-data",
)
@@ -1402,7 +1404,7 @@ def test_excel_upload_file_extension_invalid(
"""
_ = mocker.patch.object(UploadCommand, "run")
response = client.post(
- f"/api/v1/database/1/excel_upload/",
+ "/api/v1/database/1/excel_upload/",
data={
"file": (create_excel_file(), filename),
"table_name": "table1",
diff --git a/tests/unit_tests/databases/schema_tests.py b/tests/unit_tests/databases/schema_tests.py
index 15dcb18a9..aac7a3e3a 100644
--- a/tests/unit_tests/databases/schema_tests.py
+++ b/tests/unit_tests/databases/schema_tests.py
@@ -25,7 +25,6 @@ from pytest_mock import MockFixture
if TYPE_CHECKING:
from superset.databases.schemas import DatabaseParametersSchemaMixin
- from superset.db_engine_specs.base import BasicParametersMixin
# pylint: disable=too-few-public-methods
diff --git a/tests/unit_tests/databases/ssh_tunnel/commands/create_test.py b/tests/unit_tests/databases/ssh_tunnel/commands/create_test.py
index c80b52931..b20578784 100644
--- a/tests/unit_tests/databases/ssh_tunnel/commands/create_test.py
+++ b/tests/unit_tests/databases/ssh_tunnel/commands/create_test.py
@@ -17,7 +17,6 @@
import pytest
-from sqlalchemy.orm.session import Session
from superset.commands.database.ssh_tunnel.exceptions import (
SSHTunnelDatabasePortError,
@@ -52,7 +51,6 @@ def test_create_ssh_tunnel_command() -> None:
def test_create_ssh_tunnel_command_invalid_params() -> None:
from superset.commands.database.ssh_tunnel.create import CreateSSHTunnelCommand
- from superset.databases.ssh_tunnel.models import SSHTunnel
from superset.models.core import Database
database = Database(
@@ -80,7 +78,6 @@ def test_create_ssh_tunnel_command_invalid_params() -> None:
def test_create_ssh_tunnel_command_no_port() -> None:
from superset.commands.database.ssh_tunnel.create import CreateSSHTunnelCommand
- from superset.databases.ssh_tunnel.models import SSHTunnel
from superset.models.core import Database
database = Database(
diff --git a/tests/unit_tests/databases/ssh_tunnel/dao_tests.py b/tests/unit_tests/databases/ssh_tunnel/dao_tests.py
index 4646e12c1..1456f7fd8 100644
--- a/tests/unit_tests/databases/ssh_tunnel/dao_tests.py
+++ b/tests/unit_tests/databases/ssh_tunnel/dao_tests.py
@@ -16,12 +16,8 @@
# under the License.
-import pytest
-from sqlalchemy.orm.session import Session
-
-
def test_create_ssh_tunnel():
- from superset.daos.database import DatabaseDAO, SSHTunnelDAO
+ from superset.daos.database import SSHTunnelDAO
from superset.databases.ssh_tunnel.models import SSHTunnel
from superset.models.core import Database
diff --git a/tests/unit_tests/datasets/commands/export_test.py b/tests/unit_tests/datasets/commands/export_test.py
index 550d885f8..fbfa8d346 100644
--- a/tests/unit_tests/datasets/commands/export_test.py
+++ b/tests/unit_tests/datasets/commands/export_test.py
@@ -91,9 +91,7 @@ def test_export(session: Session) -> None:
export = [
(file[0], file[1]())
for file in list(
- ExportDatasetsCommand._export(
- sqla_table
- ) # pylint: disable=protected-access
+ ExportDatasetsCommand._export(sqla_table) # pylint: disable=protected-access
)
]
diff --git a/tests/unit_tests/datasets/commands/importers/v1/import_test.py b/tests/unit_tests/datasets/commands/importers/v1/import_test.py
index a7660d6c0..511b60188 100644
--- a/tests/unit_tests/datasets/commands/importers/v1/import_test.py
+++ b/tests/unit_tests/datasets/commands/importers/v1/import_test.py
@@ -31,7 +31,6 @@ from sqlalchemy.orm.session import Session
from superset import db
from superset.commands.dataset.exceptions import (
DatasetForbiddenDataURI,
- ImportFailedError,
)
from superset.commands.dataset.importers.v1.utils import validate_data_uri
@@ -156,7 +155,6 @@ def test_import_dataset_duplicate_column(mocker: MockFixture, session: Session)
Test importing a dataset with a column that already exists.
"""
from superset import security_manager
- from superset.columns.models import Column as NewColumn
from superset.commands.dataset.importers.v1.utils import import_dataset
from superset.connectors.sqla.models import SqlaTable, TableColumn
from superset.models.core import Database
@@ -283,7 +281,7 @@ def test_import_column_extra_is_string(mocker: MockFixture, session: Session) ->
"""
from superset import security_manager
from superset.commands.dataset.importers.v1.utils import import_dataset
- from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn
+ from superset.connectors.sqla.models import SqlaTable
from superset.datasets.schemas import ImportV1DatasetSchema
from superset.models.core import Database
@@ -420,7 +418,7 @@ def test_import_dataset_extra_empty_string(
dataset_config["database_id"] = database.id
sqla_table = import_dataset(dataset_config)
- assert sqla_table.extra == None
+    assert sqla_table.extra is None
@patch("superset.commands.dataset.importers.v1.utils.request")
diff --git a/tests/unit_tests/datasource/dao_tests.py b/tests/unit_tests/datasource/dao_tests.py
index adc674d0f..8fed1b73d 100644
--- a/tests/unit_tests/datasource/dao_tests.py
+++ b/tests/unit_tests/datasource/dao_tests.py
@@ -167,7 +167,6 @@ def test_get_datasource_sl_dataset(session_with_data: Session) -> None:
def test_get_datasource_w_str_param(session_with_data: Session) -> None:
from superset.connectors.sqla.models import SqlaTable
from superset.daos.datasource import DatasourceDAO
- from superset.datasets.models import Dataset
from superset.tables.models import Table
assert isinstance(
diff --git a/tests/unit_tests/db_engine_specs/test_athena.py b/tests/unit_tests/db_engine_specs/test_athena.py
index f0811a3e1..2e95576d9 100644
--- a/tests/unit_tests/db_engine_specs/test_athena.py
+++ b/tests/unit_tests/db_engine_specs/test_athena.py
@@ -23,7 +23,7 @@ import pytest
from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm
-from tests.unit_tests.fixtures.common import dttm
+from tests.unit_tests.fixtures.common import dttm  # noqa: F401
SYNTAX_ERROR_REGEX = re.compile(
": mismatched input '(?P.*?)'. Expecting: "
@@ -39,7 +39,9 @@ SYNTAX_ERROR_REGEX = re.compile(
],
)
def test_convert_dttm(
- target_type: str, expected_result: Optional[str], dttm: datetime
+ target_type: str,
+ expected_result: Optional[str],
+ dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.athena import AthenaEngineSpec as spec
diff --git a/tests/unit_tests/db_engine_specs/test_bigquery.py b/tests/unit_tests/db_engine_specs/test_bigquery.py
index 3870297db..663fd7cac 100644
--- a/tests/unit_tests/db_engine_specs/test_bigquery.py
+++ b/tests/unit_tests/db_engine_specs/test_bigquery.py
@@ -29,7 +29,7 @@ from sqlalchemy_bigquery import BigQueryDialect
from superset.superset_typing import ResultSetColumnType
from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm
-from tests.unit_tests.fixtures.common import dttm
+from tests.unit_tests.fixtures.common import dttm # noqa: F401
def test_get_fields() -> None:
@@ -323,7 +323,9 @@ def test_parse_error_raises_exception() -> None:
],
)
def test_convert_dttm(
- target_type: str, expected_result: Optional[str], dttm: datetime
+ target_type: str,
+ expected_result: Optional[str],
+ dttm: datetime, # noqa: F811
) -> None:
"""
DB Eng Specs (bigquery): Test conversion to date time
diff --git a/tests/unit_tests/db_engine_specs/test_clickhouse.py b/tests/unit_tests/db_engine_specs/test_clickhouse.py
index 65f4d7903..369dd934c 100644
--- a/tests/unit_tests/db_engine_specs/test_clickhouse.py
+++ b/tests/unit_tests/db_engine_specs/test_clickhouse.py
@@ -38,7 +38,7 @@ from tests.unit_tests.db_engine_specs.utils import (
assert_column_spec,
assert_convert_dttm,
)
-from tests.unit_tests.fixtures.common import dttm
+from tests.unit_tests.fixtures.common import dttm # noqa: F401
@pytest.mark.parametrize(
@@ -50,7 +50,9 @@ from tests.unit_tests.fixtures.common import dttm
],
)
def test_convert_dttm(
- target_type: str, expected_result: Optional[str], dttm: datetime
+ target_type: str,
+ expected_result: Optional[str],
+ dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.clickhouse import ClickHouseEngineSpec as spec
@@ -80,7 +82,9 @@ def test_execute_connection_error() -> None:
],
)
def test_connect_convert_dttm(
- target_type: str, expected_result: Optional[str], dttm: datetime
+ target_type: str,
+ expected_result: Optional[str],
+ dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.clickhouse import ClickHouseEngineSpec as spec
diff --git a/tests/unit_tests/db_engine_specs/test_crate.py b/tests/unit_tests/db_engine_specs/test_crate.py
index d2bace955..2df50c6dd 100644
--- a/tests/unit_tests/db_engine_specs/test_crate.py
+++ b/tests/unit_tests/db_engine_specs/test_crate.py
@@ -20,7 +20,7 @@ from typing import Optional
import pytest
from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm
-from tests.unit_tests.fixtures.common import dttm
+from tests.unit_tests.fixtures.common import dttm # noqa: F401
def test_epoch_to_dttm() -> None:
@@ -64,7 +64,9 @@ def test_alter_new_orm_column() -> None:
],
)
def test_convert_dttm(
- target_type: str, expected_result: Optional[str], dttm: datetime
+ target_type: str,
+ expected_result: Optional[str],
+ dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.crate import CrateEngineSpec as spec
diff --git a/tests/unit_tests/db_engine_specs/test_crdb.py b/tests/unit_tests/db_engine_specs/test_crdb.py
index e2067d3c9..507088b83 100644
--- a/tests/unit_tests/db_engine_specs/test_crdb.py
+++ b/tests/unit_tests/db_engine_specs/test_crdb.py
@@ -21,7 +21,7 @@ from typing import Optional
import pytest
from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm
-from tests.unit_tests.fixtures.common import dttm
+from tests.unit_tests.fixtures.common import dttm # noqa: F401
@pytest.mark.parametrize(
@@ -33,7 +33,9 @@ from tests.unit_tests.fixtures.common import dttm
],
)
def test_convert_dttm(
- target_type: str, expected_result: Optional[str], dttm: datetime
+ target_type: str,
+ expected_result: Optional[str],
+ dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.cockroachdb import CockroachDbEngineSpec as spec
diff --git a/tests/unit_tests/db_engine_specs/test_databend.py b/tests/unit_tests/db_engine_specs/test_databend.py
index 8e8cfe310..8252f96e2 100644
--- a/tests/unit_tests/db_engine_specs/test_databend.py
+++ b/tests/unit_tests/db_engine_specs/test_databend.py
@@ -37,7 +37,7 @@ from tests.unit_tests.db_engine_specs.utils import (
assert_column_spec,
assert_convert_dttm,
)
-from tests.unit_tests.fixtures.common import dttm
+from tests.unit_tests.fixtures.common import dttm # noqa: F401
@pytest.mark.parametrize(
@@ -49,7 +49,9 @@ from tests.unit_tests.fixtures.common import dttm
],
)
def test_convert_dttm(
- target_type: str, expected_result: Optional[str], dttm: datetime
+ target_type: str,
+ expected_result: Optional[str],
+ dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.databend import DatabendEngineSpec as spec
diff --git a/tests/unit_tests/db_engine_specs/test_databricks.py b/tests/unit_tests/db_engine_specs/test_databricks.py
index 0bfa1a4c7..de06f919b 100644
--- a/tests/unit_tests/db_engine_specs/test_databricks.py
+++ b/tests/unit_tests/db_engine_specs/test_databricks.py
@@ -26,7 +26,7 @@ from pytest_mock import MockerFixture
from superset.db_engine_specs.databricks import DatabricksNativeEngineSpec
from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm
-from tests.unit_tests.fixtures.common import dttm
+from tests.unit_tests.fixtures.common import dttm # noqa: F401
def test_get_parameters_from_uri() -> None:
@@ -238,7 +238,9 @@ def test_extract_errors_with_context() -> None:
],
)
def test_convert_dttm(
- target_type: str, expected_result: Optional[str], dttm: datetime
+ target_type: str,
+ expected_result: Optional[str],
+ dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.databricks import DatabricksNativeEngineSpec as spec
diff --git a/tests/unit_tests/db_engine_specs/test_db2.py b/tests/unit_tests/db_engine_specs/test_db2.py
index d7dd19ad5..6d0d604a2 100644
--- a/tests/unit_tests/db_engine_specs/test_db2.py
+++ b/tests/unit_tests/db_engine_specs/test_db2.py
@@ -15,7 +15,7 @@
# specific language governing permissions and limitations
# under the License.
-import pytest
+import pytest # noqa: F401
from pytest_mock import MockerFixture
@@ -59,7 +59,7 @@ def test_get_table_comment_empty(mocker: MockerFixture):
mock_inspector.get_table_comment.return_value = {}
assert (
- Db2EngineSpec.get_table_comment(mock_inspector, "my_table", "my_schema") == None
+ Db2EngineSpec.get_table_comment(mock_inspector, "my_table", "my_schema") is None # noqa: E711
)
diff --git a/tests/unit_tests/db_engine_specs/test_dremio.py b/tests/unit_tests/db_engine_specs/test_dremio.py
index eb77e7f10..487f5a9b7 100644
--- a/tests/unit_tests/db_engine_specs/test_dremio.py
+++ b/tests/unit_tests/db_engine_specs/test_dremio.py
@@ -21,7 +21,7 @@ import pytest
from pytest_mock import MockerFixture
from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm
-from tests.unit_tests.fixtures.common import dttm
+from tests.unit_tests.fixtures.common import dttm # noqa: F401
@pytest.mark.parametrize(
@@ -36,7 +36,9 @@ from tests.unit_tests.fixtures.common import dttm
],
)
def test_convert_dttm(
- target_type: str, expected_result: Optional[str], dttm: datetime
+ target_type: str,
+ expected_result: Optional[str],
+ dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.dremio import DremioEngineSpec as spec
diff --git a/tests/unit_tests/db_engine_specs/test_drill.py b/tests/unit_tests/db_engine_specs/test_drill.py
index c0d260100..7ff4aa757 100644
--- a/tests/unit_tests/db_engine_specs/test_drill.py
+++ b/tests/unit_tests/db_engine_specs/test_drill.py
@@ -23,7 +23,7 @@ import pytest
from sqlalchemy.engine.url import make_url
from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm
-from tests.unit_tests.fixtures.common import dttm
+from tests.unit_tests.fixtures.common import dttm # noqa: F401
def test_odbc_impersonation() -> None:
@@ -102,7 +102,9 @@ def test_invalid_impersonation() -> None:
],
)
def test_convert_dttm(
- target_type: str, expected_result: Optional[str], dttm: datetime
+ target_type: str,
+ expected_result: Optional[str],
+ dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.drill import DrillEngineSpec as spec
diff --git a/tests/unit_tests/db_engine_specs/test_druid.py b/tests/unit_tests/db_engine_specs/test_druid.py
index 0ab468821..178c8d0e9 100644
--- a/tests/unit_tests/db_engine_specs/test_druid.py
+++ b/tests/unit_tests/db_engine_specs/test_druid.py
@@ -22,7 +22,7 @@ import pytest
from sqlalchemy import column
from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm
-from tests.unit_tests.fixtures.common import dttm
+from tests.unit_tests.fixtures.common import dttm # noqa: F401
@pytest.mark.parametrize(
@@ -35,7 +35,9 @@ from tests.unit_tests.fixtures.common import dttm
],
)
def test_convert_dttm(
- target_type: str, expected_result: Optional[str], dttm: datetime
+ target_type: str,
+ expected_result: Optional[str],
+ dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.druid import DruidEngineSpec as spec
diff --git a/tests/unit_tests/db_engine_specs/test_duckdb.py b/tests/unit_tests/db_engine_specs/test_duckdb.py
index 1e33522de..37b8dd007 100644
--- a/tests/unit_tests/db_engine_specs/test_duckdb.py
+++ b/tests/unit_tests/db_engine_specs/test_duckdb.py
@@ -24,7 +24,7 @@ from pytest_mock import MockerFixture
from superset.config import VERSION_STRING
from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm
-from tests.unit_tests.fixtures.common import dttm
+from tests.unit_tests.fixtures.common import dttm # noqa: F401
@pytest.mark.parametrize(
@@ -36,7 +36,9 @@ from tests.unit_tests.fixtures.common import dttm
],
)
def test_convert_dttm(
- target_type: str, expected_result: Optional[str], dttm: datetime
+ target_type: str,
+ expected_result: Optional[str],
+ dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.duckdb import DuckDBEngineSpec as spec
diff --git a/tests/unit_tests/db_engine_specs/test_dynamodb.py b/tests/unit_tests/db_engine_specs/test_dynamodb.py
index 26196f5b4..df439a38e 100644
--- a/tests/unit_tests/db_engine_specs/test_dynamodb.py
+++ b/tests/unit_tests/db_engine_specs/test_dynamodb.py
@@ -21,7 +21,7 @@ from typing import Optional
import pytest
from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm
-from tests.unit_tests.fixtures.common import dttm
+from tests.unit_tests.fixtures.common import dttm # noqa: F401
@pytest.mark.parametrize(
@@ -33,7 +33,9 @@ from tests.unit_tests.fixtures.common import dttm
],
)
def test_convert_dttm(
- target_type: str, expected_result: Optional[str], dttm: datetime
+ target_type: str,
+ expected_result: Optional[str],
+ dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.dynamodb import DynamoDBEngineSpec as spec
diff --git a/tests/unit_tests/db_engine_specs/test_elasticsearch.py b/tests/unit_tests/db_engine_specs/test_elasticsearch.py
index 1fc3d11ca..36e072708 100644
--- a/tests/unit_tests/db_engine_specs/test_elasticsearch.py
+++ b/tests/unit_tests/db_engine_specs/test_elasticsearch.py
@@ -19,10 +19,10 @@ from typing import Any, Optional
from unittest.mock import MagicMock
import pytest
-from sqlalchemy import column
+from sqlalchemy import column # noqa: F401
from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm
-from tests.unit_tests.fixtures.common import dttm
+from tests.unit_tests.fixtures.common import dttm # noqa: F401
@pytest.mark.parametrize(
@@ -51,7 +51,7 @@ def test_elasticsearch_convert_dttm(
target_type: str,
db_extra: Optional[dict[str, Any]],
expected_result: Optional[str],
- dttm: datetime,
+ dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.elasticsearch import ElasticSearchEngineSpec as spec
@@ -68,7 +68,7 @@ def test_elasticsearch_convert_dttm(
def test_opendistro_convert_dttm(
target_type: str,
expected_result: Optional[str],
- dttm: datetime,
+ dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.elasticsearch import OpenDistroEngineSpec as spec
diff --git a/tests/unit_tests/db_engine_specs/test_firebird.py b/tests/unit_tests/db_engine_specs/test_firebird.py
index c1add9167..7e5fc187a 100644
--- a/tests/unit_tests/db_engine_specs/test_firebird.py
+++ b/tests/unit_tests/db_engine_specs/test_firebird.py
@@ -21,7 +21,7 @@ from typing import Optional
import pytest
from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm
-from tests.unit_tests.fixtures.common import dttm
+from tests.unit_tests.fixtures.common import dttm # noqa: F401
@pytest.mark.parametrize(
@@ -95,7 +95,9 @@ def test_epoch_to_dttm() -> None:
],
)
def test_convert_dttm(
- target_type: str, expected_result: Optional[str], dttm: datetime
+ target_type: str,
+ expected_result: Optional[str],
+ dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.firebird import FirebirdEngineSpec as spec
diff --git a/tests/unit_tests/db_engine_specs/test_firebolt.py b/tests/unit_tests/db_engine_specs/test_firebolt.py
index eb84bb14b..b3fcdb582 100644
--- a/tests/unit_tests/db_engine_specs/test_firebolt.py
+++ b/tests/unit_tests/db_engine_specs/test_firebolt.py
@@ -22,7 +22,7 @@ from typing import Optional
import pytest
from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm
-from tests.unit_tests.fixtures.common import dttm
+from tests.unit_tests.fixtures.common import dttm # noqa: F401
@pytest.mark.parametrize(
@@ -41,7 +41,9 @@ from tests.unit_tests.fixtures.common import dttm
],
)
def test_convert_dttm(
- target_type: str, expected_result: Optional[str], dttm: datetime
+ target_type: str,
+ expected_result: Optional[str],
+ dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.firebolt import FireboltEngineSpec as spec
diff --git a/tests/unit_tests/db_engine_specs/test_hana.py b/tests/unit_tests/db_engine_specs/test_hana.py
index 1d1ac6390..996c5f6e0 100644
--- a/tests/unit_tests/db_engine_specs/test_hana.py
+++ b/tests/unit_tests/db_engine_specs/test_hana.py
@@ -21,7 +21,7 @@ from typing import Optional
import pytest
from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm
-from tests.unit_tests.fixtures.common import dttm
+from tests.unit_tests.fixtures.common import dttm # noqa: F401
@pytest.mark.parametrize(
@@ -36,7 +36,9 @@ from tests.unit_tests.fixtures.common import dttm
],
)
def test_convert_dttm(
- target_type: str, expected_result: Optional[str], dttm: datetime
+ target_type: str,
+ expected_result: Optional[str],
+ dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.hana import HanaEngineSpec as spec
diff --git a/tests/unit_tests/db_engine_specs/test_hive.py b/tests/unit_tests/db_engine_specs/test_hive.py
index ba6471b89..ce2b5a8e0 100644
--- a/tests/unit_tests/db_engine_specs/test_hive.py
+++ b/tests/unit_tests/db_engine_specs/test_hive.py
@@ -23,7 +23,7 @@ import pytest
from sqlalchemy.engine.url import make_url
from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm
-from tests.unit_tests.fixtures.common import dttm
+from tests.unit_tests.fixtures.common import dttm # noqa: F401
@pytest.mark.parametrize(
@@ -38,7 +38,9 @@ from tests.unit_tests.fixtures.common import dttm
],
)
def test_convert_dttm(
- target_type: str, expected_result: Optional[str], dttm: datetime
+ target_type: str,
+ expected_result: Optional[str],
+ dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.hive import HiveEngineSpec as spec
diff --git a/tests/unit_tests/db_engine_specs/test_impala.py b/tests/unit_tests/db_engine_specs/test_impala.py
index 8a4244052..efaed81cb 100644
--- a/tests/unit_tests/db_engine_specs/test_impala.py
+++ b/tests/unit_tests/db_engine_specs/test_impala.py
@@ -21,7 +21,7 @@ from typing import Optional
import pytest
from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm
-from tests.unit_tests.fixtures.common import dttm
+from tests.unit_tests.fixtures.common import dttm # noqa: F401
@pytest.mark.parametrize(
@@ -33,7 +33,9 @@ from tests.unit_tests.fixtures.common import dttm
],
)
def test_convert_dttm(
- target_type: str, expected_result: Optional[str], dttm: datetime
+ target_type: str,
+ expected_result: Optional[str],
+ dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.impala import ImpalaEngineSpec as spec
diff --git a/tests/unit_tests/db_engine_specs/test_kusto.py b/tests/unit_tests/db_engine_specs/test_kusto.py
index 538eafc6b..9fc1cd39f 100644
--- a/tests/unit_tests/db_engine_specs/test_kusto.py
+++ b/tests/unit_tests/db_engine_specs/test_kusto.py
@@ -21,7 +21,7 @@ from typing import Optional
import pytest
from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm
-from tests.unit_tests.fixtures.common import dttm
+from tests.unit_tests.fixtures.common import dttm # noqa: F401
@pytest.mark.parametrize(
@@ -119,7 +119,9 @@ def test_kql_parse_sql() -> None:
],
)
def test_kql_convert_dttm(
- target_type: str, expected_result: Optional[str], dttm: datetime
+ target_type: str,
+ expected_result: Optional[str],
+ dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.kusto import KustoKqlEngineSpec as spec
@@ -137,7 +139,9 @@ def test_kql_convert_dttm(
],
)
def test_sql_convert_dttm(
- target_type: str, expected_result: Optional[str], dttm: datetime
+ target_type: str,
+ expected_result: Optional[str],
+ dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.kusto import KustoSqlEngineSpec as spec
diff --git a/tests/unit_tests/db_engine_specs/test_kylin.py b/tests/unit_tests/db_engine_specs/test_kylin.py
index cbc8c9133..4e29c4b4b 100644
--- a/tests/unit_tests/db_engine_specs/test_kylin.py
+++ b/tests/unit_tests/db_engine_specs/test_kylin.py
@@ -21,7 +21,7 @@ from typing import Optional
import pytest
from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm
-from tests.unit_tests.fixtures.common import dttm
+from tests.unit_tests.fixtures.common import dttm # noqa: F401
@pytest.mark.parametrize(
@@ -33,7 +33,9 @@ from tests.unit_tests.fixtures.common import dttm
],
)
def test_convert_dttm(
- target_type: str, expected_result: Optional[str], dttm: datetime
+ target_type: str,
+ expected_result: Optional[str],
+ dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.kylin import KylinEngineSpec as spec
diff --git a/tests/unit_tests/db_engine_specs/test_mssql.py b/tests/unit_tests/db_engine_specs/test_mssql.py
index 56ff88792..9a8cc36fc 100644
--- a/tests/unit_tests/db_engine_specs/test_mssql.py
+++ b/tests/unit_tests/db_engine_specs/test_mssql.py
@@ -32,7 +32,7 @@ from tests.unit_tests.db_engine_specs.utils import (
assert_column_spec,
assert_convert_dttm,
)
-from tests.unit_tests.fixtures.common import dttm
+from tests.unit_tests.fixtures.common import dttm # noqa: F401
@pytest.mark.parametrize(
@@ -124,7 +124,7 @@ def test_time_exp_mixd_case_col_1y() -> None:
def test_convert_dttm(
target_type: str,
expected_result: Optional[str],
- dttm: datetime,
+ dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.mssql import MssqlEngineSpec as spec
diff --git a/tests/unit_tests/db_engine_specs/test_mysql.py b/tests/unit_tests/db_engine_specs/test_mysql.py
index ed6434701..0e48450dc 100644
--- a/tests/unit_tests/db_engine_specs/test_mysql.py
+++ b/tests/unit_tests/db_engine_specs/test_mysql.py
@@ -34,14 +34,14 @@ from sqlalchemy.dialects.mysql import (
TINYINT,
TINYTEXT,
)
-from sqlalchemy.engine.url import make_url, URL
+from sqlalchemy.engine.url import make_url, URL # noqa: F401
from superset.utils.core import GenericDataType
from tests.unit_tests.db_engine_specs.utils import (
assert_column_spec,
assert_convert_dttm,
)
-from tests.unit_tests.fixtures.common import dttm
+from tests.unit_tests.fixtures.common import dttm # noqa: F401
@pytest.mark.parametrize(
@@ -94,7 +94,9 @@ def test_get_column_spec(
],
)
def test_convert_dttm(
- target_type: str, expected_result: Optional[str], dttm: datetime
+ target_type: str,
+ expected_result: Optional[str],
+ dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.mysql import MySQLEngineSpec as spec
diff --git a/tests/unit_tests/db_engine_specs/test_oracle.py b/tests/unit_tests/db_engine_specs/test_oracle.py
index 210dff99d..745a8c44a 100644
--- a/tests/unit_tests/db_engine_specs/test_oracle.py
+++ b/tests/unit_tests/db_engine_specs/test_oracle.py
@@ -25,7 +25,7 @@ from sqlalchemy.dialects.oracle import DATE, NVARCHAR, VARCHAR
from sqlalchemy.sql import quoted_name
from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm
-from tests.unit_tests.fixtures.common import dttm
+from tests.unit_tests.fixtures.common import dttm # noqa: F401
@pytest.mark.parametrize(
@@ -106,7 +106,9 @@ def test_fetch_data() -> None:
],
)
def test_convert_dttm(
- target_type: str, expected_result: Optional[str], dttm: datetime
+ target_type: str,
+ expected_result: Optional[str],
+ dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.oracle import OracleEngineSpec as spec
diff --git a/tests/unit_tests/db_engine_specs/test_postgres.py b/tests/unit_tests/db_engine_specs/test_postgres.py
index 59d1829f1..bec77b3b7 100644
--- a/tests/unit_tests/db_engine_specs/test_postgres.py
+++ b/tests/unit_tests/db_engine_specs/test_postgres.py
@@ -30,7 +30,7 @@ from tests.unit_tests.db_engine_specs.utils import (
assert_column_spec,
assert_convert_dttm,
)
-from tests.unit_tests.fixtures.common import dttm
+from tests.unit_tests.fixtures.common import dttm # noqa: F401
@pytest.mark.parametrize(
@@ -49,7 +49,9 @@ from tests.unit_tests.fixtures.common import dttm
],
)
def test_convert_dttm(
- target_type: str, expected_result: Optional[str], dttm: datetime
+ target_type: str,
+ expected_result: Optional[str],
+ dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.postgres import PostgresEngineSpec as spec
diff --git a/tests/unit_tests/db_engine_specs/test_redshift.py b/tests/unit_tests/db_engine_specs/test_redshift.py
index ddd2c1a5e..9ce54384c 100644
--- a/tests/unit_tests/db_engine_specs/test_redshift.py
+++ b/tests/unit_tests/db_engine_specs/test_redshift.py
@@ -21,7 +21,7 @@ from typing import Optional
import pytest
from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm
-from tests.unit_tests.fixtures.common import dttm
+from tests.unit_tests.fixtures.common import dttm # noqa: F401
@pytest.mark.parametrize(
@@ -40,7 +40,9 @@ from tests.unit_tests.fixtures.common import dttm
],
)
def test_convert_dttm(
- target_type: str, expected_result: Optional[str], dttm: datetime
+ target_type: str,
+ expected_result: Optional[str],
+ dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.redshift import RedshiftEngineSpec as spec
diff --git a/tests/unit_tests/db_engine_specs/test_rockset.py b/tests/unit_tests/db_engine_specs/test_rockset.py
index c501dccf2..9ac1f5375 100644
--- a/tests/unit_tests/db_engine_specs/test_rockset.py
+++ b/tests/unit_tests/db_engine_specs/test_rockset.py
@@ -21,7 +21,7 @@ from typing import Optional
import pytest
from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm
-from tests.unit_tests.fixtures.common import dttm
+from tests.unit_tests.fixtures.common import dttm # noqa: F401
@pytest.mark.parametrize(
@@ -34,7 +34,9 @@ from tests.unit_tests.fixtures.common import dttm
],
)
def test_convert_dttm(
- target_type: str, expected_result: Optional[str], dttm: datetime
+ target_type: str,
+ expected_result: Optional[str],
+ dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.rockset import RocksetEngineSpec as spec
diff --git a/tests/unit_tests/db_engine_specs/test_snowflake.py b/tests/unit_tests/db_engine_specs/test_snowflake.py
index 5d560dd89..cf2393e84 100644
--- a/tests/unit_tests/db_engine_specs/test_snowflake.py
+++ b/tests/unit_tests/db_engine_specs/test_snowflake.py
@@ -28,7 +28,7 @@ from sqlalchemy.engine.url import make_url
from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm
-from tests.unit_tests.fixtures.common import dttm
+from tests.unit_tests.fixtures.common import dttm # noqa: F401
@pytest.mark.parametrize(
@@ -52,7 +52,9 @@ from tests.unit_tests.fixtures.common import dttm
],
)
def test_convert_dttm(
- target_type: str, expected_result: Optional[str], dttm: datetime
+ target_type: str,
+ expected_result: Optional[str],
+ dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.snowflake import SnowflakeEngineSpec as spec
diff --git a/tests/unit_tests/db_engine_specs/test_sqlite.py b/tests/unit_tests/db_engine_specs/test_sqlite.py
index a31992afc..5b95ba789 100644
--- a/tests/unit_tests/db_engine_specs/test_sqlite.py
+++ b/tests/unit_tests/db_engine_specs/test_sqlite.py
@@ -23,7 +23,7 @@ from sqlalchemy.engine import create_engine
from superset.constants import TimeGrain
from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm
-from tests.unit_tests.fixtures.common import dttm
+from tests.unit_tests.fixtures.common import dttm # noqa: F401
@pytest.mark.parametrize(
@@ -38,7 +38,7 @@ from tests.unit_tests.fixtures.common import dttm
def test_convert_dttm(
target_type: str,
expected_result: Optional[str],
- dttm: datetime,
+ dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.sqlite import SqliteEngineSpec as spec
@@ -116,7 +116,7 @@ def test_convert_dttm(
("2022-12-04T05:06:07.89Z", TimeGrain.QUARTER, "2022-10-01 00:00:00"),
],
)
-def test_time_grain_expressions(dttm: str, grain: str, expected: str) -> None:
+def test_time_grain_expressions(dttm: str, grain: str, expected: str) -> None: # noqa: F811
from superset.db_engine_specs.sqlite import SqliteEngineSpec
engine = create_engine("sqlite://")
diff --git a/tests/unit_tests/db_engine_specs/test_trino.py b/tests/unit_tests/db_engine_specs/test_trino.py
index 7ce9199c5..535357885 100644
--- a/tests/unit_tests/db_engine_specs/test_trino.py
+++ b/tests/unit_tests/db_engine_specs/test_trino.py
@@ -44,7 +44,7 @@ from tests.unit_tests.db_engine_specs.utils import (
assert_column_spec,
assert_convert_dttm,
)
-from tests.unit_tests.fixtures.common import dttm
+from tests.unit_tests.fixtures.common import dttm # noqa: F401
def _assert_columns_equal(actual_cols, expected_cols) -> None:
@@ -304,7 +304,7 @@ def test_get_column_spec(
def test_convert_dttm(
target_type: str,
expected_result: Optional[str],
- dttm: datetime,
+ dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.trino import TrinoEngineSpec
diff --git a/tests/unit_tests/explore/api_test.py b/tests/unit_tests/explore/api_test.py
index 147790844..f8f6c54bd 100644
--- a/tests/unit_tests/explore/api_test.py
+++ b/tests/unit_tests/explore/api_test.py
@@ -17,8 +17,6 @@
from typing import Any
-import pytest
-
def test_explore_datasource_not_found(client: Any, full_api_access: None) -> None:
# validating the payload for a dataset that doesn't exist
diff --git a/tests/unit_tests/explore/utils_test.py b/tests/unit_tests/explore/utils_test.py
index fa99091f0..9638392a5 100644
--- a/tests/unit_tests/explore/utils_test.py
+++ b/tests/unit_tests/explore/utils_test.py
@@ -28,8 +28,6 @@ from superset.commands.dataset.exceptions import (
)
from superset.commands.exceptions import (
DatasourceNotFoundValidationError,
- DatasourceTypeInvalidError,
- OwnersNotFoundValidationError,
QueryNotFoundValidationError,
)
from superset.exceptions import SupersetSecurityException
@@ -242,11 +240,11 @@ def test_dataset_has_access(mocker: MockFixture) -> None:
mocker.patch(is_owner, return_value=False)
mocker.patch(can_access, return_value=True)
assert (
- check_datasource_access(
+ check_datasource_access( # noqa: E712
datasource_id=1,
datasource_type=DatasourceType.TABLE,
)
- == True
+ is True
)
@@ -260,18 +258,17 @@ def test_query_has_access(mocker: MockFixture) -> None:
mocker.patch(is_owner, return_value=False)
mocker.patch(can_access, return_value=True)
assert (
- check_datasource_access(
+ check_datasource_access( # noqa: E712
datasource_id=1,
datasource_type=DatasourceType.QUERY,
)
- == True
+ is True
)
def test_query_no_access(mocker: MockFixture, client) -> None:
from superset.connectors.sqla.models import SqlaTable
from superset.explore.utils import check_datasource_access
- from superset.models.core import Database
from superset.models.sql_lab import Query
database = mocker.MagicMock()
diff --git a/tests/unit_tests/extensions/ssh_test.py b/tests/unit_tests/extensions/ssh_test.py
index 13bf905e6..a36f0fe03 100644
--- a/tests/unit_tests/extensions/ssh_test.py
+++ b/tests/unit_tests/extensions/ssh_test.py
@@ -14,9 +14,8 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
-from unittest.mock import Mock, patch
+from unittest.mock import Mock
-import pytest
import sshtunnel
from superset.extensions.ssh import SSHManagerFactory
diff --git a/tests/unit_tests/jinja_context_test.py b/tests/unit_tests/jinja_context_test.py
index 15fe81aeb..441104c42 100644
--- a/tests/unit_tests/jinja_context_test.py
+++ b/tests/unit_tests/jinja_context_test.py
@@ -392,9 +392,9 @@ def test_user_macros_without_user_info(mocker: MockFixture):
mock_g = mocker.patch("superset.utils.core.g")
mock_g.user = None
cache = ExtraCache()
- assert cache.current_user_id() == None
- assert cache.current_username() == None
- assert cache.current_user_email() == None
+ assert cache.current_user_id() == None # noqa: E711
+ assert cache.current_username() == None # noqa: E711
+ assert cache.current_user_email() == None # noqa: E711
def test_where_in() -> None:
diff --git a/tests/unit_tests/legacy_tests.py b/tests/unit_tests/legacy_tests.py
index c5e7e183c..62281b62e 100644
--- a/tests/unit_tests/legacy_tests.py
+++ b/tests/unit_tests/legacy_tests.py
@@ -20,7 +20,7 @@ import copy
from typing import Any
from superset.legacy import update_time_range
-from tests.unit_tests.conftest import with_feature_flags
+from tests.unit_tests.conftest import with_feature_flags # noqa: F401
original_form_data = {
"granularity_sqla": "order_date",
diff --git a/tests/unit_tests/models/sql_lab_test.py b/tests/unit_tests/models/sql_lab_test.py
index fb5db9747..a4c09d41c 100644
--- a/tests/unit_tests/models/sql_lab_test.py
+++ b/tests/unit_tests/models/sql_lab_test.py
@@ -23,7 +23,7 @@ from pytest_mock import MockFixture
from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
from superset.exceptions import SupersetSecurityException
-from superset.models.sql_lab import Query, SavedQuery, SqlTablesMixin
+from superset.models.sql_lab import Query, SavedQuery
@pytest.mark.parametrize(
diff --git a/tests/unit_tests/notifications/slack_tests.py b/tests/unit_tests/notifications/slack_tests.py
index e423527df..4cadf198f 100644
--- a/tests/unit_tests/notifications/slack_tests.py
+++ b/tests/unit_tests/notifications/slack_tests.py
@@ -18,7 +18,6 @@ import uuid
from unittest.mock import MagicMock, patch
import pandas as pd
-from flask import g
@patch("superset.reports.notifications.slack.g")
diff --git a/tests/unit_tests/queries/query_object_test.py b/tests/unit_tests/queries/query_object_test.py
index 81a654653..8f42a460e 100644
--- a/tests/unit_tests/queries/query_object_test.py
+++ b/tests/unit_tests/queries/query_object_test.py
@@ -17,7 +17,6 @@
from unittest.mock import call, patch
from flask_appbuilder.security.sqla.models import User
-from pytest_mock import MockFixture
from superset.common.query_object import QueryObject
from superset.connectors.sqla.models import SqlaTable
diff --git a/tests/unit_tests/reports/schemas_test.py b/tests/unit_tests/reports/schemas_test.py
index 0fab6d11b..2cc18199e 100644
--- a/tests/unit_tests/reports/schemas_test.py
+++ b/tests/unit_tests/reports/schemas_test.py
@@ -19,7 +19,7 @@ import pytest
from marshmallow import ValidationError
from pytest_mock import MockFixture
-from superset.reports.schemas import ReportSchedulePostSchema, ReportSchedulePutSchema
+from superset.reports.schemas import ReportSchedulePostSchema
def test_report_post_schema_custom_width_validation(mocker: MockFixture) -> None:
diff --git a/tests/unit_tests/scripts/docker_build.py b/tests/unit_tests/scripts/docker_build.py
index 293f9447c..ef390c679 100644
--- a/tests/unit_tests/scripts/docker_build.py
+++ b/tests/unit_tests/scripts/docker_build.py
@@ -16,7 +16,6 @@
# under the License.
import os
import sys
-from unittest.mock import patch
import pytest
@@ -32,7 +31,7 @@ scripts_dir = os.path.abspath(
)
sys.path.append(scripts_dir)
-import build_docker as docker_utils # Replace with the actual function name
+import build_docker as docker_utils # Replace with the actual function name # noqa: E402
@pytest.fixture(autouse=True)
@@ -266,7 +265,7 @@ def test_get_docker_tags(
SHA,
"push",
"master",
- [f"--platform linux/arm64,linux/amd64"],
+ ["--platform linux/arm64,linux/amd64"],
),
],
)
diff --git a/tests/unit_tests/scripts/tag_latest_release_test.py b/tests/unit_tests/scripts/tag_latest_release_test.py
index eebd316db..0b7e33e21 100644
--- a/tests/unit_tests/scripts/tag_latest_release_test.py
+++ b/tests/unit_tests/scripts/tag_latest_release_test.py
@@ -16,7 +16,6 @@
# under the License.
import subprocess
from unittest import mock
-from unittest.mock import patch
import pytest
diff --git a/tests/unit_tests/sql_parse_tests.py b/tests/unit_tests/sql_parse_tests.py
index ac24e0597..0655b3029 100644
--- a/tests/unit_tests/sql_parse_tests.py
+++ b/tests/unit_tests/sql_parse_tests.py
@@ -118,9 +118,8 @@ def test_extract_tables_subselect() -> None:
"""
Test that tables inside subselects are parsed correctly.
"""
- assert (
- extract_tables(
- """
+ assert extract_tables(
+ """
SELECT sub.*
FROM (
SELECT *
@@ -129,13 +128,10 @@ FROM (
) sub, s2.t2
WHERE sub.resolution = 'NONE'
"""
- )
- == {Table("t1", "s1"), Table("t2", "s2")}
- )
+ ) == {Table("t1", "s1"), Table("t2", "s2")}
- assert (
- extract_tables(
- """
+ assert extract_tables(
+ """
SELECT sub.*
FROM (
SELECT *
@@ -144,13 +140,10 @@ FROM (
) sub
WHERE sub.resolution = 'NONE'
"""
- )
- == {Table("t1", "s1")}
- )
+ ) == {Table("t1", "s1")}
- assert (
- extract_tables(
- """
+ assert extract_tables(
+ """
SELECT * FROM t1
WHERE s11 > ANY (
SELECT COUNT(*) /* no hint */ FROM t2
@@ -162,9 +155,7 @@ WHERE s11 > ANY (
)
)
"""
- )
- == {Table("t1"), Table("t2"), Table("t3"), Table("t4")}
- )
+ ) == {Table("t1"), Table("t2"), Table("t3"), Table("t4")}
def test_extract_tables_select_in_expression() -> None:
@@ -235,30 +226,24 @@ def test_extract_tables_select_array() -> None:
"""
Test that queries selecting arrays work as expected.
"""
- assert (
- extract_tables(
- """
+ assert extract_tables(
+ """
SELECT ARRAY[1, 2, 3] AS my_array
FROM t1 LIMIT 10
"""
- )
- == {Table("t1")}
- )
+ ) == {Table("t1")}
def test_extract_tables_select_if() -> None:
"""
Test that queries with an ``IF`` work as expected.
"""
- assert (
- extract_tables(
- """
+ assert extract_tables(
+ """
SELECT IF(CARDINALITY(my_array) >= 3, my_array[3], NULL)
FROM t1 LIMIT 10
"""
- )
- == {Table("t1")}
- )
+ ) == {Table("t1")}
def test_extract_tables_with_catalog() -> None:
@@ -326,38 +311,29 @@ def test_extract_tables_where_subquery() -> None:
"""
Test that tables in a ``WHERE`` subquery are parsed correctly.
"""
- assert (
- extract_tables(
- """
+ assert extract_tables(
+ """
SELECT name
FROM t1
WHERE regionkey = (SELECT max(regionkey) FROM t2)
"""
- )
- == {Table("t1"), Table("t2")}
- )
+ ) == {Table("t1"), Table("t2")}
- assert (
- extract_tables(
- """
+ assert extract_tables(
+ """
SELECT name
FROM t1
WHERE regionkey IN (SELECT regionkey FROM t2)
"""
- )
- == {Table("t1"), Table("t2")}
- )
+ ) == {Table("t1"), Table("t2")}
- assert (
- extract_tables(
- """
+ assert extract_tables(
+ """
SELECT name
FROM t1
WHERE EXISTS (SELECT 1 FROM t2 WHERE t1.regionkey = t2.regionkey);
"""
- )
- == {Table("t1"), Table("t2")}
- )
+ ) == {Table("t1"), Table("t2")}
def test_extract_tables_describe() -> None:
@@ -371,15 +347,12 @@ def test_extract_tables_show_partitions() -> None:
"""
Test ``SHOW PARTITIONS``.
"""
- assert (
- extract_tables(
- """
+ assert extract_tables(
+ """
SHOW PARTITIONS FROM orders
WHERE ds >= '2013-01-01' ORDER BY ds DESC
"""
- )
- == {Table("orders")}
- )
+ ) == {Table("orders")}
def test_extract_tables_join() -> None:
@@ -391,9 +364,8 @@ def test_extract_tables_join() -> None:
Table("t2"),
}
- assert (
- extract_tables(
- """
+ assert extract_tables(
+ """
SELECT a.date, b.name
FROM left_table a
JOIN (
@@ -404,13 +376,10 @@ JOIN (
) b
ON a.date = b.date
"""
- )
- == {Table("left_table"), Table("right_table")}
- )
+ ) == {Table("left_table"), Table("right_table")}
- assert (
- extract_tables(
- """
+ assert extract_tables(
+ """
SELECT a.date, b.name
FROM left_table a
LEFT INNER JOIN (
@@ -421,13 +390,10 @@ LEFT INNER JOIN (
) b
ON a.date = b.date
"""
- )
- == {Table("left_table"), Table("right_table")}
- )
+ ) == {Table("left_table"), Table("right_table")}
- assert (
- extract_tables(
- """
+ assert extract_tables(
+ """
SELECT a.date, b.name
FROM left_table a
RIGHT OUTER JOIN (
@@ -438,13 +404,10 @@ RIGHT OUTER JOIN (
) b
ON a.date = b.date
"""
- )
- == {Table("left_table"), Table("right_table")}
- )
+ ) == {Table("left_table"), Table("right_table")}
- assert (
- extract_tables(
- """
+ assert extract_tables(
+ """
SELECT a.date, b.name
FROM left_table a
FULL OUTER JOIN (
@@ -455,18 +418,15 @@ FULL OUTER JOIN (
) b
ON a.date = b.date
"""
- )
- == {Table("left_table"), Table("right_table")}
- )
+ ) == {Table("left_table"), Table("right_table")}
def test_extract_tables_semi_join() -> None:
"""
Test ``LEFT SEMI JOIN``.
"""
- assert (
- extract_tables(
- """
+ assert extract_tables(
+ """
SELECT a.date, b.name
FROM left_table a
LEFT SEMI JOIN (
@@ -477,18 +437,15 @@ LEFT SEMI JOIN (
) b
ON a.data = b.date
"""
- )
- == {Table("left_table"), Table("right_table")}
- )
+ ) == {Table("left_table"), Table("right_table")}
def test_extract_tables_combinations() -> None:
"""
Test a complex case with nested queries.
"""
- assert (
- extract_tables(
- """
+ assert extract_tables(
+ """
SELECT * FROM t1
WHERE s11 > ANY (
SELECT * FROM t1 UNION ALL SELECT * FROM (
@@ -502,13 +459,10 @@ WHERE s11 > ANY (
)
)
"""
- )
- == {Table("t1"), Table("t3"), Table("t4"), Table("t6")}
- )
+ ) == {Table("t1"), Table("t3"), Table("t4"), Table("t6")}
- assert (
- extract_tables(
- """
+ assert extract_tables(
+ """
SELECT * FROM (
SELECT * FROM (
SELECT * FROM (
@@ -517,56 +471,45 @@ SELECT * FROM (
) AS S2
) AS S3
"""
- )
- == {Table("EmployeeS")}
- )
+ ) == {Table("EmployeeS")}
def test_extract_tables_with() -> None:
"""
Test ``WITH``.
"""
- assert (
- extract_tables(
- """
+ assert extract_tables(
+ """
WITH
x AS (SELECT a FROM t1),
y AS (SELECT a AS b FROM t2),
z AS (SELECT b AS c FROM t3)
SELECT c FROM z
"""
- )
- == {Table("t1"), Table("t2"), Table("t3")}
- )
+ ) == {Table("t1"), Table("t2"), Table("t3")}
- assert (
- extract_tables(
- """
+ assert extract_tables(
+ """
WITH
x AS (SELECT a FROM t1),
y AS (SELECT a AS b FROM x),
z AS (SELECT b AS c FROM y)
SELECT c FROM z
"""
- )
- == {Table("t1")}
- )
+ ) == {Table("t1")}
def test_extract_tables_reusing_aliases() -> None:
"""
Test that the parser follows aliases.
"""
- assert (
- extract_tables(
- """
+ assert extract_tables(
+ """
with q1 as ( select key from q2 where key = '5'),
q2 as ( select key from src where key = '5')
select * from (select key from q1) a
"""
- )
- == {Table("src")}
- )
+ ) == {Table("src")}
# weird query with circular dependency
assert (
@@ -603,9 +546,8 @@ def test_extract_tables_complex() -> None:
"""
Test a few complex queries.
"""
- assert (
- extract_tables(
- """
+ assert extract_tables(
+ """
SELECT sum(m_examples) AS "sum__m_example"
FROM (
SELECT
@@ -626,29 +568,23 @@ FROM (
ORDER BY "sum__m_example" DESC
LIMIT 10;
"""
- )
- == {
- Table("my_l_table"),
- Table("my_b_table"),
- Table("my_t_table"),
- Table("inner_table"),
- }
- )
+ ) == {
+ Table("my_l_table"),
+ Table("my_b_table"),
+ Table("my_t_table"),
+ Table("inner_table"),
+ }
- assert (
- extract_tables(
- """
+ assert extract_tables(
+ """
SELECT *
FROM table_a AS a, table_b AS b, table_c as c
WHERE a.id = b.id and b.id = c.id
"""
- )
- == {Table("table_a"), Table("table_b"), Table("table_c")}
- )
+ ) == {Table("table_a"), Table("table_b"), Table("table_c")}
- assert (
- extract_tables(
- """
+ assert extract_tables(
+ """
SELECT somecol AS somecol
FROM (
WITH bla AS (
@@ -692,63 +628,51 @@ FROM (
LIMIT 50000
)
"""
- )
- == {Table("a"), Table("b"), Table("c"), Table("d"), Table("e"), Table("f")}
- )
+ ) == {Table("a"), Table("b"), Table("c"), Table("d"), Table("e"), Table("f")}
def test_extract_tables_mixed_from_clause() -> None:
"""
Test that the parser handles a ``FROM`` clause with table and subselect.
"""
- assert (
- extract_tables(
- """
+ assert extract_tables(
+ """
SELECT *
FROM table_a AS a, (select * from table_b) AS b, table_c as c
WHERE a.id = b.id and b.id = c.id
"""
- )
- == {Table("table_a"), Table("table_b"), Table("table_c")}
- )
+ ) == {Table("table_a"), Table("table_b"), Table("table_c")}
def test_extract_tables_nested_select() -> None:
"""
Test that the parser handles selects inside functions.
"""
- assert (
- extract_tables(
- """
+ assert extract_tables(
+ """
select (extractvalue(1,concat(0x7e,(select GROUP_CONCAT(TABLE_NAME)
from INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_SCHEMA like "%bi%"),0x7e)));
""",
- "mysql",
- )
- == {Table("COLUMNS", "INFORMATION_SCHEMA")}
- )
+ "mysql",
+ ) == {Table("COLUMNS", "INFORMATION_SCHEMA")}
- assert (
- extract_tables(
- """
+ assert extract_tables(
+ """
select (extractvalue(1,concat(0x7e,(select GROUP_CONCAT(COLUMN_NAME)
from INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_NAME="bi_achievement_daily"),0x7e)));
""",
- "mysql",
- )
- == {Table("COLUMNS", "INFORMATION_SCHEMA")}
- )
+ "mysql",
+ ) == {Table("COLUMNS", "INFORMATION_SCHEMA")}
def test_extract_tables_complex_cte_with_prefix() -> None:
"""
Test that the parser handles CTEs with prefixes.
"""
- assert (
- extract_tables(
- """
+ assert extract_tables(
+ """
WITH CTE__test (SalesPersonID, SalesOrderID, SalesYear)
AS (
SELECT SalesPersonID, SalesOrderID, YEAR(OrderDate) AS SalesYear
@@ -760,26 +684,21 @@ FROM CTE__test
GROUP BY SalesYear, SalesPersonID
ORDER BY SalesPersonID, SalesYear;
"""
- )
- == {Table("SalesOrderHeader")}
- )
+ ) == {Table("SalesOrderHeader")}
def test_extract_tables_identifier_list_with_keyword_as_alias() -> None:
"""
Test that aliases that are keywords are parsed correctly.
"""
- assert (
- extract_tables(
- """
+ assert extract_tables(
+ """
WITH
f AS (SELECT * FROM foo),
match AS (SELECT * FROM f)
SELECT * FROM match
"""
- )
- == {Table("foo")}
- )
+ ) == {Table("foo")}
def test_update() -> None:
diff --git a/tests/unit_tests/tags/commands/create_test.py b/tests/unit_tests/tags/commands/create_test.py
index 1e1895bb7..2697e74ae 100644
--- a/tests/unit_tests/tags/commands/create_test.py
+++ b/tests/unit_tests/tags/commands/create_test.py
@@ -28,7 +28,7 @@ def session_with_data(session: Session):
from superset.models.core import Database
from superset.models.dashboard import Dashboard
from superset.models.slice import Slice
- from superset.models.sql_lab import Query, SavedQuery
+ from superset.models.sql_lab import SavedQuery
engine = session.get_bind()
SqlaTable.metadata.create_all(engine) # pylint: disable=no-member
@@ -43,7 +43,7 @@ def session_with_data(session: Session):
database = Database(database_name="my_database", sqlalchemy_uri="postgresql://")
- columns = [
+ [ # noqa: F841
TableColumn(column_name="a", type="INTEGER"),
]
@@ -69,11 +69,9 @@ def session_with_data(session: Session):
def test_create_command_success(session_with_data: Session, mocker: MockFixture):
from superset.commands.tag.create import CreateCustomTagWithRelationshipsCommand
- from superset.connectors.sqla.models import SqlaTable
- from superset.daos.tag import TagDAO
from superset.models.dashboard import Dashboard
from superset.models.slice import Slice
- from superset.models.sql_lab import Query, SavedQuery
+ from superset.models.sql_lab import SavedQuery
from superset.tags.models import ObjectType, TaggedObject
# Define a list of objects to tag
@@ -112,11 +110,9 @@ def test_create_command_success(session_with_data: Session, mocker: MockFixture)
def test_create_command_success_clear(session_with_data: Session, mocker: MockFixture):
from superset.commands.tag.create import CreateCustomTagWithRelationshipsCommand
- from superset.connectors.sqla.models import SqlaTable
- from superset.daos.tag import TagDAO
from superset.models.dashboard import Dashboard
from superset.models.slice import Slice
- from superset.models.sql_lab import Query, SavedQuery
+ from superset.models.sql_lab import SavedQuery
from superset.tags.models import ObjectType, TaggedObject
# Define a list of objects to tag
diff --git a/tests/unit_tests/tags/commands/update_test.py b/tests/unit_tests/tags/commands/update_test.py
index 75636ab0a..d3786b239 100644
--- a/tests/unit_tests/tags/commands/update_test.py
+++ b/tests/unit_tests/tags/commands/update_test.py
@@ -28,7 +28,7 @@ def session_with_data(session: Session):
from superset.models.core import Database
from superset.models.dashboard import Dashboard
from superset.models.slice import Slice
- from superset.models.sql_lab import Query, SavedQuery
+ from superset.models.sql_lab import SavedQuery
from superset.tags.models import Tag
engine = session.get_bind()
@@ -48,7 +48,7 @@ def session_with_data(session: Session):
TableColumn(column_name="a", type="INTEGER"),
]
- sqla_table = SqlaTable(
+ SqlaTable( # noqa: F841
table_name="my_sqla_table",
columns=columns,
metrics=[],
@@ -63,7 +63,7 @@ def session_with_data(session: Session):
published=True,
)
- saved_query = SavedQuery(
+ SavedQuery( # noqa: F841
label="test_query", database=database, sql="select * from foo"
)
@@ -95,7 +95,7 @@ def test_update_command_success(session_with_data: Session, mocker: MockFixture)
]
tag_to_update = TagDAO.find_by_name("test_name")
- changed_model = UpdateTagCommand(
+ UpdateTagCommand( # noqa: F841
tag_to_update.id,
{
"name": "new_name",
diff --git a/tests/unit_tests/tasks/test_utils.py b/tests/unit_tests/tasks/test_utils.py
index b3fbfca8a..2d85a1c05 100644
--- a/tests/unit_tests/tasks/test_utils.py
+++ b/tests/unit_tests/tasks/test_utils.py
@@ -31,7 +31,7 @@ SELENIUM_USERNAME = "admin"
def _get_users(
- params: Optional[Union[int, list[int]]]
+ params: Optional[Union[int, list[int]]],
) -> Optional[Union[User, list[User]]]:
if params is None:
return None
diff --git a/tests/unit_tests/thumbnails/test_digest.py b/tests/unit_tests/thumbnails/test_digest.py
index 68bd7a58f..987488ffe 100644
--- a/tests/unit_tests/thumbnails/test_digest.py
+++ b/tests/unit_tests/thumbnails/test_digest.py
@@ -172,13 +172,16 @@ def test_dashboard_digest(
user = User(id=1, username="1")
func = CUSTOM_DASHBOARD_FUNC if use_custom_digest else None
- with patch.dict(
- app.config,
- {
- "THUMBNAIL_EXECUTE_AS": execute_as,
- "THUMBNAIL_DASHBOARD_DIGEST_FUNC": func,
- },
- ), override_user(user):
+ with (
+ patch.dict(
+ app.config,
+ {
+ "THUMBNAIL_EXECUTE_AS": execute_as,
+ "THUMBNAIL_DASHBOARD_DIGEST_FUNC": func,
+ },
+ ),
+ override_user(user),
+ ):
cm = (
pytest.raises(type(expected_result))
if isinstance(expected_result, Exception)
@@ -242,13 +245,16 @@ def test_chart_digest(
user = User(id=1, username="1")
func = CUSTOM_CHART_FUNC if use_custom_digest else None
- with patch.dict(
- app.config,
- {
- "THUMBNAIL_EXECUTE_AS": execute_as,
- "THUMBNAIL_CHART_DIGEST_FUNC": func,
- },
- ), override_user(user):
+ with (
+ patch.dict(
+ app.config,
+ {
+ "THUMBNAIL_EXECUTE_AS": execute_as,
+ "THUMBNAIL_CHART_DIGEST_FUNC": func,
+ },
+ ),
+ override_user(user),
+ ):
cm = (
pytest.raises(type(expected_result))
if isinstance(expected_result, Exception)
diff --git a/tests/unit_tests/utils/docker.py b/tests/unit_tests/utils/docker.py
index ef926daf3..0cc23d811 100644
--- a/tests/unit_tests/utils/docker.py
+++ b/tests/unit_tests/utils/docker.py
@@ -16,7 +16,6 @@
# under the License.
import os
import sys
-from unittest.mock import patch
import pytest
@@ -32,7 +31,7 @@ scripts_dir = os.path.abspath(
)
sys.path.append(scripts_dir)
-import build_docker as docker_utils # Replace with the actual function name
+import build_docker as docker_utils # Replace with the actual function name # noqa: E402
@pytest.fixture(autouse=True)
diff --git a/tests/unit_tests/utils/lock_tests.py b/tests/unit_tests/utils/lock_tests.py
index 0042f6f5d..270d08236 100644
--- a/tests/unit_tests/utils/lock_tests.py
+++ b/tests/unit_tests/utils/lock_tests.py
@@ -26,7 +26,7 @@ from pytest_mock import MockerFixture
from superset.exceptions import CreateKeyValueDistributedLockFailedException
from superset.key_value.exceptions import KeyValueCreateFailedError
from superset.key_value.types import KeyValueResource
-from superset.utils.lock import KeyValueDistributedLock, serialize
+from superset.utils.lock import KeyValueDistributedLock
def test_KeyValueDistributedLock_happy_path(mocker: MockerFixture) -> None:
diff --git a/tests/unit_tests/utils/test_core.py b/tests/unit_tests/utils/test_core.py
index 3e73637da..a8081e0f3 100644
--- a/tests/unit_tests/utils/test_core.py
+++ b/tests/unit_tests/utils/test_core.py
@@ -21,7 +21,6 @@ from unittest.mock import MagicMock, patch
import pandas as pd
import pytest
-from sqlalchemy import CheckConstraint, Column, Integer, MetaData, Table
from superset.exceptions import SupersetException
from superset.utils.core import (
@@ -314,7 +313,6 @@ def test_generic_constraint_name_not_found():
table_name = "my_table"
columns = {"column1", "column2"}
referenced_table_name = "other_table"
- constraint_name = "my_constraint"
# Create a mock table object with the same structure but no matching constraint
table_mock = MagicMock()
diff --git a/tests/unit_tests/utils/urls_tests.py b/tests/unit_tests/utils/urls_tests.py
index 8ead2dacb..77dbfd8b0 100644
--- a/tests/unit_tests/utils/urls_tests.py
+++ b/tests/unit_tests/utils/urls_tests.py
@@ -14,7 +14,6 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
-import pytest
from superset.utils.urls import modify_url_query