chore: enforce more ruff rules (#31447)

Co-authored-by: Elizabeth Thompson <eschutho@gmail.com>
Maxime Beauchemin, 2024-12-18 17:41:34 -08:00, committed by GitHub
parent 9da65d6bfd
commit e51b95ffa8
375 changed files with 1821 additions and 1718 deletions

View File

@@ -272,14 +272,14 @@ class GitLogs:
     @staticmethod
     def _git_get_current_head() -> str:
-        output = os.popen("git status | head -1").read()
+        output = os.popen("git status | head -1").read()  # noqa: S605, S607
         match = re.match("(?:HEAD detached at|On branch) (.*)", output)
         if not match:
             return ""
         return match.group(1)

     def _git_checkout(self, git_ref: str) -> None:
-        os.popen(f"git checkout {git_ref}").read()
+        os.popen(f"git checkout {git_ref}").read()  # noqa: S605
         current_head = self._git_get_current_head()
         if current_head != git_ref:
             print(f"Could not checkout {git_ref}")
@@ -290,7 +290,7 @@ class GitLogs:
         current_git_ref = self._git_get_current_head()
         self._git_checkout(self._git_ref)
         output = (
-            os.popen('git --no-pager log --pretty=format:"%h|%an|%ae|%ad|%s|"')
+            os.popen('git --no-pager log --pretty=format:"%h|%an|%ae|%ad|%s|"')  # noqa: S605, S607
            .read()
            .split("\n")
        )
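Ruff's S605/S607 findings on `os.popen` (shell use and partial executable path) are suppressed above rather than fixed. For reference, a shell-free equivalent of the `_git_get_current_head` helper might look like this — a minimal sketch, not part of the commit:

    import subprocess

    def git_current_head() -> str:
        # Passing an argument list avoids the shell (S605); with no shell
        # involved, the partial-path concern behind S607 also goes away.
        result = subprocess.run(
            ["git", "status"], capture_output=True, text=True, check=False
        )
        return result.stdout.splitlines()[0] if result.stdout else ""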

View File

@@ -31,7 +31,7 @@ except ModuleNotFoundError:
 RECEIVER_EMAIL = "dev@superset.apache.org"
 PROJECT_NAME = "Superset"
 PROJECT_MODULE = "superset"
-PROJECT_DESCRIPTION = "Apache Superset is a modern, enterprise-ready business intelligence web application."
+PROJECT_DESCRIPTION = "Apache Superset is a modern, enterprise-ready business intelligence web application."  # noqa: E501

 def string_comma_to_list(message: str) -> list[str]:

View File

@@ -23,12 +23,12 @@ from typing import Optional
 import requests

-# Part 1: Verify SHA512 hash - this is the same as running `shasum -a 512 {release}` and comparing it against `{release}.sha512`
+# Part 1: Verify SHA512 hash - this is the same as running `shasum -a 512 {release}` and comparing it against `{release}.sha512`  # noqa: E501
 def get_sha512_hash(filename: str) -> str:
     """Run the shasum command on the file and return the SHA512 hash."""
-    result = subprocess.run(["shasum", "-a", "512", filename], stdout=subprocess.PIPE)
+    result = subprocess.run(["shasum", "-a", "512", filename], stdout=subprocess.PIPE)  # noqa: S603, S607
     sha512_hash = result.stdout.decode().split()[0]
     return sha512_hash
@@ -43,7 +43,7 @@ def read_sha512_file(filename: str) -> str:
 def verify_sha512(filename: str) -> str:
-    """Verify if the SHA512 hash of the file matches with the hash in the .sha512 file."""
+    """Verify if the SHA512 hash of the file matches with the hash in the .sha512 file."""  # noqa: E501
     sha512_hash = get_sha512_hash(filename)
     sha512_file_content = read_sha512_file(filename)
@@ -53,14 +53,15 @@ def verify_sha512(filename: str) -> str:
     return "SHA failed"

-# Part 2: Verify RSA key - this is the same as running `gpg --verify {release}.asc {release}` and comparing the RSA key and email address against the KEYS file
+# Part 2: Verify RSA key - this is the same as running `gpg --verify {release}.asc {release}` and comparing the RSA key and email address against the KEYS file  # noqa: E501
 def get_gpg_info(filename: str) -> tuple[Optional[str], Optional[str]]:
     """Run the GPG verify command and extract RSA key and email address."""
     asc_filename = filename + ".asc"
-    result = subprocess.run(
-        ["gpg", "--verify", asc_filename, filename], capture_output=True
+    result = subprocess.run(  # noqa: S603
+        ["gpg", "--verify", asc_filename, filename],  # noqa: S607
+        capture_output=True,  # noqa: S607
     )
     output = result.stderr.decode()
@@ -90,7 +91,7 @@ def get_gpg_info(filename: str) -> tuple[Optional[str], Optional[str]]:
 def verify_key(key: str, email: Optional[str]) -> str:
     """Fetch the KEYS file and verify if the RSA/EDDSA key and email match."""
     url = "https://downloads.apache.org/superset/KEYS"
-    response = requests.get(url)
+    response = requests.get(url)  # noqa: S113
     if response.status_code == 200:
         if key not in response.text:
             return "RSA/EDDSA key not found on KEYS page"
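S113 flags `requests` calls without a timeout; here it is suppressed. The direct fix would pass one explicitly — a sketch, with the 30-second value being an arbitrary assumption:

    import requests

    # A bounded timeout keeps a stalled server from hanging the release check.
    response = requests.get("https://downloads.apache.org/superset/KEYS", timeout=30)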

View File

@@ -99,7 +99,7 @@ CELERY_CONFIG = CeleryConfig
 FEATURE_FLAGS = {"ALERT_REPORTS": True}
 ALERT_REPORTS_NOTIFICATION_DRY_RUN = True
-WEBDRIVER_BASEURL = "http://superset:8088/"  # When using docker compose baseurl should be http://superset_app:8088/
+WEBDRIVER_BASEURL = "http://superset:8088/"  # When using docker compose baseurl should be http://superset_app:8088/  # noqa: E501
 # The base URL for the email report hyperlinks.
 WEBDRIVER_BASEURL_USER_FRIENDLY = WEBDRIVER_BASEURL
 SQLLAB_CTAS_NO_LIMIT = True

View File

@@ -24,7 +24,7 @@ name = "apache-superset"
 description = "A modern, enterprise-ready business intelligence web application"
 readme = "README.md"
 dynamic = ["version", "scripts", "entry-points"]
-requires-python = "~=3.9"
+requires-python = ">=3.9"
 license = { file="LICENSE.txt" }
 authors = [
     { name = "Apache Software Foundation", email = "dev@superset.apache.org" },
@@ -276,8 +276,8 @@ exclude = [
 line-length = 88
 indent-width = 4

-# Assume Python 3.8
-target-version = "py310"
+# Assume Python 3.9
+target-version = "py39"

 [tool.ruff.lint]
 # Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
@@ -290,22 +290,24 @@ select = [
     "E9",
     "PT009",
     "TRY201",
-    # TODO add these rules in follow up PR
-    # "B",
-    # "C",
-    # "E",
-    # "F",
-    #"F",
-    # "I",
-    # "N",
-    # "PT",
-    # "Q",
-    # "S",
-    # "T",
-    #"W",
+    "B",
+    "C",
+    "E",
+    "F",
+    "F",
+    "I",
+    "N",
+    "PT",
+    "Q",
+    "S",
+    "T",
+    "W",
 ]
 ignore = [
     "S101",
+    "PT006",
+    "T201",
+    "N999",
 ]
 extend-select = ["I"]
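The newly enabled families map to flake8 plugins: B (bugbear), C (comprehensions/complexity), E/W (pycodestyle), F (pyflakes), I (isort), N (pep8-naming), PT (pytest style), Q (quotes), S (bandit), T (print). A toy test module illustrating the three newly ignored codes — hypothetical, for illustration only:

    # A module named e.g. `bad_Module.py` would trip N999 (invalid module name).
    import pytest

    @pytest.mark.parametrize("a,b", [(1, 2)])  # PT006 dislikes csv-style name strings
    def test_add(a: int, b: int) -> None:
        print(a + b)  # T201 flags print calls; ignored repo-wide here
        assert a + b == 3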

View File

@@ -70,7 +70,7 @@ def extract_modified_tables(module: ModuleType) -> set[str]:
     return tables

-def find_models(module: ModuleType) -> list[type[Model]]:
+def find_models(module: ModuleType) -> list[type[Model]]:  # noqa: C901
     """
     Find all models in a migration script.
     """
@@ -94,7 +94,7 @@ def find_models(module: ModuleType) -> list[type[Model]]:
     # downgrade
     sqlalchemy_uri = current_app.config["SQLALCHEMY_DATABASE_URI"]
     engine = create_engine(sqlalchemy_uri)
-    Base = automap_base()
+    Base = automap_base()  # noqa: N806
     Base.prepare(engine, reflect=True)
     seen = set()
     while tables:
@@ -138,7 +138,7 @@ def find_models(module: ModuleType) -> list[type[Model]]:
 @click.option("--limit", default=1000, help="Maximum number of entities.")
 @click.option("--force", is_flag=True, help="Do not prompt for confirmation.")
 @click.option("--no-auto-cleanup", is_flag=True, help="Do not remove created models.")
-def main(
+def main(  # noqa: C901
     filepath: str, limit: int = 1000, force: bool = False, no_auto_cleanup: bool = False
 ) -> None:
     auto_cleanup = not no_auto_cleanup

View File

@@ -49,7 +49,7 @@ github_repo = os.environ.get("GITHUB_REPOSITORY", "apache/superset")
 def request(
     method: Literal["GET", "POST", "DELETE", "PUT"], endpoint: str, **kwargs: Any
 ) -> dict[str, Any]:
-    resp = requests.request(
+    resp = requests.request(  # noqa: S113
         method,
         f"https://api.github.com/{endpoint.lstrip('/')}",
         headers={"Authorization": f"Bearer {github_token}"},
@@ -152,7 +152,7 @@ Date: {date_str}
     help="Whether to also cancel running workflows.",
 )
 @click.argument("branch_or_pull", required=False)
-def cancel_github_workflows(
+def cancel_github_workflows(  # noqa: C901
     branch_or_pull: Optional[str],
     repo: str,
     event: list[str],

View File

@@ -51,12 +51,12 @@ GITHUB_TOKEN = os.environ.get("GITHUB_TOKEN")
 def fetch_files_github_api(url: str):  # type: ignore
     """Fetches data using GitHub API."""
-    req = Request(url)
+    req = Request(url)  # noqa: S310
     req.add_header("Authorization", f"Bearer {GITHUB_TOKEN}")
     req.add_header("Accept", "application/vnd.github.v3+json")
     print(f"Fetching from {url}")
-    with urlopen(req) as response:
+    with urlopen(req) as response:  # noqa: S310
         body = response.read()
         return json.loads(body)
@@ -130,7 +130,7 @@ def main(event_type: str, sha: str, repo: str) -> None:
     )
     # Output results
-    output_path = os.getenv("GITHUB_OUTPUT") or "/tmp/GITHUB_OUTPUT.txt"
+    output_path = os.getenv("GITHUB_OUTPUT") or "/tmp/GITHUB_OUTPUT.txt"  # noqa: S108
     with open(output_path, "a") as f:
         for check, changed in changes_detected.items():
             if changed:
@@ -139,8 +139,8 @@ def main(event_type: str, sha: str, repo: str) -> None:
 def get_git_sha() -> str:
-    return os.getenv("GITHUB_SHA") or subprocess.check_output(
-        ["git", "rev-parse", "HEAD"]
-    ).strip().decode("utf-8")
+    return os.getenv("GITHUB_SHA") or subprocess.check_output(  # noqa: S603
+        ["git", "rev-parse", "HEAD"]  # noqa: S607
+    ).strip().decode("utf-8")
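S310 exists because `urllib.request.urlopen` accepts `file://` and other schemes; the suppression is reasonable for a CI helper, but the un-suppressed fix validates the scheme first — a sketch, not part of this PR:

    from urllib.parse import urlparse
    from urllib.request import Request, urlopen

    def fetch_bytes(url: str) -> bytes:
        # Restricting the scheme up front addresses the audit concern behind S310.
        if urlparse(url).scheme not in ("http", "https"):
            raise ValueError(f"refusing non-HTTP(S) URL: {url}")
        with urlopen(Request(url)) as response:
            return response.read()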

View File

@@ -47,7 +47,7 @@ class Requirement:
     def get_version(self) -> Optional[str]:
         try:
-            version = subprocess.check_output(self.command, shell=True).decode().strip()
+            version = subprocess.check_output(self.command, shell=True).decode().strip()  # noqa: S602
             if self.version_post_process:
                 version = self.version_post_process(version)
             return version.split()[-1]
@@ -76,7 +76,7 @@ class Requirement:
     def format_result(self) -> str:
         ideal_range_str = f"{self.ideal_range[0]} - {self.ideal_range[1]}"
         supported_range_str = f"{self.supported_range[0]} - {self.supported_range[1]}"
-        return f"{self.status.split()[0]} {self.name:<25} {self.version or 'N/A':<25} {ideal_range_str:<25} {supported_range_str:<25}"
+        return f"{self.status.split()[0]} {self.name:<25} {self.version or 'N/A':<25} {ideal_range_str:<25} {supported_range_str:<25}"  # noqa: E501

 def check_memory(min_gb: int) -> str:
@@ -101,8 +101,9 @@ def get_cpu_info() -> str:
 def get_docker_platform() -> str:
     try:
         output = (
-            subprocess.check_output(
-                "docker info --format '{{.OperatingSystem}}'", shell=True
+            subprocess.check_output(  # noqa: S602
+                "docker info --format '{{.OperatingSystem}}'",  # noqa: S607
+                shell=True,  # noqa: S607
             )
             .decode()
             .strip()
@@ -117,7 +118,7 @@ def get_docker_platform() -> str:
 @click.command(
     help="""
 This script checks the local environment for various software versions and other requirements, providing feedback on whether they are ideal, supported, or unsupported.
-"""
+"""  # noqa: E501
 )
 @click.option(
     "--docker", is_flag=True, help="Check Docker and Docker Compose requirements"
@@ -128,7 +129,7 @@ This script checks the local environment for various software versions and other
     help="Check frontend requirements (npm, Node.js, memory)",
 )
 @click.option("--backend", is_flag=True, help="Check backend requirements (Python)")
-def main(docker: bool, frontend: bool, backend: bool) -> None:
+def main(docker: bool, frontend: bool, backend: bool) -> None:  # noqa: C901
     requirements = [
         Requirement(
             "python",

View File

@@ -74,7 +74,7 @@ def run_cypress_for_test_file(
         print(f"DRY RUN: {cmd}")
         return 0

-    process = subprocess.Popen(
+    process = subprocess.Popen(  # noqa: S602
         cmd,
         shell=True,
         stdout=subprocess.PIPE,

View File

@@ -171,7 +171,7 @@ def generate_erd(file_path: str) -> None:
     """
     data = introspect_models()
     templates_path = os.path.dirname(__file__)
-    env = jinja2.Environment(loader=jinja2.FileSystemLoader(templates_path))
+    env = jinja2.Environment(loader=jinja2.FileSystemLoader(templates_path))  # noqa: S701
     # Load the template
     template = env.get_template("erd.template.puml")
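S701 warns that `jinja2.Environment` defaults to `autoescape=False`. That is harmless for this PlantUML template, hence the suppression; for HTML output the rule's intended fix is simply:

    import jinja2

    # autoescape=True makes Jinja escape HTML-special characters by default,
    # which is what S701 nudges toward for templates rendered into HTML.
    env = jinja2.Environment(
        loader=jinja2.FileSystemLoader("templates"), autoescape=True
    )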

View File

@@ -30,7 +30,7 @@ with open(PACKAGE_JSON) as package_file:
 def get_git_sha() -> str:
     try:
-        output = subprocess.check_output(["git", "rev-parse", "HEAD"])
+        output = subprocess.check_output(["git", "rev-parse", "HEAD"])  # noqa: S603, S607
         return output.decode().strip()
     except Exception:  # pylint: disable=broad-except
         return ""
@@ -58,7 +58,7 @@ setup(
     zip_safe=False,
     entry_points={
         "console_scripts": ["superset=superset.cli.main:superset"],
-        # the `postgres` and `postgres+psycopg2://` schemes were removed in SQLAlchemy 1.4
+        # the `postgres` and `postgres+psycopg2://` schemes were removed in SQLAlchemy 1.4  # noqa: E501
         # add an alias here to prevent breaking existing databases
         "sqlalchemy.dialects": [
             "postgres.psycopg2 = sqlalchemy.dialects.postgresql:dialect",

View File

@@ -39,7 +39,7 @@ class AdvancedDataTypeRestApi(BaseSupersetApi):
     -Will return available AdvancedDataTypes when the /types endpoint is accessed
     -Will return a AdvancedDataTypeResponse object when the /convert endpoint is accessed
     and is passed in valid arguments
-    """
+    """  # noqa: E501
     allow_browser_login = True
     resource_name = "advanced_data_type"
@@ -92,7 +92,7 @@ class AdvancedDataTypeRestApi(BaseSupersetApi):
                 $ref: '#/components/responses/404'
             500:
                 $ref: '#/components/responses/500'
-        """
+        """  # noqa: E501
         item = kwargs["rison"]
         advanced_data_type = item["type"]
         values = item["values"]

View File

@@ -65,7 +65,7 @@ def cidr_func(req: AdvancedDataTypeRequest) -> AdvancedDataTypeResponse:
             break
     else:
         resp["display_value"] = ", ".join(
-            map(
+            map(  # noqa: C417
                 lambda x: f"{x['start']} - {x['end']}"
                 if isinstance(x, dict)
                 else str(x),
@@ -76,7 +76,7 @@ def cidr_func(req: AdvancedDataTypeRequest) -> AdvancedDataTypeResponse:

 # Make this return a single clause
-def cidr_translate_filter_func(
+def cidr_translate_filter_func(  # noqa: C901
     col: Column, operator: FilterOperator, values: list[Any]
 ) -> Any:
     """

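C417 (`unnecessary-map`) is suppressed in both advanced-data-type modules; the comprehension form ruff would otherwise suggest reads roughly as follows (sketch over placeholder data):

    values = [{"start": "10.0.0.1", "end": "10.0.0.255"}, "192.168.0.1"]
    display_value = ", ".join(
        f"{x['start']} - {x['end']}" if isinstance(x, dict) else str(x)
        for x in values
    )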
View File

@@ -94,7 +94,7 @@ def port_translation_func(req: AdvancedDataTypeRequest) -> AdvancedDataTypeRespo
             break
     else:
         resp["display_value"] = ", ".join(
-            map(
+            map(  # noqa: C417
                 lambda x: f"{x['start']} - {x['end']}"
                 if isinstance(x, dict)
                 else str(x),
@@ -104,7 +104,7 @@ def port_translation_func(req: AdvancedDataTypeRequest) -> AdvancedDataTypeRespo
     return resp

-def port_translate_filter_func(
+def port_translate_filter_func(  # noqa: C901
     col: Column, operator: FilterOperator, values: list[Any]
 ) -> Any:
     """

View File

@@ -181,7 +181,7 @@ class AnnotationRestApi(BaseSupersetModelRestApi):
                 $ref: '#/components/responses/422'
             500:
                 $ref: '#/components/responses/500'
-        """
+        """  # noqa: E501
         self._apply_layered_relation_to_rison(pk, kwargs["rison"])
         return self.get_list_headless(**kwargs)

View File

@@ -35,15 +35,15 @@ from superset.utils.core import get_user_id
 logger = logging.getLogger(__name__)

-class CacheBackendNotInitialized(Exception):
+class CacheBackendNotInitialized(Exception):  # noqa: N818
     pass

-class AsyncQueryTokenException(Exception):
+class AsyncQueryTokenException(Exception):  # noqa: N818
     pass

-class AsyncQueryJobException(Exception):
+class AsyncQueryJobException(Exception):  # noqa: N818
     pass
@@ -88,7 +88,7 @@ def get_cache_backend(
         return RedisSentinelCacheBackend.from_config(cache_config)
     # TODO: Deprecate hardcoded plain Redis code and expand cache backend options.
-    # Maintain backward compatibility with 'GLOBAL_ASYNC_QUERIES_REDIS_CONFIG' until it is deprecated.
+    # Maintain backward compatibility with 'GLOBAL_ASYNC_QUERIES_REDIS_CONFIG' until it is deprecated.  # noqa: E501
     return redis.Redis(
         **config["GLOBAL_ASYNC_QUERIES_REDIS_CONFIG"], decode_responses=True
     )
@@ -265,7 +265,7 @@ class AsyncQueryManager:
         stream_name = f"{self._stream_prefix}{channel}"
         start_id = increment_id(last_id) if last_id else "-"
         results = self._cache.xrange(stream_name, start_id, "+", self.MAX_EVENT_COUNT)
-        # Decode bytes to strings, decode_responses is not supported at RedisCache and RedisSentinelCache
+        # Decode bytes to strings, decode_responses is not supported at RedisCache and RedisSentinelCache  # noqa: E501
         if isinstance(self._cache, (RedisSentinelCacheBackend, RedisCacheBackend)):
             decoded_results = [
                 (
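N818 wants exception class names to end in `Error`; these legacy names are suppressed instead of renamed, presumably to avoid breaking imports elsewhere. The conforming shape, with a backwards-compatible alias, would be something like:

    # N818-conforming name plus an alias so existing imports keep working.
    class CacheBackendNotInitializedError(Exception):
        """Raised when the async-query cache backend is used before setup."""

    CacheBackendNotInitialized = CacheBackendNotInitializedError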

View File

@@ -995,7 +995,7 @@ class ChartRestApi(BaseSupersetModelRestApi):
                 $ref: '#/components/responses/404'
             500:
                 $ref: '#/components/responses/500'
-        """
+        """  # noqa: E501
         try:
             body = ChartCacheWarmUpRequestSchema().load(request.json)
         except ValidationError as error:

View File

@@ -306,7 +306,7 @@ class ChartDataRestApi(ChartRestApi):
         cached_data = self._load_query_context_form_from_cache(cache_key)
         # Set form_data in Flask Global as it is used as a fallback
         # for async queries with jinja context
-        setattr(g, "form_data", cached_data)
+        g.form_data = cached_data
         query_context = self._create_query_context_from_form(cached_data)
         command = ChartDataCommand(query_context)
         command.validate()
@@ -343,7 +343,7 @@ class ChartDataRestApi(ChartRestApi):
         result = async_command.run(form_data, get_user_id())
         return self.response(202, **result)

-    def _send_chart_response(
+    def _send_chart_response(  # noqa: C901
         self,
         result: dict[Any, Any],
         form_data: dict[str, Any] | None = None,

View File

@@ -59,7 +59,7 @@ def get_column_key(label: tuple[str, ...], metrics: list[str]) -> tuple[Any, ...
     return tuple(parts)

-def pivot_df(  # pylint: disable=too-many-locals, too-many-arguments, too-many-statements, too-many-branches
+def pivot_df(  # pylint: disable=too-many-locals, too-many-arguments, too-many-statements, too-many-branches  # noqa: C901
     df: pd.DataFrame,
     rows: list[str],
     columns: list[str],
@@ -173,7 +173,7 @@ def pivot_df(  # pylint: disable=too-many-locals, too-many-arguments, too-many-s
                 subtotal = pivot_v2_aggfunc_map[aggfunc](df.iloc[:, slice_], axis=1)
                 depth = df.columns.nlevels - len(subgroup) - 1
                 total = metric_name if level == 0 else __("Subtotal")
-                subtotal_name = tuple([*subgroup, total, *([""] * depth)])
+                subtotal_name = tuple([*subgroup, total, *([""] * depth)])  # noqa: C409
                 # insert column after subgroup
                 df.insert(int(slice_.stop), subtotal_name, subtotal)
@@ -190,7 +190,7 @@ def pivot_df(  # pylint: disable=too-many-locals, too-many-arguments, too-many-s
                 )
                 depth = df.index.nlevels - len(subgroup) - 1
                 total = metric_name if level == 0 else __("Subtotal")
-                subtotal.name = tuple([*subgroup, total, *([""] * depth)])
+                subtotal.name = tuple([*subgroup, total, *([""] * depth)])  # noqa: C409
                 # insert row after subgroup
                 df = pd.concat(
                     [df[: slice_.stop], subtotal.to_frame().T, df[slice_.stop :]]
@@ -284,7 +284,7 @@ def table(
             format_ = "{:" + config["d3NumberFormat"] + "}"
             try:
                 df[column] = df[column].apply(format_.format)
-            except Exception:  # pylint: disable=broad-except
+            except Exception:  # pylint: disable=broad-except  # noqa: S110
                 # if we can't format the column for any reason, send as is
                 pass
@@ -298,7 +298,7 @@ post_processors = {
 @event_logger.log_this
-def apply_post_process(
+def apply_post_process(  # noqa: C901
     result: dict[Any, Any],
     form_data: Optional[dict[str, Any]] = None,
     datasource: Optional[Union["BaseDatasource", "Query"]] = None,
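C409 (`unnecessary-literal-within-tuple-call`) is suppressed twice above; the rewrite ruff suggests drops the intermediate list entirely — sketch with placeholder values:

    subgroup = ("region",)
    total = "Subtotal"
    depth = 2

    # C409-clean form: a tuple literal with unpacking, no list-to-tuple hop.
    subtotal_name = (*subgroup, total, *([""] * depth))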

View File

@@ -320,7 +320,7 @@ class ChartDataAdhocMetricSchema(Schema):
     Ad-hoc metrics are used to define metrics outside the datasource.
     """

-    expressionType = fields.String(
+    expressionType = fields.String(  # noqa: N815
         metadata={"description": "Simple or SQL metric", "example": "SQL"},
         required=True,
         validate=validate.OneOf(choices=("SIMPLE", "SQL")),
@@ -335,7 +335,7 @@ class ChartDataAdhocMetricSchema(Schema):
         ),
     )
     column = fields.Nested(ChartDataColumnSchema)
-    sqlExpression = fields.String(
+    sqlExpression = fields.String(  # noqa: N815
         metadata={
             "description": "The metric as defined by a SQL aggregate expression. "
             "Only required for SQL expression type.",
@@ -349,14 +349,14 @@ class ChartDataAdhocMetricSchema(Schema):
             "example": "Weighted observations",
         },
     )
-    hasCustomLabel = fields.Boolean(
+    hasCustomLabel = fields.Boolean(  # noqa: N815
         metadata={
-            "description": "When false, the label will be automatically generated based "
-            "on the aggregate expression. When true, a custom label has to be specified.",
+            "description": "When false, the label will be automatically generated based "  # noqa: E501
+            "on the aggregate expression. When true, a custom label has to be specified.",  # noqa: E501
             "example": True,
         },
     )
-    optionName = fields.String(
+    optionName = fields.String(  # noqa: N815
         metadata={
             "description": "Unique identifier. Can be any string value, as long as all "
             "metrics have a unique identifier. If undefined, a random name"
@@ -364,15 +364,15 @@ class ChartDataAdhocMetricSchema(Schema):
             "example": "metric_aec60732-fac0-4b17-b736-93f1a5c93e30",
         },
     )
-    timeGrain = fields.String(
+    timeGrain = fields.String(  # noqa: N815
         metadata={
             "description": "Optional time grain for temporal filters",
             "example": "PT1M",
         },
     )
-    isExtra = fields.Boolean(
+    isExtra = fields.Boolean(  # noqa: N815
         metadata={
-            "description": "Indicates if the filter has been added by a filter component "
+            "description": "Indicates if the filter has been added by a filter component "  # noqa: E501
             "as opposed to being a part of the original query."
         }
     )
@@ -437,8 +437,8 @@ class ChartDataRollingOptionsSchema(ChartDataPostProcessingOperationOptionsSchem
         metadata={
             "description": "columns on which to perform rolling, mapping source "
             "column to target column. For instance, `{'y': 'y'}` will replace the "
-            "column `y` with the rolling value in `y`, while `{'y': 'y2'}` will add "
-            "a column `y2` based on rolling values calculated from `y`, leaving the "
+            "column `y` with the rolling value in `y`, while `{'y': 'y2'}` will add "  # noqa: E501
+            "a column `y2` based on rolling values calculated from `y`, leaving the "  # noqa: E501
             "original column `y` unchanged.",
             "example": {"weekly_rolling_sales": "sales"},
         },
@@ -541,7 +541,7 @@ class ChartDataSelectOptionsSchema(ChartDataPostProcessingOperationOptionsSchema
     columns = fields.List(
         fields.String(),
         metadata={
-            "description": "Columns which to select from the input data, in the desired "
+            "description": "Columns which to select from the input data, in the desired "  # noqa: E501
             "order. If columns are renamed, the original column name should be "
             "referenced here.",
             "example": ["country", "gender", "age"],
@@ -691,8 +691,8 @@ class ChartDataBoxplotOptionsSchema(ChartDataPostProcessingOperationOptionsSchem
             "references to datasource metrics (strings), or ad-hoc metrics"
             "which are defined only within the query object. See "
             "`ChartDataAdhocMetricSchema` for the structure of ad-hoc metrics. "
-            "When metrics is undefined or null, the query is executed without a groupby. "
-            "However, when metrics is an array (length >= 0), a groupby clause is added "
+            "When metrics is undefined or null, the query is executed without a groupby. "  # noqa: E501
+            "However, when metrics is an array (length >= 0), a groupby clause is added "  # noqa: E501
             "to the query."
         },
         allow_none=True,
@@ -907,7 +907,7 @@ class ChartDataPostProcessingOperationSchema(Schema):
 class ChartDataFilterSchema(Schema):
     col = fields.Raw(
         metadata={
-            "description": "The column to filter by. Can be either a string (physical or "
+            "description": "The column to filter by. Can be either a string (physical or "  # noqa: E501
             "saved expression) or an object (adhoc column)",
             "example": "country",
         },
@@ -934,7 +934,7 @@ class ChartDataFilterSchema(Schema):
             "example": "PT1M",
         },
     )
-    isExtra = fields.Boolean(
+    isExtra = fields.Boolean(  # noqa: N815
         metadata={
             "description": "Indicates if the filter has been added by a filter "
             "component as opposed to being a part of the original query."
@@ -995,7 +995,7 @@ class ChartDataExtrasSchema(Schema):

 class AnnotationLayerSchema(Schema):
-    annotationType = fields.String(
+    annotationType = fields.String(  # noqa: N815
         metadata={"description": "Type of annotation layer"},
         validate=validate.OneOf(choices=[ann.value for ann in AnnotationType]),
     )
@@ -1003,20 +1003,20 @@ class AnnotationLayerSchema(Schema):
         metadata={"description": "Layer color"},
         allow_none=True,
     )
-    descriptionColumns = fields.List(
+    descriptionColumns = fields.List(  # noqa: N815
         fields.String(),
         metadata={
             "description": "Columns to use as the description. If none are provided, "
             "all will be shown."
         },
     )
-    hideLine = fields.Boolean(
+    hideLine = fields.Boolean(  # noqa: N815
         metadata={
             "description": "Should line be hidden. Only applies to line annotations"
         },
         allow_none=True,
     )
-    intervalEndColumn = fields.String(
+    intervalEndColumn = fields.String(  # noqa: N815
         metadata={
             "description": "Column containing end of interval. "
             "Only applies to interval layers"
@@ -1046,17 +1046,17 @@ class AnnotationLayerSchema(Schema):
     show = fields.Boolean(
         metadata={"description": "Should the layer be shown"}, required=True
     )
-    showLabel = fields.Boolean(
+    showLabel = fields.Boolean(  # noqa: N815
         metadata={"description": "Should the label always be shown"},
         allow_none=True,
     )
-    showMarkers = fields.Boolean(
+    showMarkers = fields.Boolean(  # noqa: N815
         metadata={
             "description": "Should markers be shown. Only applies to line annotations."
         },
         required=True,
     )
-    sourceType = fields.String(
+    sourceType = fields.String(  # noqa: N815
         metadata={"description": "Type of source for annotation data"},
         validate=validate.OneOf(
             choices=(
@@ -1078,11 +1078,11 @@ class AnnotationLayerSchema(Schema):
             )
         ),
     )
-    timeColumn = fields.String(
+    timeColumn = fields.String(  # noqa: N815
         metadata={"description": "Column with event date or interval start date"},
         allow_none=True,
     )
-    titleColumn = fields.String(
+    titleColumn = fields.String(  # noqa: N815
         metadata={"description": "Column with title"},
         allow_none=True,
     )
@@ -1180,7 +1180,7 @@ class ChartDataQueryObjectSchema(Schema):
         fields.Nested(ChartDataPostProcessingOperationSchema, allow_none=True),
         allow_none=True,
         metadata={
-            "description": "Post processing operations to be applied to the result set. "
+            "description": "Post processing operations to be applied to the result set. "  # noqa: E501
             "Operations are applied to the result set in sequential order."
         },
     )
@@ -1420,7 +1420,7 @@ class ChartDataResponseResult(Schema):
     )
     cache_timeout = fields.Integer(
         metadata={
-            "description": "Cache timeout in following order: custom timeout, datasource "
+            "description": "Cache timeout in following order: custom timeout, datasource "  # noqa: E501
             "timeout, cache default timeout, config default cache timeout."
         },
         required=True,
@@ -1531,7 +1531,7 @@ class GetFavStarIdsSchema(Schema):
     result = fields.List(
         fields.Nested(ChartFavStarResponseResult),
         metadata={
-            "description": "A list of results for each corresponding chart in the request"
+            "description": "A list of results for each corresponding chart in the request"  # noqa: E501
         },
     )
@@ -1559,7 +1559,7 @@ class ChartCacheWarmUpRequestSchema(Schema):
     )
     dashboard_id = fields.Integer(
         metadata={
-            "description": "The ID of the dashboard to get filters for when warming cache"
+            "description": "The ID of the dashboard to get filters for when warming cache"  # noqa: E501
         }
     )
     extra_filters = fields.String(
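N815 flags the mixedCase attributes throughout these schemas; they stay camelCase because they mirror the JSON payload field names, so each gets a suppression. A rule-conforming alternative would use snake_case attributes with an explicit `data_key` — a marshmallow sketch, not how this module is written:

    from marshmallow import Schema, fields

    class AnnotationLayerSketchSchema(Schema):
        # snake_case satisfies N815; data_key keeps the camelCase wire format.
        annotation_type = fields.String(data_key="annotationType")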

View File

@@ -29,7 +29,7 @@ if feature_flags_func:
     try:
         # pylint: disable=not-callable
         feature_flags = feature_flags_func(feature_flags)
-    except Exception:  # pylint: disable=broad-except
+    except Exception:  # pylint: disable=broad-except  # noqa: S110
         # bypass any feature flags that depend on context
         # that's not available
         pass
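S110 (`try-except-pass`) is suppressed here and in the post-processing module; the lint's preferred shape records the swallowed exception instead — an illustrative sketch with a stand-in hook:

    import logging

    logger = logging.getLogger(__name__)

    def risky_override(flags: dict) -> dict:
        raise RuntimeError("context not available")  # stand-in for the real hook

    def apply_overrides(flags: dict) -> dict:
        try:
            return risky_override(flags)
        except Exception:  # broad on purpose
            # Logging the failure is what S110 nudges toward, instead of bare pass.
            logger.debug("override failed; using flags as-is", exc_info=True)
            return flags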

View File

@@ -47,7 +47,7 @@ def superset() -> None:
 # add sub-commands
-for load, module_name, is_pkg in pkgutil.walk_packages(
+for load, module_name, is_pkg in pkgutil.walk_packages(  # noqa: B007
     cli.__path__, cli.__name__ + "."
 ):
     module = importlib.import_module(module_name)
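B007 flags loop-control variables that the body never uses (`load` and `is_pkg` here); the suppression keeps the tuple unpacking readable. The conventional fix renames unused variables to underscores — a runnable sketch over a stdlib package:

    import importlib
    import pkgutil
    import encodings  # stdlib package, used purely as a walkable example

    # B007-clean form: the unused finder and is_pkg slots become underscores.
    for _, module_name, _ in pkgutil.walk_packages(encodings.__path__, "encodings."):
        importlib.import_module(module_name)
        break  # one module is enough for the demonstration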

View File

@@ -76,5 +76,5 @@ def load_test_users() -> None:
             "user",
             username + "@fab.org",
             sm.find_role(role),
-            password="general",
+            password="general",  # noqa: S106
         )
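S106 (hardcoded password as argument) is expected in a loader that creates throwaway test users, hence the inline suppression. Production code would source the secret from configuration — sketch, with the variable name being an assumption:

    import os

    # S106-clean: the secret comes from the environment, not a literal.
    password = os.environ.get("TEST_USER_PASSWORD", "")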

View File

@@ -198,7 +198,7 @@ def collect_connection_info(
     return yaml.safe_load(raw_engine_kwargs)

-def test_db_engine_spec(
+def test_db_engine_spec(  # noqa: C901
     console: Console,
     sqlalchemy_uri: str,
 ) -> type[BaseEngineSpec] | None:

View File

@@ -47,7 +47,7 @@ class ImportChartsCommand(ImportModelsCommand):
     import_error = ChartImportError

     @staticmethod
-    def _import(configs: dict[str, Any], overwrite: bool = False) -> None:
+    def _import(configs: dict[str, Any], overwrite: bool = False) -> None:  # noqa: C901
         # discover datasets associated with charts
         dataset_uuids: set[str] = set()
         for file_name, config in configs.items():

View File

@@ -71,7 +71,7 @@ class UpdateChartCommand(UpdateMixin, BaseCommand):
         return ChartDAO.update(self._model, self._properties)

-    def validate(self) -> None:
+    def validate(self) -> None:  # noqa: C901
         exceptions: list[ValidationError] = []
         dashboard_ids = self._properties.get("dashboards")
         owner_ids: Optional[list[int]] = self._properties.get("owners")

View File

@@ -81,7 +81,7 @@ def import_chart(
     return slc_to_import.id

-def import_dashboard(
+def import_dashboard(  # noqa: C901
     # pylint: disable=too-many-locals,too-many-statements
     dashboard_to_import: Dashboard,
     dataset_id_mapping: Optional[dict[int, int]] = None,

View File

@@ -60,7 +60,7 @@ class ImportDashboardsCommand(ImportModelsCommand):
     # TODO (betodealmeida): refactor to use code from other commands
     # pylint: disable=too-many-branches, too-many-locals
     @staticmethod
-    def _import(configs: dict[str, Any], overwrite: bool = False) -> None:
+    def _import(configs: dict[str, Any], overwrite: bool = False) -> None:  # noqa: C901
         # discover charts and datasets associated with dashboards
         chart_uuids: set[str] = set()
         dataset_uuids: set[str] = set()

View File

@@ -57,7 +57,7 @@ def build_uuid_to_id_map(position: dict[str, Any]) -> dict[str, int]:
     }

-def update_id_refs(  # pylint: disable=too-many-locals
+def update_id_refs(  # pylint: disable=too-many-locals  # noqa: C901
     config: dict[str, Any],
     chart_ids: dict[str, int],
     dataset_info: dict[str, dict[str, Any]],
@@ -143,7 +143,7 @@ def update_id_refs(  # pylint: disable=too-many-locals
     return fixed

-def import_dashboard(
+def import_dashboard(  # noqa: C901
     config: dict[str, Any],
     overwrite: bool = False,
     ignore_permissions: bool = False,

View File

@@ -119,7 +119,7 @@ class UpdateDashboardCommand(UpdateMixin, BaseCommand):
         if exceptions:
             raise DashboardInvalidError(exceptions=exceptions)

-    def process_tab_diff(self) -> None:
+    def process_tab_diff(self) -> None:  # noqa: C901
         def find_deleted_tabs() -> list[str]:
             position_json = self._properties.get("position_json", "")
             current_tabs = self._model.tabs  # type: ignore
@@ -143,7 +143,7 @@ class UpdateDashboardCommand(UpdateMixin, BaseCommand):
             """
             The dashboard tab used in this report has been deleted and your report has been deactivated.
             Please update your report settings to remove or change the tab used.
-            """
+            """  # noqa: E501
         )
         html_content = textwrap.dedent(

View File

@@ -48,7 +48,7 @@ def import_database(
         config["id"] = existing.id
     elif not can_write:
         raise ImportFailedError(
-            "Database doesn't exist and user doesn't have permission to create databases"
+            "Database doesn't exist and user doesn't have permission to create databases"  # noqa: E501
         )
     # Check if this URI is allowed
     if app.config["PREVENT_UNSAFE_DB_CONNECTIONS"]:

View File

@@ -69,9 +69,9 @@ class SSHTunnelRequiredFieldValidationError(ValidationError, SSHTunnelError):
 )

-class SSHTunnelMissingCredentials(CommandInvalidError, SSHTunnelError):
+class SSHTunnelMissingCredentials(CommandInvalidError, SSHTunnelError):  # noqa: N818
     message = _("Must provide credentials for the SSH Tunnel")

-class SSHTunnelInvalidCredentials(CommandInvalidError, SSHTunnelError):
+class SSHTunnelInvalidCredentials(CommandInvalidError, SSHTunnelError):  # noqa: N818
     message = _("Cannot have multiple credentials for the SSH Tunnel")

View File

@@ -93,7 +93,7 @@ class TestConnectionDatabaseCommand(BaseCommand):
         self._context = context
         self._uri = uri

-    def run(self) -> None:  # pylint: disable=too-many-statements,too-many-branches
+    def run(self) -> None:  # pylint: disable=too-many-statements,too-many-branches  # noqa: C901
         self.validate()
         ex_str = ""
         ssh_tunnel = self._properties.get("ssh_tunnel")
@@ -155,7 +155,7 @@ class TestConnectionDatabaseCommand(BaseCommand):
                 raise SupersetTimeoutException(
                     error_type=SupersetErrorType.CONNECTION_DATABASE_TIMEOUT,
                     message=(
-                        "Please check your connection details and database settings, "
+                        "Please check your connection details and database settings, "  # noqa: E501
                         "and ensure that your database is accepting connections, "
                         "then try connecting again."
                     ),

View File

@@ -143,7 +143,7 @@ class OwnersNotFoundValidationError(ValidationError):
         super().__init__([_("Owners are invalid")], field_name="owners")

-class DatasetDataAccessIsNotAllowed(ValidationError):
+class DatasetDataAccessIsNotAllowed(ValidationError):  # noqa: N818
     status = 422

     def __init__(self, message: str) -> None:
@@ -195,7 +195,7 @@ class DatasetDuplicateFailedError(CreateFailedError):
     message = _("Dataset could not be duplicated.")

-class DatasetForbiddenDataURI(ImportFailedError):
+class DatasetForbiddenDataURI(ImportFailedError):  # noqa: N818
     message = _("Data URI is not allowed.")

View File

@@ -102,7 +102,7 @@ def validate_data_uri(data_uri: str) -> None:
         raise DatasetForbiddenDataURI()

-def import_dataset(
+def import_dataset(  # noqa: C901
     config: dict[str, Any],
     overwrite: bool = False,
     force_data: bool = False,
@@ -151,7 +151,7 @@ def import_dataset(
     try:
         dataset = SqlaTable.import_from_dict(config, recursive=True, sync=sync)
     except MultipleResultsFound:
-        # Finding multiple results when importing a dataset only happens because initially
+        # Finding multiple results when importing a dataset only happens because initially  # noqa: E501
         # datasets were imported without schemas (eg, `examples.NULL.users`), and later
         # they were fixed to have the default schema (eg, `examples.public.users`). If a
         # user created `examples.public.users` during that time the second import will
@@ -193,7 +193,7 @@ def load_data(data_uri: str, dataset: SqlaTable, database: Database) -> None:
     """
     validate_data_uri(data_uri)
     logger.info("Downloading data from %s", data_uri)
-    data = request.urlopen(data_uri)  # pylint: disable=consider-using-with
+    data = request.urlopen(data_uri)  # pylint: disable=consider-using-with  # noqa: S310
     if data_uri.endswith(".gz"):
         data = gzip.open(data)
     df = pd.read_csv(data, encoding="utf-8")

View File

@@ -71,7 +71,7 @@ class CommandInvalidError(CommandException):
         self._exceptions.extend(exceptions)

     def get_list_classnames(self) -> list[str]:
-        return list(sorted({ex.__class__.__name__ for ex in self._exceptions}))
+        return sorted({ex.__class__.__name__ for ex in self._exceptions})

     def normalized_messages(self) -> dict[Any, Any]:
         errors: dict[Any, Any] = {}

View File

@@ -59,7 +59,7 @@ class GetExploreCommand(BaseCommand, ABC):
         self._slice_id = params.slice_id

     # pylint: disable=too-many-locals,too-many-branches,too-many-statements
-    def run(self) -> Optional[dict[str, Any]]:
+    def run(self) -> Optional[dict[str, Any]]:  # noqa: C901
         initial_form_data = {}
         if self._permalink_key is not None:
             command = GetExplorePermalinkCommand(self._permalink_key)

View File

@@ -91,7 +91,7 @@ class ImportModelsCommand(BaseCommand):
         if self.dao.model_cls:
             validate_metadata_type(metadata, self.dao.model_cls.__name__, exceptions)

-        # load the configs and make sure we have confirmation to overwrite existing models
+        # load the configs and make sure we have confirmation to overwrite existing models  # noqa: E501
         self._configs = load_configs(
             self.contents,
             self.schemas,

View File

@@ -82,7 +82,7 @@ class ImportAssetsCommand(BaseCommand):
     # pylint: disable=too-many-locals
     @staticmethod
-    def _import(configs: dict[str, Any]) -> None:
+    def _import(configs: dict[str, Any]) -> None:  # noqa: C901
         # import databases first
         database_ids: dict[str, int] = {}
         for file_name, config in configs.items():

View File

@@ -87,7 +87,7 @@ class ImportExamplesCommand(ImportModelsCommand):
     )

     @staticmethod
-    def _import(  # pylint: disable=too-many-locals, too-many-branches
+    def _import(  # pylint: disable=too-many-locals, too-many-branches  # noqa: C901
         configs: dict[str, Any],
         overwrite: bool = False,
         force_data: bool = False,

View File

@@ -96,7 +96,7 @@ def validate_metadata_type(
 # pylint: disable=too-many-locals,too-many-arguments
-def load_configs(
+def load_configs(  # noqa: C901
     contents: dict[str, str],
     schemas: dict[str, Schema],
     passwords: dict[str, str],

View File

@@ -96,7 +96,7 @@ class AlertCommand(BaseCommand):
         if len(rows) > 1:
             raise AlertQueryMultipleRowsError(
                 message=_(
-                    "Alert query returned more than one row. %(num_rows)s rows returned",
+                    "Alert query returned more than one row. %(num_rows)s rows returned",  # noqa: E501
                     num_rows=len(rows),
                 )
             )

View File

@@ -61,7 +61,7 @@ class CreateReportScheduleCommand(CreateMixin, BaseReportScheduleCommand):
         a list of `ValidationErrors` to be returned in the API response if any.

         Fields were loaded according to the `ReportSchedulePostSchema` schema.
-        """
+        """  # noqa: E501
         # Required fields
         cron_schedule = self._properties["crontab"]
         name = self._properties["name"]

View File

@@ -95,7 +95,7 @@ class ReportScheduleEitherChartOrDashboardError(ValidationError):
 )

-class ReportScheduleFrequencyNotAllowed(ValidationError):
+class ReportScheduleFrequencyNotAllowed(ValidationError):  # noqa: N818
     """
     Marshmallow validation error for report schedule configured to run more
     frequently than allowed
@@ -140,7 +140,7 @@ class DashboardNotSavedValidationError(ValidationError):
     def __init__(self) -> None:
         super().__init__(
             _(
-                "Please save your dashboard first, then try creating a new email report."
+                "Please save your dashboard first, then try creating a new email report."  # noqa: E501
             ),
             field_name="dashboard",
         )

View File

@@ -226,7 +226,7 @@ class BaseReportState:
     ) -> list[str]:
         """
         Retrieve the URL for the dashboard tabs, or return the dashboard URL if no tabs are available.
-        """
+        """  # noqa: E501
         force = "true" if self._report_schedule.force_screenshot else "false"
         if (
             dashboard_state := self._report_schedule.extra.get("dashboard")
@@ -464,7 +464,7 @@ class BaseReportState:
         }
         return log_data

-    def _get_notification_content(self) -> NotificationContent:
+    def _get_notification_content(self) -> NotificationContent:  # noqa: C901
         """
         Gets a notification content, this is composed by a title and a screenshot

View File

@@ -53,14 +53,14 @@ class UpdateReportScheduleCommand(UpdateMixin, BaseReportScheduleCommand):
         self.validate()
         return ReportScheduleDAO.update(self._model, self._properties)

-    def validate(self) -> None:
+    def validate(self) -> None:  # noqa: C901
         """
         Validates the properties of a report schedule configuration, including uniqueness
         of name and type, relations based on the report type, frequency, etc. Populates
         a list of `ValidationErrors` to be returned in the API response if any.

         Fields were loaded according to the `ReportSchedulePutSchema` schema.
-        """
+        """  # noqa: E501
         # Load existing report schedule config
         self._model = ReportScheduleDAO.find_by_id(self._model_id)
         if not self._model:

View File

@@ -39,7 +39,7 @@ class QueryPruneCommand(BaseCommand):
     Attributes:
         retention_period_days (int): The number of days for which records should be retained.
             Records older than this period will be deleted.
-    """
+    """  # noqa: E501

     def __init__(self, retention_period_days: int):
         """
@@ -83,7 +83,7 @@ class QueryPruneCommand(BaseCommand):
                 # Update the total number of deleted records
                 total_deleted += result.rowcount

-                # Explicitly commit the transaction given that if an error occurs, we want to ensure that the
+                # Explicitly commit the transaction given that if an error occurs, we want to ensure that the  # noqa: E501
                 # records that have been deleted so far are committed
                 db.session.commit()
@@ -91,7 +91,7 @@ class QueryPruneCommand(BaseCommand):
                 percentage_complete = (total_deleted / total_rows) * 100
                 if percentage_complete >= next_logging_threshold:
                     logger.info(
-                        "Deleted %s rows from the query table older than %s days (%d%% complete)",
+                        "Deleted %s rows from the query table older than %s days (%d%% complete)",  # noqa: E501
                         total_deleted,
                         self.retention_period_days,
                         percentage_complete,

View File

@@ -112,7 +112,7 @@ class QueryContextFactory:  # pylint: disable=too-few-public-methods
         self._apply_filters(query_object)
         return query_object

-    def _apply_granularity(
+    def _apply_granularity(  # noqa: C901
         self,
         query_object: QueryObject,
         form_data: dict[str, Any] | None,

View File

@@ -394,15 +394,15 @@ class QueryContextProcessor:
:returns: The time offset.
"""
if offset == "inherit":
-# return the difference in days between the from and the to dttm formatted as a string with the " days ago" suffix
+# return the difference in days between the from and the to dttm formatted as a string with the " days ago" suffix  # noqa: E501
return f"{(outer_to_dttm - outer_from_dttm).days} days ago"
if self.is_valid_date(offset):
-# return the offset as the difference in days between the outer from dttm and the offset date (which is a YYYY-MM-DD string) formatted as a string with the " days ago" suffix
+# return the offset as the difference in days between the outer from dttm and the offset date (which is a YYYY-MM-DD string) formatted as a string with the " days ago" suffix  # noqa: E501
offset_date = datetime.strptime(offset, "%Y-%m-%d")
return f"{(outer_from_dttm - offset_date).days} days ago"
return ""
-def processing_time_offsets(  # pylint: disable=too-many-locals,too-many-statements
+def processing_time_offsets(  # pylint: disable=too-many-locals,too-many-statements  # noqa: C901
self,
df: pd.DataFrame,
query_object: QueryObject,
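
For context, both branches above reduce every supported offset to the same "N days ago" shape. A small illustration with hypothetical datetimes:

from datetime import datetime

outer_from_dttm = datetime(2024, 1, 1)
outer_to_dttm = datetime(2024, 2, 1)

# "inherit": shift back by the full length of the current time range
print(f"{(outer_to_dttm - outer_from_dttm).days} days ago")  # "31 days ago"

# explicit YYYY-MM-DD offset: measured from the start of the range
offset_date = datetime.strptime("2023-12-01", "%Y-%m-%d")
print(f"{(outer_from_dttm - offset_date).days} days ago")  # "31 days ago"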
@@ -433,14 +433,14 @@ class QueryContextProcessor:
for offset in query_object.time_offsets:
try:
# pylint: disable=line-too-long
-# Since the x-axis is also a column name for the time filter, x_axis_label will be set as granularity
+# Since the x-axis is also a column name for the time filter, x_axis_label will be set as granularity  # noqa: E501
# these query object are equivalent:
-# 1) { granularity: 'dttm_col', time_range: '2020 : 2021', time_offsets: ['1 year ago']}
+# 1) { granularity: 'dttm_col', time_range: '2020 : 2021', time_offsets: ['1 year ago']}  # noqa: E501
# 2) { columns: [
-# {label: 'dttm_col', sqlExpression: 'dttm_col', "columnType": "BASE_AXIS" }
+# {label: 'dttm_col', sqlExpression: 'dttm_col', "columnType": "BASE_AXIS" }  # noqa: E501
# ],
# time_offsets: ['1 year ago'],
-# filters: [{col: 'dttm_col', op: 'TEMPORAL_RANGE', val: '2020 : 2021'}],
+# filters: [{col: 'dttm_col', op: 'TEMPORAL_RANGE', val: '2020 : 2021'}],  # noqa: E501
# }
original_offset = offset
if self.is_valid_date(offset) or offset == "inherit":
@@ -494,10 +494,10 @@ class QueryContextProcessor:
if flt.get("col") != x_axis_label
]
-# Inherit or custom start dates might compute the same offset but the response cannot be given
+# Inherit or custom start dates might compute the same offset but the response cannot be given  # noqa: E501
-# using cached data unless you are using the same date of inherited range, that's why we
+# using cached data unless you are using the same date of inherited range, that's why we  # noqa: E501
-# set the cache cache using a custom key that includes the original offset and the computed offset
+# set the cache cache using a custom key that includes the original offset and the computed offset  # noqa: E501
-# for those two scenarios, the rest of the scenarios will use the original offset as cache key
+# for those two scenarios, the rest of the scenarios will use the original offset as cache key  # noqa: E501
cached_time_offset_key = (
offset if offset == original_offset else f"{offset}_{original_offset}"
)
@@ -824,7 +824,7 @@ class QueryContextProcessor:
return annotation_data
@staticmethod
-def get_viz_annotation_data(
+def get_viz_annotation_data(  # noqa: C901
annotation_layer: dict[str, Any], force: bool
) -> dict[str, Any]:
# pylint: disable=import-outside-toplevel


@@ -346,7 +346,7 @@ class QueryObject:  # pylint: disable=too-many-instance-attributes
default=str,
)
-def cache_key(self, **extra: Any) -> str:
+def cache_key(self, **extra: Any) -> str:  # noqa: C901
"""
The cache key is made out of the key/values from to_dict(), plus any
other key/values in `extra`
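
The docstring describes the general recipe: serialize the query payload together with any extras, then hash the result. A hedged sketch of that idea only; the helper name and the md5 choice are illustrative, not the exact Superset implementation:

import hashlib
import json

def cache_key(payload: dict, **extra) -> str:
    # stable serialization: sorted keys, stringified non-JSON values
    material = json.dumps({**payload, **extra}, sort_keys=True, default=str)
    return hashlib.md5(material.encode("utf-8")).hexdigest()

key = cache_key({"granularity": "dttm_col"}, time_offset="1 year ago")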


@@ -51,7 +51,7 @@ class QueryCacheManager:
# pylint: disable=too-many-instance-attributes,too-many-arguments
def __init__(
self,
-df: DataFrame = DataFrame(),
+df: DataFrame = DataFrame(),  # noqa: B008
query: str = "",
annotation_data: dict[str, Any] | None = None,
applied_template_filters: list[str] | None = None,
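
The `# noqa: B008` here marks a deliberate exception: B008 flags function calls in argument defaults because the default is evaluated once, at definition time, and then shared by every call. A quick demonstration of the hazard the rule guards against (hypothetical function, not Superset code):

import time
from datetime import datetime

def log_event(message: str, when: datetime = datetime.now()) -> str:  # noqa: B008
    # datetime.now() ran once, at definition time; every call reuses that instant
    return f"{when.isoformat()} {message}"

first = log_event("a")
time.sleep(1)
second = log_event("b")
assert first.split(" ")[0] == second.split(" ")[0]  # identical timestamps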


@@ -530,7 +530,7 @@ DEFAULT_FEATURE_FLAGS: dict[str, bool] = {
"DRILL_BY": True,
"DATAPANEL_CLOSED_BY_DEFAULT": False,
"HORIZONTAL_FILTER_BAR": False,
-# The feature is off by default, and currently only supported in Presto and Postgres,
+# The feature is off by default, and currently only supported in Presto and Postgres,  # noqa: E501
# and Bigquery.
# It also needs to be enabled on a per-database basis, by adding the key/value pair
# `cost_estimate_enabled: true` to the database `extra` attribute.
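
For reference, the per-database opt-in mentioned here lives in the database's `extra` attribute, which is a JSON blob. A minimal sketch of what that value might look like:

import json

# the database's `extra` attribute; this enables cost estimation for one database
extra = json.dumps({"cost_estimate_enabled": True})
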
@@ -554,7 +554,7 @@ DEFAULT_FEATURE_FLAGS: dict[str, bool] = {
"PLAYWRIGHT_REPORTS_AND_THUMBNAILS": False,
# Set to True to enable experimental chart plugins
"CHART_PLUGINS_EXPERIMENTAL": False,
-# Regardless of database configuration settings, force SQLLAB to run async using Celery
+# Regardless of database configuration settings, force SQLLAB to run async using Celery  # noqa: E501
"SQLLAB_FORCE_RUN_ASYNC": False,
# Set to True to to enable factory resent CLI command
"ENABLE_FACTORY_RESET_COMMAND": False,
@@ -1076,7 +1076,7 @@ SQLLAB_QUERY_RESULT_TIMEOUT = 0
# your specific infrastructure. For example, you could analyze queries a posteriori by
# running EXPLAIN on them, and compute a histogram of relative costs to present the
# cost as a percentile, this step is optional as every db engine spec has its own
-# query cost formatter, but it you wanna customize it you can define it inside the config:
+# query cost formatter, but it you wanna customize it you can define it inside the config:  # noqa: E501
# def postgres_query_cost_formatter(
# result: List[Dict[str, Any]]
@@ -1147,7 +1147,7 @@ CSV_TO_HIVE_UPLOAD_DIRECTORY = "EXTERNAL_HIVE_TABLES/"
# Function that creates upload directory dynamically based on the
# database used, user and schema provided.
-def CSV_TO_HIVE_UPLOAD_DIRECTORY_FUNC(  # pylint: disable=invalid-name
+def CSV_TO_HIVE_UPLOAD_DIRECTORY_FUNC(  # pylint: disable=invalid-name  # noqa: N802
database: Database,
user: models.User,  # pylint: disable=unused-argument
schema: str | None,
@@ -1214,7 +1214,7 @@ SMTP_STARTTLS = True
SMTP_SSL = False
SMTP_USER = "superset"
SMTP_PORT = 25
-SMTP_PASSWORD = "superset"
+SMTP_PASSWORD = "superset"  # noqa: S105
SMTP_MAIL_FROM = "superset@superset.com"
# If True creates a default SSL context with ssl.Purpose.CLIENT_AUTH using the
# default system root CA certificates.
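
S105 flags hardcoded password strings; the defaults above are placeholders meant to be overridden in deployment. A common `superset_config.py` pattern, assuming the secrets are provided as environment variables:

import os

SMTP_USER = os.environ.get("SMTP_USER", "superset")
SMTP_PASSWORD = os.environ["SMTP_PASSWORD"]  # fail fast if the secret is missing
SMTP_MAIL_FROM = os.environ.get("SMTP_MAIL_FROM", "superset@superset.com")
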
@@ -1265,7 +1265,7 @@ TRACKING_URL_TRANSFORMER = lambda url: url  # noqa: E731
DB_POLL_INTERVAL_SECONDS: dict[str, int] = {}
# Interval between consecutive polls when using Presto Engine
-# See here: https://github.com/dropbox/PyHive/blob/8eb0aeab8ca300f3024655419b93dad926c1a351/pyhive/presto.py#L93  # pylint: disable=line-too-long,useless-suppression
+# See here: https://github.com/dropbox/PyHive/blob/8eb0aeab8ca300f3024655419b93dad926c1a351/pyhive/presto.py#L93  # pylint: disable=line-too-long,useless-suppression  # noqa: E501
PRESTO_POLL_INTERVAL = int(timedelta(seconds=1).total_seconds())
# Allow list of custom authentications for each DB engine.
@@ -1361,17 +1361,17 @@ DISALLOWED_SQL_FUNCTIONS: dict[str, set[str]] = {
# NOTE: For backward compatibility, you can unpack any of the above arguments in your
# function definition, but keep the **kwargs as the last argument to allow new args
# to be added later without any errors.
-# NOTE: whatever you in this function DOES NOT affect the cache key, so ideally this function
+# NOTE: whatever you in this function DOES NOT affect the cache key, so ideally this function  # noqa: E501
# is "functional", as in deterministic from its input.
-def SQL_QUERY_MUTATOR(  # pylint: disable=invalid-name,unused-argument
+def SQL_QUERY_MUTATOR(  # pylint: disable=invalid-name,unused-argument  # noqa: N802
sql: str, **kwargs: Any
) -> str:
return sql
-# A variable that chooses whether to apply the SQL_QUERY_MUTATOR before or after splitting the input query
+# A variable that chooses whether to apply the SQL_QUERY_MUTATOR before or after splitting the input query  # noqa: E501
# It allows for using the SQL_QUERY_MUTATOR function for more than comments
-# Usage: If you want to apply a change to every statement to a given query, set MUTATE_AFTER_SPLIT = True
+# Usage: If you want to apply a change to every statement to a given query, set MUTATE_AFTER_SPLIT = True  # noqa: E501
# An example use case is if data has role based access controls, and you want to apply
# a SET ROLE statement alongside every user query. Changing this variable maintains
# functionality for both the SQL_Lab and Charts.
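
A sketch of a mutator for the SET ROLE use case just described. The `username` keyword is an assumption for illustration; the exact kwargs passed vary by Superset version, which is why the contract keeps the trailing `**kwargs`:

from typing import Any

def SQL_QUERY_MUTATOR(sql: str, **kwargs: Any) -> str:  # noqa: N802
    # NOTE: "username" as a kwarg is assumed here for illustration only
    username = kwargs.get("username") or "anonymous"
    return f"SET ROLE '{username}';\n{sql}"

# apply the mutator to each statement after splitting, per the comment above
MUTATE_AFTER_SPLIT = True
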
@@ -1381,7 +1381,7 @@ MUTATE_AFTER_SPLIT = False
# This allows for a user to add header data to any outgoing emails. For example,
# if you need to include metadata in the header or you want to change the specifications
# of the email title, header, or sender.
-def EMAIL_HEADER_MUTATOR(  # pylint: disable=invalid-name,unused-argument
+def EMAIL_HEADER_MUTATOR(  # pylint: disable=invalid-name,unused-argument  # noqa: N802
msg: MIMEMultipart, **kwargs: Any
) -> MIMEMultipart:
return msg
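
A minimal sketch of such a mutator; the header name is made up for illustration:

from email.mime.multipart import MIMEMultipart
from typing import Any

def EMAIL_HEADER_MUTATOR(msg: MIMEMultipart, **kwargs: Any) -> MIMEMultipart:  # noqa: N802
    # stamp every outgoing email with a (hypothetical) audit header
    msg["X-Superset-Origin"] = "analytics-prod"
    return msg
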
@@ -1397,7 +1397,7 @@ EXCLUDE_USERS_FROM_LISTS: list[str] | None = None
# list/dropdown if you do not want these dbs to show as available.
# The available list is generated by driver installed, and some engines have multiple
# drivers.
-# e.g., DBS_AVAILABLE_DENYLIST: Dict[str, Set[str]] = {"databricks": {"pyhive", "pyodbc"}}
+# e.g., DBS_AVAILABLE_DENYLIST: Dict[str, Set[str]] = {"databricks": {"pyhive", "pyodbc"}}  # noqa: E501
DBS_AVAILABLE_DENYLIST: dict[str, set[str]] = {}
# This auth provider is used by background (offline) tasks that need to access
@@ -1545,7 +1545,7 @@ TEST_DATABASE_CONNECTION_TIMEOUT = timedelta(seconds=30)
# Details needed for databases that allows user to authenticate using personal OAuth2
# tokens. See https://github.com/apache/superset/issues/20300 for more information. The
# scope and URIs are usually optional.
-# NOTE that if you change the id, scope, or URIs in this file, you probably need to purge
+# NOTE that if you change the id, scope, or URIs in this file, you probably need to purge  # noqa: E501
# the existing tokens from the database. This needs to be done by running a query to
# delete the existing tokens.
DATABASE_OAUTH2_CLIENTS: dict[str, dict[str, Any]] = {
@@ -1594,7 +1594,7 @@ TALISMAN_CONFIG = {
"data:",
"https://apachesuperset.gateway.scarf.sh",
"https://static.scarf.sh/",
-# "https://avatars.slack-edge.com",  # Uncomment when SLACK_ENABLE_AVATARS is True
+# "https://avatars.slack-edge.com",  # Uncomment when SLACK_ENABLE_AVATARS is True  # noqa: E501
],
"worker-src": ["'self'", "blob:"],
"connect-src": [
@@ -1729,7 +1729,7 @@ GLOBAL_ASYNC_QUERIES_JWT_COOKIE_SAMESITE: None | (Literal["None", "Lax", "Strict
None
)
GLOBAL_ASYNC_QUERIES_JWT_COOKIE_DOMAIN = None
-GLOBAL_ASYNC_QUERIES_JWT_SECRET = "test-secret-change-me"
+GLOBAL_ASYNC_QUERIES_JWT_SECRET = "test-secret-change-me"  # noqa: S105
GLOBAL_ASYNC_QUERIES_TRANSPORT: Literal["polling", "ws"] = "polling"
GLOBAL_ASYNC_QUERIES_POLLING_DELAY = int(
timedelta(milliseconds=500).total_seconds() * 1000
@@ -1760,9 +1760,9 @@ GLOBAL_ASYNC_QUERIES_CACHE_BACKEND = {
# Embedded config options
GUEST_ROLE_NAME = "Public"
-GUEST_TOKEN_JWT_SECRET = "test-guest-secret-change-me"
+GUEST_TOKEN_JWT_SECRET = "test-guest-secret-change-me"  # noqa: S105
-GUEST_TOKEN_JWT_ALGO = "HS256"
+GUEST_TOKEN_JWT_ALGO = "HS256"  # noqa: S105
-GUEST_TOKEN_HEADER_NAME = "X-GuestToken"
+GUEST_TOKEN_HEADER_NAME = "X-GuestToken"  # noqa: S105
GUEST_TOKEN_JWT_EXP_SECONDS = 300  # 5 minutes
# Guest token audience for the embedded superset, either string or callable
GUEST_TOKEN_JWT_AUDIENCE: Callable[[], str] | str | None = None
@@ -1829,7 +1829,7 @@ ZIPPED_FILE_MAX_SIZE = 100 * 1024 * 1024  # 100MB
# Max allowed compression ratio for a zipped file
ZIP_FILE_MAX_COMPRESS_RATIO = 200.0
-# Configuration for environment tag shown on the navbar. Setting 'text' to '' will hide the tag.
+# Configuration for environment tag shown on the navbar. Setting 'text' to '' will hide the tag.  # noqa: E501
# 'color' can either be a hex color code, or a dot-indexed theme color (e.g. error.base)
ENVIRONMENT_TAG_CONFIG = {
"variable": "SUPERSET_ENV",


@@ -404,7 +404,7 @@ class BaseDatasource(AuditMixinNullable, ImportExportMixin):  # pylint: disable=
"select_star": self.select_star,
}
-def data_for_slices(  # pylint: disable=too-many-locals
+def data_for_slices(  # pylint: disable=too-many-locals  # noqa: C901
self, slices: list[Slice]
) -> dict[str, Any]:
"""
@@ -507,7 +507,7 @@ class BaseDatasource(AuditMixinNullable, ImportExportMixin):  # pylint: disable=
return data
@staticmethod
-def filter_values_handler(  # pylint: disable=too-many-arguments
+def filter_values_handler(  # pylint: disable=too-many-arguments  # noqa: C901
values: FilterValues | None,
operator: str,
target_generic_type: utils.GenericDataType,
@@ -727,7 +727,7 @@ class BaseDatasource(AuditMixinNullable, ImportExportMixin):  # pylint: disable=
:param template_processor: The template processor to apply to the filters.
:returns: A list of SQL clauses to be ANDed together.
-"""
+"""  # noqa: E501
template_processor = template_processor or self.get_template_processor()
all_filters: list[TextClause] = []
@@ -1062,7 +1062,7 @@ class SqlMetric(AuditMixinNullable, ImportExportMixin, CertificationMixin, Model
"extra",
"warning_text",
]
-update_from_object_fields = list(s for s in export_fields if s != "table_id")
+update_from_object_fields = list(s for s in export_fields if s != "table_id")  # noqa: C400
export_parent = "table"
def __repr__(self) -> str:
@@ -1749,7 +1749,7 @@ class SqlaTable(
# errors. This is particularly important for database OAuth2, see SIP-85.
raise
except Exception as ex:  # pylint: disable=broad-except
-# TODO (betodealmeida): review exception handling while querying the external
+# TODO (betodealmeida): review exception handling while querying the external  # noqa: E501
# database. Ideally we'd expect and handle external database error, but
# everything else / the default should be to let things bubble up.
df = pd.DataFrame()
@@ -1937,7 +1937,7 @@ class SqlaTable(
def default_query(qry: Query) -> Query:
return qry.filter_by(is_sqllab_view=False)
-def has_extra_cache_key_calls(self, query_obj: QueryObjectDict) -> bool:
+def has_extra_cache_key_calls(self, query_obj: QueryObjectDict) -> bool:  # noqa: C901
"""
Detects the presence of calls to `ExtraCache` methods in items in query_obj that
can be templated. If any are present, the query must be evaluated to extract
@@ -2033,7 +2033,7 @@ class SqlaTable(
"""
session = inspect(target).session  # pylint: disable=disallowed-name
-# Forces an update to the table's changed_on value when a metric or column on the
+# Forces an update to the table's changed_on value when a metric or column on the  # noqa: E501
# table is updated. This busts the cache key for all charts that use the table.
session.execute(update(SqlaTable).where(SqlaTable.id == target.table.id))


@@ -23,7 +23,7 @@ from typing import Callable, TYPE_CHECKING, TypeVar
from uuid import UUID
from flask_babel import lazy_gettext as _
-from sqlalchemy.engine.url import URL as SqlaURL
+from sqlalchemy.engine.url import URL as SqlaURL  # noqa: N811
from sqlalchemy.exc import NoSuchTableError
from sqlalchemy.ext.declarative import DeclarativeMeta
from sqlalchemy.orm.exc import ObjectDeletedError


@@ -27,7 +27,7 @@ USER_AGENT = "Apache Superset"
NULL_STRING = "<NULL>"
EMPTY_STRING = "<empty string>"
-CHANGE_ME_SECRET_KEY = "CHANGE_ME_TO_A_COMPLEX_RANDOM_SECRET"
+CHANGE_ME_SECRET_KEY = "CHANGE_ME_TO_A_COMPLEX_RANDOM_SECRET"  # noqa: S105
# UUID for the examples database
EXAMPLES_DB_UUID = "a2dc77af-e654-49bb-b321-40f6b559a1ee"


@@ -51,7 +51,7 @@ class DatabaseDAO(BaseDAO[Database]):
of the credentials.
The masked values should be unmasked before the database is updated.
-"""
+"""  # noqa: E501
if item and attributes and "encrypted_extra" in attributes:
attributes["encrypted_extra"] = item.db_engine_spec.unmask_encrypted_extra(
@@ -181,7 +181,7 @@ class SSHTunnelDAO(BaseDAO[SSHTunnel]):
the aforementioned fields.
The masked values should be unmasked before the ssh tunnel is updated.
-"""
+"""  # noqa: E501
# ID cannot be updated so we remove it if present in the payload
if item and attributes:


@@ -319,7 +319,7 @@ class TagDAO(BaseDAO[Tag]):
Example:
favorited_ids([tag1, tag2, tag3])
Output: [tag_id1, tag_id3]  # if the current user has favorited tag1 and tag3
-"""
+"""  # noqa: E501
ids = [tag.id for tag in tags]
return [
star.tag_id


@@ -481,7 +481,7 @@ class DashboardRestApi(BaseSupersetModelRestApi):
$ref: '#/components/responses/403'
404:
$ref: '#/components/responses/404'
-"""
+"""  # noqa: E501
try:
tabs = DashboardDAO.get_tabs_for_dashboard(id_or_slug)
result = self.tab_schema.dump(tabs)


@@ -18,18 +18,18 @@ from marshmallow import fields, Schema
class DashboardPermalinkStateSchema(Schema):
-dataMask = fields.Dict(
+dataMask = fields.Dict(  # noqa: N815
required=False,
allow_none=True,
metadata={"description": "Data mask used for native filter state"},
)
-activeTabs = fields.List(
+activeTabs = fields.List(  # noqa: N815
fields.String(),
required=False,
allow_none=True,
metadata={"description": "Current active dashboard tabs"},
)
-urlParams = fields.List(
+urlParams = fields.List(  # noqa: N815
fields.Tuple(
(
fields.String(
@@ -55,7 +55,7 @@ class DashboardPermalinkStateSchema(Schema):
class DashboardPermalinkSchema(Schema):
-dashboardId = fields.String(
+dashboardId = fields.String(  # noqa: N815
required=True,
allow_none=False,
metadata={"description": "The id or slug of the dashboard"},


@@ -180,7 +180,7 @@ class DashboardJSONMetadataSchema(Schema):
This field was removed in https://github.com/apache/superset/pull/23228, but might
be present in old exports.
-"""
+"""  # noqa: E501
if "show_native_filters" in data:
del data["show_native_filters"]
@@ -438,18 +438,18 @@ class DashboardColorsConfigUpdateSchema(BaseDashboardSchema):
class DashboardScreenshotPostSchema(Schema):
-dataMask = fields.Dict(
+dataMask = fields.Dict(  # noqa: N815
keys=fields.Str(),
values=fields.Raw(),
metadata={"description": "An object representing the data mask."},
)
-activeTabs = fields.List(
+activeTabs = fields.List(  # noqa: N815
fields.Str(), metadata={"description": "A list representing active tabs."}
)
anchor = fields.String(
metadata={"description": "A string representing the anchor."}
)
-urlParams = fields.List(
+urlParams = fields.List(  # noqa: N815
fields.Tuple(
(fields.Str(), fields.Str()),
),
@@ -466,7 +466,7 @@ class GetFavStarIdsSchema(Schema):
result = fields.List(
fields.Nested(ChartFavStarResponseResult),
metadata={
-"description": "A list of results for each corresponding chart in the request"
+"description": "A list of results for each corresponding chart in the request"  # noqa: E501
},
)
@@ -510,9 +510,9 @@ class DashboardCacheScreenshotResponseSchema(Schema):
class CacheScreenshotSchema(Schema):
-dataMask = fields.Dict(keys=fields.Str(), values=fields.Raw(), required=False)
+dataMask = fields.Dict(keys=fields.Str(), values=fields.Raw(), required=False)  # noqa: N815
-activeTabs = fields.List(fields.Str(), required=False)
+activeTabs = fields.List(fields.Str(), required=False)  # noqa: N815
anchor = fields.Str(required=False)
-urlParams = fields.List(
+urlParams = fields.List(  # noqa: N815
fields.List(fields.Str(), validate=lambda x: len(x) == 2), required=False
)


@@ -405,7 +405,7 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
log_to_statsd=False,
)
@requires_json
-def post(self) -> FlaskResponse:
+def post(self) -> FlaskResponse:  # noqa: C901
"""Create a new database.
---
post:
@@ -2076,7 +2076,7 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
if (
hasattr(engine_spec, "parameters_json_schema")
and hasattr(engine_spec, "sqlalchemy_uri_placeholder")
-and getattr(engine_spec, "default_driver") in drivers
+and engine_spec.default_driver in drivers
):
payload["parameters"] = engine_spec.parameters_json_schema()
payload["sqlalchemy_uri_placeholder"] = (
@@ -2260,7 +2260,7 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
$ref: '#/components/responses/404'
500:
$ref: '#/components/responses/500'
-"""
+"""  # noqa: E501
database = DatabaseDAO.find_by_id(pk)
if not database:
return self.response_404()


@@ -652,7 +652,7 @@ class TableMetadataOptionsResponseSchema(Schema):
class TableMetadataColumnsResponseSchema(Schema):
keys = fields.List(fields.String(), metadata={"description": ""})
-longType = fields.String(
+longType = fields.String(  # noqa: N815
metadata={"description": "The actual backend long type for the column"}
)
name = fields.String(metadata={"description": "The column name"})
@@ -697,7 +697,7 @@ class TableMetadataResponseSchema(Schema):
fields.Nested(TableMetadataColumnsResponseSchema),
metadata={"description": "A list of columns and their metadata"},
)
-foreignKeys = fields.List(
+foreignKeys = fields.List(  # noqa: N815
fields.Nested(TableMetadataForeignKeysIndexesResponseSchema),
metadata={"description": "A list of foreign keys and their metadata"},
)
@@ -705,11 +705,11 @@ class TableMetadataResponseSchema(Schema):
fields.Nested(TableMetadataForeignKeysIndexesResponseSchema),
metadata={"description": "A list of indexes and their metadata"},
)
-primaryKey = fields.Nested(
+primaryKey = fields.Nested(  # noqa: N815
TableMetadataPrimaryKeyResponseSchema,
metadata={"description": "Primary keys metadata"},
)
-selectStar = fields.String(metadata={"description": "SQL select star"})
+selectStar = fields.String(metadata={"description": "SQL select star"})  # noqa: N815
class TableExtraMetadataResponseSchema(Schema):
@@ -884,7 +884,7 @@ class ImportV1DatabaseSchema(Schema):
raise ValidationError("Must provide a password for the database")
@validates_schema
-def validate_ssh_tunnel_credentials(
+def validate_ssh_tunnel_credentials(  # noqa: C901
self, data: dict[str, Any], **kwargs: Any
) -> None:
"""If ssh_tunnel has a masked credentials, credentials are required"""
@@ -973,7 +973,7 @@ class EngineInformationSchema(Schema):
)
supports_dynamic_catalog = fields.Boolean(
metadata={
-"description": "The database supports multiple catalogs in a single connection"
+"description": "The database supports multiple catalogs in a single connection"  # noqa: E501
}
)
supports_oauth2 = fields.Boolean(


@@ -1052,7 +1052,7 @@ class DatasetRestApi(BaseSupersetModelRestApi):
$ref: '#/components/responses/404'
500:
$ref: '#/components/responses/500'
-"""
+"""  # noqa: E501
try:
body = DatasetCacheWarmUpRequestSchema().load(request.json)
except ValidationError as error:


@@ -300,7 +300,7 @@ class DatasetCacheWarmUpRequestSchema(Schema):
)
dashboard_id = fields.Integer(
metadata={
-"description": "The ID of the dashboard to get filters for when warming cache"
+"description": "The ID of the dashboard to get filters for when warming cache"  # noqa: E501
}
)
extra_filters = fields.String(


@@ -94,7 +94,7 @@ def get_engine_spec(backend: str, driver: Optional[str] = None) -> type[BaseEngi
supporting that driver exists then a backend-only match is done, in order to allow new
drivers to work with Superset even if they are not listed in the DB engine spec
drivers.
-"""
+"""  # noqa: E501
engine_specs = load_engine_specs()
if driver is not None:
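
The fallback described in the docstring means an exact backend+driver match wins, but an unknown driver still resolves to the backend's spec rather than failing. A usage sketch, assuming the function is imported from `superset.db_engine_specs`:

from superset.db_engine_specs import get_engine_spec

spec = get_engine_spec("postgresql", "psycopg2")        # exact backend+driver match
fallback = get_engine_spec("postgresql", "new_driver")  # backend-only fallback
assert fallback.engine == "postgresql"  # specs expose an `engine` attribute
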
@@ -121,7 +121,7 @@ backend_replacements = {
# pylint: disable=too-many-branches
-def get_available_engine_specs() -> dict[type[BaseEngineSpec], set[str]]:
+def get_available_engine_specs() -> dict[type[BaseEngineSpec], set[str]]:  # noqa: C901
"""
Return available engine specs and installed drivers for them.
"""


@@ -205,7 +205,7 @@ class BaseEngineSpec:  # pylint: disable=too-many-public-methods
engine_name: str | None = None  # for user messages, overridden in child classes
-# These attributes map the DB engine spec to one or more SQLAlchemy dialects/drivers;
+# These attributes map the DB engine spec to one or more SQLAlchemy dialects/drivers;  # noqa: E501
# see the ``supports_url`` and ``supports_backend`` methods below.
engine = "base"  # str as defined in sqlalchemy.engine.engine
engine_aliases: set[str] = set()
@@ -410,12 +410,12 @@ class BaseEngineSpec:  # pylint: disable=too-many-public-methods
# if True, database will be listed as option in the upload file form
supports_file_upload = True
-# Is the DB engine spec able to change the default schema? This requires implementing
+# Is the DB engine spec able to change the default schema? This requires implementing  # noqa: E501
# a custom `adjust_engine_params` method.
supports_dynamic_schema = False
# Does the DB support catalogs? A catalog here is a group of schemas, and has
-# different names depending on the DB: BigQuery calles it a "project", Postgres calls
+# different names depending on the DB: BigQuery calles it a "project", Postgres calls  # noqa: E501
# it a "database", Trino calls it a "catalog", etc.
#
# When this is changed to true in a DB engine spec it MUST support the
@@ -433,7 +433,7 @@ class BaseEngineSpec:  # pylint: disable=too-many-public-methods
oauth2_scope = ""
oauth2_authorization_request_uri: str | None = None  # pylint: disable=invalid-name
oauth2_token_request_uri: str | None = None
-oauth2_token_request_type = "data"
+oauth2_token_request_type = "data"  # noqa: S105
# Driver-specific exception that should be mapped to OAuth2RedirectError
oauth2_exception = OAuth2RedirectError
@@ -690,7 +690,7 @@ class BaseEngineSpec:  # pylint: disable=too-many-public-methods
) -> str | None:
"""
Return the schema configured in a SQLALchemy URI and connection arguments, if any.
-"""
+"""  # noqa: E501
return None
@classmethod
@@ -719,7 +719,7 @@ class BaseEngineSpec:  # pylint: disable=too-many-public-methods
Determining the correct schema is crucial for managing access to data, so please
make sure you understand this logic when working on a new DB engine spec.
-"""
+"""  # noqa: E501
# dynamic schema varies on a per-query basis
if cls.supports_dynamic_schema:
return query.schema
@@ -808,7 +808,7 @@ class BaseEngineSpec:  # pylint: disable=too-many-public-methods
... connection = engine.connect()
... connection.execute(sql)
-"""
+"""  # noqa: E501
return database.get_sqla_engine(catalog=catalog, schema=schema, source=source)
@classmethod
@@ -1101,7 +1101,7 @@ class BaseEngineSpec:  # pylint: disable=too-many-public-methods
"""
# old method that doesn't work with catalogs
if hasattr(cls, "extra_table_metadata"):
-warnings.warn(
+warnings.warn(  # noqa: B028
"The `extra_table_metadata` method is deprecated, please implement "
"the `get_extra_table_metadata` method in the DB engine spec.",
DeprecationWarning,
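
B028 asks for an explicit `stacklevel` on `warnings.warn` so the warning is attributed to the caller rather than to this helper; the `# noqa` opts out. The rule's preferred form would look like:

import warnings

warnings.warn(
    "The `extra_table_metadata` method is deprecated, please implement "
    "the `get_extra_table_metadata` method in the DB engine spec.",
    DeprecationWarning,
    stacklevel=2,  # point the warning at the calling frame
)
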
@@ -1145,7 +1145,7 @@ class BaseEngineSpec:  # pylint: disable=too-many-public-methods
return sql
@classmethod
-def apply_top_to_sql(cls, sql: str, limit: int) -> str:
+def apply_top_to_sql(cls, sql: str, limit: int) -> str:  # noqa: C901
"""
Alters the SQL statement to apply a TOP clause
:param limit: Maximum number of rows to be returned by the query
@@ -1419,7 +1419,7 @@ class BaseEngineSpec:  # pylint: disable=too-many-public-methods
that when catalog support is added to Superset the interface remains the same.
This is important because DB engine specs can be installed from 3rd party
packages, so we want to keep these methods as stable as possible.
-"""
+"""  # noqa: E501
return uri, {
**connect_args,
**cls.enforce_uri_query_params.get(uri.get_driver_name(), {}),


@@ -285,7 +285,7 @@ class ClickHouseConnectEngineSpec(BasicParametersMixin, ClickHouseEngineSpec):
return cls._function_names
try:
names = database.get_df(
-"SELECT name FROM system.functions UNION ALL "
+"SELECT name FROM system.functions UNION ALL "  # noqa: S608
+ "SELECT name FROM system.table_functions LIMIT 10000"
)["name"].tolist()
cls._function_names = names


@@ -177,7 +177,7 @@ class CouchbaseEngineSpec(BasicParametersMixin, BaseEngineSpec):
) -> list[SupersetError]:
"""
Couchbase local server needs hostname and port but on cloud we need only connection String along with credentials to connect.
-"""
+"""  # noqa: E501
errors: list[SupersetError] = []
required = {"host", "username", "password", "database"}
@@ -197,7 +197,7 @@ class CouchbaseEngineSpec(BasicParametersMixin, BaseEngineSpec):
host = parameters.get("host", None)
if not host:
return errors
-# host can be a connection string in case of couchbase cloud. So Connection Check is not required in that case.
+# host can be a connection string in case of couchbase cloud. So Connection Check is not required in that case.  # noqa: E501
if not is_hostname_valid(host):
errors.append(
SupersetError(


@@ -42,7 +42,7 @@ if TYPE_CHECKING:
COLUMN_DOES_NOT_EXIST_REGEX = re.compile("no such column: (?P<column_name>.+)")
DEFAULT_ACCESS_TOKEN_URL = (
-"https://app.motherduck.com/token-request?appName=Superset&close=y"
+"https://app.motherduck.com/token-request?appName=Superset&close=y"  # noqa: S105
)
@@ -112,7 +112,7 @@ class DuckDBParametersMixin:
"""
Build SQLAlchemy URI for connecting to a DuckDB database.
If an access token is specified, return a URI to connect to a MotherDuck database.
-"""
+"""  # noqa: E501
if parameters is None:
parameters = {}
query = parameters.get("query", {})
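
For context, the two URI shapes this mixin can produce might look roughly like the following; the database name and token are placeholders, and the exact MotherDuck query-string key is an assumption:

# plain local DuckDB file
local_uri = "duckdb:///path/to/analytics.duckdb"

# MotherDuck database, chosen when an access token is supplied
motherduck_uri = "duckdb:///md:my_db?motherduck_token=<TOKEN>"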


@@ -114,7 +114,7 @@ class GSheetsEngineSpec(ShillelaghEngineSpec):
oauth2_authorization_request_uri = (  # pylint: disable=invalid-name
"https://accounts.google.com/o/oauth2/v2/auth"
)
-oauth2_token_request_uri = "https://oauth2.googleapis.com/token"
+oauth2_token_request_uri = "https://oauth2.googleapis.com/token"  # noqa: S105
oauth2_exception = UnauthenticatedError
@classmethod
@@ -271,7 +271,7 @@ class GSheetsEngineSpec(ShillelaghEngineSpec):
return errors
try:
-results = conn.execute(f'SELECT * FROM "{url}" LIMIT 1')
+results = conn.execute(f'SELECT * FROM "{url}" LIMIT 1')  # noqa: S608
results.fetchall()
except Exception:  # pylint: disable=broad-except
errors.append(


@@ -110,12 +110,12 @@ class HiveEngineSpec(PrestoEngineSpec):
TimeGrain.MINUTE: "from_unixtime(unix_timestamp({col}), 'yyyy-MM-dd HH:mm:00')",
TimeGrain.HOUR: "from_unixtime(unix_timestamp({col}), 'yyyy-MM-dd HH:00:00')",
TimeGrain.DAY: "from_unixtime(unix_timestamp({col}), 'yyyy-MM-dd 00:00:00')",
-TimeGrain.WEEK: "date_format(date_sub({col}, CAST(7-from_unixtime(unix_timestamp({col}),'u') as int)), 'yyyy-MM-dd 00:00:00')",
+TimeGrain.WEEK: "date_format(date_sub({col}, CAST(7-from_unixtime(unix_timestamp({col}),'u') as int)), 'yyyy-MM-dd 00:00:00')",  # noqa: E501
TimeGrain.MONTH: "from_unixtime(unix_timestamp({col}), 'yyyy-MM-01 00:00:00')",
-TimeGrain.QUARTER: "date_format(add_months(trunc({col}, 'MM'), -(month({col})-1)%3), 'yyyy-MM-dd 00:00:00')",
+TimeGrain.QUARTER: "date_format(add_months(trunc({col}, 'MM'), -(month({col})-1)%3), 'yyyy-MM-dd 00:00:00')",  # noqa: E501
TimeGrain.YEAR: "from_unixtime(unix_timestamp({col}), 'yyyy-01-01 00:00:00')",
-TimeGrain.WEEK_ENDING_SATURDAY: "date_format(date_add({col}, INT(6-from_unixtime(unix_timestamp({col}), 'u'))), 'yyyy-MM-dd 00:00:00')",
+TimeGrain.WEEK_ENDING_SATURDAY: "date_format(date_add({col}, INT(6-from_unixtime(unix_timestamp({col}), 'u'))), 'yyyy-MM-dd 00:00:00')",  # noqa: E501
-TimeGrain.WEEK_STARTING_SUNDAY: "date_format(date_add({col}, -INT(from_unixtime(unix_timestamp({col}), 'u'))), 'yyyy-MM-dd 00:00:00')",
+TimeGrain.WEEK_STARTING_SUNDAY: "date_format(date_add({col}, -INT(from_unixtime(unix_timestamp({col}), 'u'))), 'yyyy-MM-dd 00:00:00')",  # noqa: E501
}
# Scoping regex at class level to avoid recompiling
@@ -338,7 +338,7 @@ class HiveEngineSpec(PrestoEngineSpec):
return None
@classmethod
-def handle_cursor(  # pylint: disable=too-many-locals
+def handle_cursor(  # pylint: disable=too-many-locals  # noqa: C901
cls, cursor: Any, query: Query
) -> None:
"""Updates progress information"""
@@ -404,7 +404,7 @@ class HiveEngineSpec(PrestoEngineSpec):
db.session.commit()  # pylint: disable=consider-using-transaction
if sleep_interval := current_app.config.get("HIVE_POLL_INTERVAL"):
logger.warning(
-"HIVE_POLL_INTERVAL is deprecated and will be removed in 3.0. Please use DB_POLL_INTERVAL_SECONDS instead"
+"HIVE_POLL_INTERVAL is deprecated and will be removed in 3.0. Please use DB_POLL_INTERVAL_SECONDS instead"  # noqa: E501
)
else:
sleep_interval = current_app.config["DB_POLL_INTERVAL_SECONDS"].get(
@@ -553,7 +553,7 @@ class HiveEngineSpec(PrestoEngineSpec):
# Must be Hive connection, enable impersonation, and set optional param
# auth=LDAP|KERBEROS
-# this will set hive.server2.proxy.user=$effective_username on connect_args['configuration']
+# this will set hive.server2.proxy.user=$effective_username on connect_args['configuration']  # noqa: E501
if backend_name == "hive" and username is not None:
configuration = connect_args.get("configuration", {})
configuration["hive.server2.proxy.user"] = username


@@ -124,7 +124,7 @@ class ImpalaEngineSpec(BaseEngineSpec):
while status in unfinished_states:
db.session.refresh(query)
query = db.session.query(Query).filter_by(id=query_id).one()
-# if query cancelation was requested prior to the handle_cursor call, but
+# if query cancelation was requested prior to the handle_cursor call, but  # noqa: E501
# the query was still executed
# modified in stop_query in views / core.py is reflected here.
# stop query


@@ -46,7 +46,7 @@ class KustoSqlEngineSpec(BaseEngineSpec):  # pylint: disable=abstract-method
TimeGrain.SECOND: "DATEADD(second, \
'DATEDIFF(second, 2000-01-01', {col}), '2000-01-01')",
TimeGrain.MINUTE: "DATEADD(minute, DATEDIFF(minute, 0, {col}), 0)",
-TimeGrain.FIVE_MINUTES: "DATEADD(minute, DATEDIFF(minute, 0, {col}) / 5 * 5, 0)",
+TimeGrain.FIVE_MINUTES: "DATEADD(minute, DATEDIFF(minute, 0, {col}) / 5 * 5, 0)",  # noqa: E501
TimeGrain.TEN_MINUTES: "DATEADD(minute, \
DATEDIFF(minute, 0, {col}) / 10 * 10, 0)",
TimeGrain.FIFTEEN_MINUTES: "DATEADD(minute, \


@@ -31,8 +31,8 @@ class KylinEngineSpec(BaseEngineSpec):  # pylint: disable=abstract-method
_time_grain_expressions = {
None: "{col}",
-TimeGrain.SECOND: "CAST(FLOOR(CAST({col} AS TIMESTAMP) TO SECOND) AS TIMESTAMP)",
+TimeGrain.SECOND: "CAST(FLOOR(CAST({col} AS TIMESTAMP) TO SECOND) AS TIMESTAMP)",  # noqa: E501
-TimeGrain.MINUTE: "CAST(FLOOR(CAST({col} AS TIMESTAMP) TO MINUTE) AS TIMESTAMP)",
+TimeGrain.MINUTE: "CAST(FLOOR(CAST({col} AS TIMESTAMP) TO MINUTE) AS TIMESTAMP)",  # noqa: E501
TimeGrain.HOUR: "CAST(FLOOR(CAST({col} AS TIMESTAMP) TO HOUR) AS TIMESTAMP)",
TimeGrain.DAY: "CAST(FLOOR(CAST({col} AS TIMESTAMP) TO DAY) AS DATE)",
TimeGrain.WEEK: "CAST(FLOOR(CAST({col} AS TIMESTAMP) TO WEEK) AS DATE)",


@@ -222,7 +222,7 @@ def generate_table() -> list[list[Any]]:
rows = []  # pylint: disable=redefined-outer-name
rows.append(["Feature"] + list(info))  # header row
-rows.append(["Module"] + list(db_info["module"] for db_info in info.values()))
+rows.append(["Module"] + list(db_info["module"] for db_info in info.values()))  # noqa: C400
# descriptive
keys = [
@@ -243,14 +243,14 @@ def generate_table() -> list[list[Any]]:
]
for key in keys:
rows.append(
-[DATABASE_DETAILS[key]] + list(db_info[key] for db_info in info.values())
+[DATABASE_DETAILS[key]] + list(db_info[key] for db_info in info.values())  # noqa: C400
)
# basic
for time_grain in TimeGrain:
rows.append(
[f"Has time grain {time_grain.name}"]
-+ list(db_info["time_grains"][time_grain.name] for db_info in info.values())
++ list(db_info["time_grains"][time_grain.name] for db_info in info.values())  # noqa: C400
)
keys = [
"masked_encrypted_extra",
@@ -259,7 +259,7 @@ def generate_table() -> list[list[Any]]:
]
for key in keys:
rows.append(
-[BASIC_FEATURES[key]] + list(db_info[key] for db_info in info.values())
+[BASIC_FEATURES[key]] + list(db_info[key] for db_info in info.values())  # noqa: C400
)
# nice to have
@@ -280,7 +280,7 @@ def generate_table() -> list[list[Any]]:
for key in keys:
rows.append(
[NICE_TO_HAVE_FEATURES[key]]
-+ list(db_info[key] for db_info in info.values())
++ list(db_info[key] for db_info in info.values())  # noqa: C400
)
# advanced
@@ -291,10 +291,10 @@ def generate_table() -> list[list[Any]]:
]
for key in keys:
rows.append(
-[ADVANCED_FEATURES[key]] + list(db_info[key] for db_info in info.values())
+[ADVANCED_FEATURES[key]] + list(db_info[key] for db_info in info.values())  # noqa: C400
)
-rows.append(["Score"] + list(db_info["score"] for db_info in info.values()))
+rows.append(["Score"] + list(db_info["score"] for db_info in info.values()))  # noqa: C400
return rows


@@ -160,7 +160,7 @@ class MssqlEngineSpec(BaseEngineSpec):
def extract_error_message(cls, ex: Exception) -> str:
if str(ex).startswith("(8155,"):
return (
-f"{cls.engine} error: All your SQL functions need to "
+f"{cls.engine} error: All your SQL functions need to "  # noqa: S608
"have an alias on MSSQL. For example: SELECT COUNT(*) AS C1 FROM TABLE1"
)
return f"{cls.engine} error: {cls._extract_error_message(ex)}"


@@ -214,7 +214,7 @@ def _find_columns_to_sanitize(cursor: Any) -> list[PlacedSanitizeFunc]:
:param cursor: the result set cursor
:returns: the list of tuples consisting of the column index and sanitization function
-"""
+"""  # noqa: E501
return [
PlacedSanitizeFunc(i, _sanitized_ocient_type_codes[cursor.description[i][1]])
for i in range(len(cursor.description))
@@ -317,9 +317,7 @@ class OcientEngineSpec(BaseEngineSpec):
rows: list[tuple[Any, ...]] = super().fetch_data(cursor, limit)
except Exception:
with OcientEngineSpec.query_id_mapping_lock:
-del OcientEngineSpec.query_id_mapping[
-getattr(cursor, "superset_query_id")
-]
+del OcientEngineSpec.query_id_mapping[cursor.superset_query_id]
raise
# TODO: Unsure if we need to verify that we are receiving rows:
@@ -376,7 +374,7 @@ class OcientEngineSpec(BaseEngineSpec):
OcientEngineSpec.query_id_mapping[query.id] = cursor.query_id
# Add the query id to the cursor
-setattr(cursor, "superset_query_id", query.id)
+cursor.superset_query_id = query.id
return super().handle_cursor(cursor, query)
@classmethod


@@ -44,14 +44,14 @@ class PinotEngineSpec(BaseEngineSpec):
+ "CAST({col} AS TIMESTAMP)), 900000) AS TIMESTAMP)",
TimeGrain.THIRTY_MINUTES: "CAST(ROUND(DATE_TRUNC('minute', "
+ "CAST({col} AS TIMESTAMP)), 1800000) AS TIMESTAMP)",
-TimeGrain.HOUR: "CAST(DATE_TRUNC('hour', CAST({col} AS TIMESTAMP)) AS TIMESTAMP)",
+TimeGrain.HOUR: "CAST(DATE_TRUNC('hour', CAST({col} AS TIMESTAMP)) AS TIMESTAMP)",  # noqa: E501
TimeGrain.DAY: "CAST(DATE_TRUNC('day', CAST({col} AS TIMESTAMP)) AS TIMESTAMP)",
-TimeGrain.WEEK: "CAST(DATE_TRUNC('week', CAST({col} AS TIMESTAMP)) AS TIMESTAMP)",
+TimeGrain.WEEK: "CAST(DATE_TRUNC('week', CAST({col} AS TIMESTAMP)) AS TIMESTAMP)",  # noqa: E501
TimeGrain.MONTH: "CAST(DATE_TRUNC('month', "
+ "CAST({col} AS TIMESTAMP)) AS TIMESTAMP)",
TimeGrain.QUARTER: "CAST(DATE_TRUNC('quarter', "
+ "CAST({col} AS TIMESTAMP)) AS TIMESTAMP)",
-TimeGrain.YEAR: "CAST(DATE_TRUNC('year', CAST({col} AS TIMESTAMP)) AS TIMESTAMP)",
+TimeGrain.YEAR: "CAST(DATE_TRUNC('year', CAST({col} AS TIMESTAMP)) AS TIMESTAMP)",  # noqa: E501
}
@classmethod


@@ -103,13 +103,13 @@ class PostgresBaseEngineSpec(BaseEngineSpec):
    _time_grain_expressions = {
        None: "{col}",
        TimeGrain.SECOND: "DATE_TRUNC('second', {col})",
-       TimeGrain.FIVE_SECONDS: "DATE_TRUNC('minute', {col}) + INTERVAL '5 seconds' * FLOOR(EXTRACT(SECOND FROM {col}) / 5)",
+       TimeGrain.FIVE_SECONDS: "DATE_TRUNC('minute', {col}) + INTERVAL '5 seconds' * FLOOR(EXTRACT(SECOND FROM {col}) / 5)",  # noqa: E501
-       TimeGrain.THIRTY_SECONDS: "DATE_TRUNC('minute', {col}) + INTERVAL '30 seconds' * FLOOR(EXTRACT(SECOND FROM {col}) / 30)",
+       TimeGrain.THIRTY_SECONDS: "DATE_TRUNC('minute', {col}) + INTERVAL '30 seconds' * FLOOR(EXTRACT(SECOND FROM {col}) / 30)",  # noqa: E501
        TimeGrain.MINUTE: "DATE_TRUNC('minute', {col})",
-       TimeGrain.FIVE_MINUTES: "DATE_TRUNC('hour', {col}) + INTERVAL '5 minutes' * FLOOR(EXTRACT(MINUTE FROM {col}) / 5)",
+       TimeGrain.FIVE_MINUTES: "DATE_TRUNC('hour', {col}) + INTERVAL '5 minutes' * FLOOR(EXTRACT(MINUTE FROM {col}) / 5)",  # noqa: E501
-       TimeGrain.TEN_MINUTES: "DATE_TRUNC('hour', {col}) + INTERVAL '10 minutes' * FLOOR(EXTRACT(MINUTE FROM {col}) / 10)",
+       TimeGrain.TEN_MINUTES: "DATE_TRUNC('hour', {col}) + INTERVAL '10 minutes' * FLOOR(EXTRACT(MINUTE FROM {col}) / 10)",  # noqa: E501
-       TimeGrain.FIFTEEN_MINUTES: "DATE_TRUNC('hour', {col}) + INTERVAL '15 minutes' * FLOOR(EXTRACT(MINUTE FROM {col}) / 15)",
+       TimeGrain.FIFTEEN_MINUTES: "DATE_TRUNC('hour', {col}) + INTERVAL '15 minutes' * FLOOR(EXTRACT(MINUTE FROM {col}) / 15)",  # noqa: E501
-       TimeGrain.THIRTY_MINUTES: "DATE_TRUNC('hour', {col}) + INTERVAL '30 minutes' * FLOOR(EXTRACT(MINUTE FROM {col}) / 30)",
+       TimeGrain.THIRTY_MINUTES: "DATE_TRUNC('hour', {col}) + INTERVAL '30 minutes' * FLOOR(EXTRACT(MINUTE FROM {col}) / 30)",  # noqa: E501
        TimeGrain.HOUR: "DATE_TRUNC('hour', {col})",
        TimeGrain.DAY: "DATE_TRUNC('day', {col})",
        TimeGrain.WEEK: "DATE_TRUNC('week', {col})",
@@ -294,7 +294,7 @@ class PostgresEngineSpec(BasicParametersMixin, PostgresBaseEngineSpec):
        SupersetError(
            error_type=SupersetErrorType.QUERY_SECURITY_ACCESS_ERROR,
            message=__(
-               "Users are not allowed to set a search path for security reasons."
+               "Users are not allowed to set a search path for security reasons."  # noqa: E501
            ),
            level=ErrorLevel.ERROR,
        )
@@ -471,7 +471,7 @@ WHERE datistemplate = false;
    """
    try:
        cursor.execute(
-           "SELECT pg_terminate_backend(pid) "
+           "SELECT pg_terminate_backend(pid) "  # noqa: S608
            "FROM pg_stat_activity "
            f"WHERE pid='{cancel_query_id}'"
        )
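
S608 flags SQL assembled from f-strings as a potential injection vector. The pid interpolated here appears to come from Superset's own query tracking rather than user input (an assumption from context), hence the suppression. A parameter-bound variant would satisfy the rule outright; this is a generic DB-API-style sketch, not the project's code:

    def cancel_postgres_query(cursor, cancel_query_id: str) -> None:
        # The driver escapes the bound value, so nothing is interpolated
        # into the SQL string and S608 has nothing to flag.
        cursor.execute(
            "SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE pid = %s",
            (cancel_query_id,),
        )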

View File

@@ -111,7 +111,7 @@ def get_children(column: ResultSetColumnType) -> list[ResultSetColumnType]:
    :param column: dictionary representing a Presto column
    :return: list of dictionaries representing children columns
-   """
+   """  # noqa: E501
    pattern = re.compile(r"(?P<type>\w+)\((?P<children>.*)\)")
    if not column["type"]:
        raise ValueError
@@ -256,15 +256,15 @@ class PrestoBaseEngineSpec(BaseEngineSpec, metaclass=ABCMeta):
    _time_grain_expressions = {
        None: "{col}",
        TimeGrain.SECOND: "date_trunc('second', CAST({col} AS TIMESTAMP))",
-       TimeGrain.FIVE_SECONDS: "date_trunc('second', CAST({col} AS TIMESTAMP)) - interval '1' second * (second(CAST({col} AS TIMESTAMP)) % 5)",
+       TimeGrain.FIVE_SECONDS: "date_trunc('second', CAST({col} AS TIMESTAMP)) - interval '1' second * (second(CAST({col} AS TIMESTAMP)) % 5)",  # noqa: E501
-       TimeGrain.THIRTY_SECONDS: "date_trunc('second', CAST({col} AS TIMESTAMP)) - interval '1' second * (second(CAST({col} AS TIMESTAMP)) % 30)",
+       TimeGrain.THIRTY_SECONDS: "date_trunc('second', CAST({col} AS TIMESTAMP)) - interval '1' second * (second(CAST({col} AS TIMESTAMP)) % 30)",  # noqa: E501
        TimeGrain.MINUTE: "date_trunc('minute', CAST({col} AS TIMESTAMP))",
-       TimeGrain.FIVE_MINUTES: "date_trunc('minute', CAST({col} AS TIMESTAMP)) - interval '1' minute * (minute(CAST({col} AS TIMESTAMP)) % 5)",
+       TimeGrain.FIVE_MINUTES: "date_trunc('minute', CAST({col} AS TIMESTAMP)) - interval '1' minute * (minute(CAST({col} AS TIMESTAMP)) % 5)",  # noqa: E501
-       TimeGrain.TEN_MINUTES: "date_trunc('minute', CAST({col} AS TIMESTAMP)) - interval '1' minute * (minute(CAST({col} AS TIMESTAMP)) % 10)",
+       TimeGrain.TEN_MINUTES: "date_trunc('minute', CAST({col} AS TIMESTAMP)) - interval '1' minute * (minute(CAST({col} AS TIMESTAMP)) % 10)",  # noqa: E501
-       TimeGrain.FIFTEEN_MINUTES: "date_trunc('minute', CAST({col} AS TIMESTAMP)) - interval '1' minute * (minute(CAST({col} AS TIMESTAMP)) % 15)",
+       TimeGrain.FIFTEEN_MINUTES: "date_trunc('minute', CAST({col} AS TIMESTAMP)) - interval '1' minute * (minute(CAST({col} AS TIMESTAMP)) % 15)",  # noqa: E501
-       TimeGrain.HALF_HOUR: "date_trunc('minute', CAST({col} AS TIMESTAMP)) - interval '1' minute * (minute(CAST({col} AS TIMESTAMP)) % 30)",
+       TimeGrain.HALF_HOUR: "date_trunc('minute', CAST({col} AS TIMESTAMP)) - interval '1' minute * (minute(CAST({col} AS TIMESTAMP)) % 30)",  # noqa: E501
        TimeGrain.HOUR: "date_trunc('hour', CAST({col} AS TIMESTAMP))",
-       TimeGrain.SIX_HOURS: "date_trunc('hour', CAST({col} AS TIMESTAMP)) - interval '1' hour * (hour(CAST({col} AS TIMESTAMP)) % 6)",
+       TimeGrain.SIX_HOURS: "date_trunc('hour', CAST({col} AS TIMESTAMP)) - interval '1' hour * (hour(CAST({col} AS TIMESTAMP)) % 6)",  # noqa: E501
        TimeGrain.DAY: "date_trunc('day', CAST({col} AS TIMESTAMP))",
        TimeGrain.WEEK: "date_trunc('week', CAST({col} AS TIMESTAMP))",
        TimeGrain.MONTH: "date_trunc('month', CAST({col} AS TIMESTAMP))",
@@ -512,7 +512,7 @@ class PrestoBaseEngineSpec(BaseEngineSpec, metaclass=ABCMeta):
        if table.schema
        else system_table_name
    )
-   partition_select_clause = f"SELECT * FROM {full_table_name}"
+   partition_select_clause = f"SELECT * FROM {full_table_name}"  # noqa: S608
    sql = dedent(
        f"""\
@@ -768,7 +768,7 @@ class PrestoBaseEngineSpec(BaseEngineSpec, metaclass=ABCMeta):
        return result
    @classmethod
-   def _parse_structural_column(  # pylint: disable=too-many-locals
+   def _parse_structural_column(  # pylint: disable=too-many-locals  # noqa: C901
        cls,
        parent_column_name: str,
        parent_data_type: str,
@@ -1139,7 +1139,7 @@ class PrestoEngineSpec(PrestoBaseEngineSpec):
    )
    @classmethod
-   def expand_data(  # pylint: disable=too-many-locals
+   def expand_data(  # pylint: disable=too-many-locals  # noqa: C901
        cls, columns: list[ResultSetColumnType], data: list[dict[Any, Any]]
    ) -> tuple[
        list[ResultSetColumnType], list[dict[Any, Any]], list[ResultSetColumnType]
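
C901 is ruff's McCabe cyclomatic-complexity check; both suppressed functions are long, branch-heavy parsers. When refactoring is preferred over suppression, replacing an if/elif ladder with a dispatch table is one common way to shed complexity. A generic sketch, unrelated to Presto's actual parsing logic:

    # Branch-heavy version: each elif adds one to cyclomatic complexity.
    def describe_branchy(kind: str) -> str:
        if kind == "row":
            return "structural"
        elif kind == "array":
            return "structural"
        elif kind == "map":
            return "structural"
        else:
            return "scalar"

    # Dispatch-table version: complexity stays flat as cases are added.
    _KINDS = {"row": "structural", "array": "structural", "map": "structural"}

    def describe_flat(kind: str) -> str:
        return _KINDS.get(kind, "scalar")

    assert describe_branchy("map") == describe_flat("map")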

View File

@@ -176,7 +176,7 @@ class RedshiftEngineSpec(BasicParametersMixin, PostgresBaseEngineSpec):
    try:
        logger.info("Killing Redshift PID:%s", str(cancel_query_id))
        cursor.execute(
-           "SELECT pg_cancel_backend(procpid) "
+           "SELECT pg_cancel_backend(procpid) "  # noqa: S608
            "FROM pg_stat_activity "
            f"WHERE procpid='{cancel_query_id}'"
        )

View File

@@ -79,7 +79,7 @@ class TrinoEngineSpec(PrestoBaseEngineSpec):
    # OAuth 2.0
    supports_oauth2 = True
    oauth2_exception = TrinoAuthError
-   oauth2_token_request_type = "data"
+   oauth2_token_request_type = "data"  # noqa: S105
    @classmethod
    def get_extra_table_metadata(
@@ -101,7 +101,7 @@ class TrinoEngineSpec(PrestoBaseEngineSpec):
        latest_parts = tuple([None] * len(col_names))
        metadata["partitions"] = {
-           "cols": sorted(
+           "cols": sorted(  # noqa: C414
                list(
                    {
                        column_name
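
C414 points out that the inner list() call is redundant: sorted() consumes any iterable and always returns a fresh list, so wrapping a set first only costs an extra allocation. A quick demonstration with made-up partition columns:

    column_names = {"ds", "hour", "region"}  # hypothetical example data

    # The two calls are equivalent; the list() wrapper is pure overhead.
    assert sorted(list(column_names)) == sorted(column_names)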

View File

@@ -53,15 +53,15 @@ class YDBEngineSpec(BaseEngineSpec):
    _time_grain_expressions = {
        None: "{col}",
-       TimeGrain.SECOND: "DateTime::MakeDatetime(DateTime::StartOf({col}, Interval('PT1S')))",
+       TimeGrain.SECOND: "DateTime::MakeDatetime(DateTime::StartOf({col}, Interval('PT1S')))",  # noqa: E501
-       TimeGrain.THIRTY_SECONDS: "DateTime::MakeDatetime(DateTime::StartOf({col}, Interval('PT30S')))",
+       TimeGrain.THIRTY_SECONDS: "DateTime::MakeDatetime(DateTime::StartOf({col}, Interval('PT30S')))",  # noqa: E501
-       TimeGrain.MINUTE: "DateTime::MakeDatetime(DateTime::StartOf({col}, Interval('PT1M')))",
+       TimeGrain.MINUTE: "DateTime::MakeDatetime(DateTime::StartOf({col}, Interval('PT1M')))",  # noqa: E501
-       TimeGrain.FIVE_MINUTES: "DateTime::MakeDatetime(DateTime::StartOf({col}, Interval('PT5M')))",
+       TimeGrain.FIVE_MINUTES: "DateTime::MakeDatetime(DateTime::StartOf({col}, Interval('PT5M')))",  # noqa: E501
-       TimeGrain.TEN_MINUTES: "DateTime::MakeDatetime(DateTime::StartOf({col}, Interval('PT10M')))",
+       TimeGrain.TEN_MINUTES: "DateTime::MakeDatetime(DateTime::StartOf({col}, Interval('PT10M')))",  # noqa: E501
-       TimeGrain.FIFTEEN_MINUTES: "DateTime::MakeDatetime(DateTime::StartOf({col}, Interval('PT15M')))",
+       TimeGrain.FIFTEEN_MINUTES: "DateTime::MakeDatetime(DateTime::StartOf({col}, Interval('PT15M')))",  # noqa: E501
-       TimeGrain.THIRTY_MINUTES: "DateTime::MakeDatetime(DateTime::StartOf({col}, Interval('PT30M')))",
+       TimeGrain.THIRTY_MINUTES: "DateTime::MakeDatetime(DateTime::StartOf({col}, Interval('PT30M')))",  # noqa: E501
-       TimeGrain.HOUR: "DateTime::MakeDatetime(DateTime::StartOf({col}, Interval('PT1H')))",
+       TimeGrain.HOUR: "DateTime::MakeDatetime(DateTime::StartOf({col}, Interval('PT1H')))",  # noqa: E501
-       TimeGrain.DAY: "DateTime::MakeDatetime(DateTime::StartOf({col}, Interval('P1D')))",
+       TimeGrain.DAY: "DateTime::MakeDatetime(DateTime::StartOf({col}, Interval('P1D')))",  # noqa: E501
        TimeGrain.WEEK: "DateTime::MakeDatetime(DateTime::StartOfWeek({col}))",
        TimeGrain.MONTH: "DateTime::MakeDatetime(DateTime::StartOfMonth({col}))",
        TimeGrain.QUARTER: "DateTime::MakeDatetime(DateTime::StartOfQuarter({col}))",
@@ -79,9 +79,9 @@ class YDBEngineSpec(BaseEngineSpec):
    sqla_type = cls.get_sqla_column_type(target_type)
    if isinstance(sqla_type, types.Date):
-       return f"DateTime::MakeDate(DateTime::ParseIso8601('{dttm.date().isoformat()}'))"
+       return f"DateTime::MakeDate(DateTime::ParseIso8601('{dttm.date().isoformat()}'))"  # noqa: E501
    if isinstance(sqla_type, types.DateTime):
-       return f"""DateTime::MakeDatetime(DateTime::ParseIso8601('{dttm.isoformat(sep="T", timespec="seconds")}'))"""
+       return f"""DateTime::MakeDatetime(DateTime::ParseIso8601('{dttm.isoformat(sep="T", timespec="seconds")}'))"""  # noqa: E501
    return None
    @staticmethod

View File

@@ -36,7 +36,7 @@ RESOURCE = KeyValueResource.LOCK
@contextmanager
-def KeyValueDistributedLock(  # pylint: disable=invalid-name
+def KeyValueDistributedLock(  # pylint: disable=invalid-name  # noqa: N802
    namespace: str,
    **kwargs: Any,
) -> Iterator[uuid.UUID]:

View File

@@ -27,7 +27,7 @@ class SupersetErrorType(StrEnum):
    Types of errors that can exist within Superset.
    Keep in sync with superset-frontend/packages/superset-ui-core/src/query/types/Query.ts
-   """
+   """  # noqa: E501
    # Frontend errors
    FRONTEND_CSRF_ERROR = "FRONTEND_CSRF_ERROR"
@@ -40,7 +40,7 @@ class SupersetErrorType(StrEnum):
    TABLE_DOES_NOT_EXIST_ERROR = "TABLE_DOES_NOT_EXIST_ERROR"
    SCHEMA_DOES_NOT_EXIST_ERROR = "SCHEMA_DOES_NOT_EXIST_ERROR"
    CONNECTION_INVALID_USERNAME_ERROR = "CONNECTION_INVALID_USERNAME_ERROR"
-   CONNECTION_INVALID_PASSWORD_ERROR = "CONNECTION_INVALID_PASSWORD_ERROR"
+   CONNECTION_INVALID_PASSWORD_ERROR = "CONNECTION_INVALID_PASSWORD_ERROR"  # noqa: S105
    CONNECTION_INVALID_HOSTNAME_ERROR = "CONNECTION_INVALID_HOSTNAME_ERROR"
    CONNECTION_PORT_CLOSED_ERROR = "CONNECTION_PORT_CLOSED_ERROR"
    CONNECTION_INVALID_PORT_ERROR = "CONNECTION_INVALID_PORT_ERROR"
@@ -201,7 +201,7 @@ class ErrorLevel(StrEnum):
    Levels of errors that can exist within Superset.
    Keep in sync with superset-frontend/packages/superset-ui-core/src/query/types/Query.ts
-   """
+   """  # noqa: E501
    INFO = "info"
    WARNING = "warning"

View File

@@ -71,5 +71,5 @@ def load_big_data() -> None:
        add_data(columns=columns, num_rows=10, table_name=f"small_table_{i}")
    print("Creating table with long name")
-   name = "".join(random.choices(string.ascii_letters + string.digits, k=60))
+   name = "".join(random.choices(string.ascii_letters + string.digits, k=60))  # noqa: S311
    add_data(columns=columns, num_rows=10, table_name=name)
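
S311 warns that the random module is not cryptographically secure. For throwaway test fixtures, as here and in the long/lat example further below, predictability is irrelevant; the secrets module is the right tool only when unpredictability matters. A quick contrast:

    import random
    import secrets
    import string

    # Fine for test data: no security property depends on this value.
    table_name = "".join(random.choices(string.ascii_letters + string.digits, k=60))

    # Required for anything security-sensitive (tokens, nonces, passwords).
    api_token = secrets.token_urlsafe(32)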

View File

@@ -851,7 +851,7 @@ def create_dashboard(slices: list[Slice]) -> Dashboard:
                "type": "ROW"
            }
        }
-       """
+       """  # noqa: E501
        )
    )
    # pylint: enable=line-too-long

View File

@@ -54,8 +54,8 @@ def load_long_lat_data(only_metadata: bool = False, force: bool = False) -> None
        start + datetime.timedelta(hours=i * 24 / (len(pdf) - 1))
        for i in range(len(pdf))
    ]
-   pdf["occupancy"] = [random.randint(1, 6) for _ in range(len(pdf))]
+   pdf["occupancy"] = [random.randint(1, 6) for _ in range(len(pdf))]  # noqa: S311
-   pdf["radius_miles"] = [random.uniform(1, 3) for _ in range(len(pdf))]
+   pdf["radius_miles"] = [random.uniform(1, 3) for _ in range(len(pdf))]  # noqa: S311
    pdf["geohash"] = pdf[["LAT", "LON"]].apply(
        lambda x: geohash.encode(*x), axis=1
    )

View File

@@ -88,7 +88,7 @@ def load_configs_from_directory(
    # removing "type" from the metadata allows us to import any exported model
    # from the unzipped directory directly
-   metadata = yaml.load(contents.get(METADATA_FILE_NAME, "{}"), Loader=yaml.Loader)
+   metadata = yaml.load(contents.get(METADATA_FILE_NAME, "{}"), Loader=yaml.Loader)  # noqa: S506
    if "type" in metadata:
        del metadata["type"]
    contents[METADATA_FILE_NAME] = yaml.dump(metadata)
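
S506 flags yaml.load() with an unrestricted Loader, which can instantiate arbitrary Python objects from a crafted document. Import bundles are presumably semi-trusted here, which would explain suppressing rather than changing the call; for untrusted input, the standard fix is safe_load():

    import yaml

    doc = "version: 1.0.0\ntype: Slice"  # hypothetical metadata snippet
    data = yaml.safe_load(doc)  # builds plain dicts/lists/scalars only
    assert data["type"] == "Slice"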

View File

@@ -26,7 +26,7 @@ from marshmallow import ValidationError
from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
-class SupersetException(Exception):
+class SupersetException(Exception):  # noqa: N818
    status = 500
    message = ""
@@ -396,13 +396,13 @@ class DisallowedSQLFunction(SupersetErrorException):
    )
-class CreateKeyValueDistributedLockFailedException(Exception):
+class CreateKeyValueDistributedLockFailedException(Exception):  # noqa: N818
    """
    Exception to signalize failure to acquire lock.
    """
-class DeleteKeyValueDistributedLockFailedException(Exception):
+class DeleteKeyValueDistributedLockFailedException(Exception):  # noqa: N818
    """
    Exception to signalize failure to delete lock.
    """

View File

@@ -18,12 +18,12 @@ from marshmallow import fields, Schema
class ExplorePermalinkStateSchema(Schema):
-   formData = fields.Dict(
+   formData = fields.Dict(  # noqa: N815
        required=True,
        allow_none=False,
        metadata={"description": "Chart form data"},
    )
-   urlParams = fields.List(
+   urlParams = fields.List(  # noqa: N815
        fields.Tuple(
            (
                fields.String(
@@ -44,17 +44,17 @@ class ExplorePermalinkStateSchema(Schema):
class ExplorePermalinkSchema(Schema):
-   chartId = fields.Integer(
+   chartId = fields.Integer(  # noqa: N815
        required=False,
        allow_none=True,
        metadata={"description": "The id of the chart"},
    )
-   datasourceType = fields.String(
+   datasourceType = fields.String(  # noqa: N815
        required=True,
        allow_none=False,
        metadata={"description": "The type of the datasource"},
    )
-   datasourceId = fields.Integer(
+   datasourceId = fields.Integer(  # noqa: N815
        required=False,
        allow_none=True,
        metadata={"description": "The id of the datasource"},

View File

@@ -106,7 +106,7 @@ class DatasetSchema(Schema):
class SliceSchema(Schema):
    cache_timeout = fields.Integer(
        metadata={
-           "description": "Duration (in seconds) of the caching timeout for this chart."
+           "description": "Duration (in seconds) of the caching timeout for this chart."  # noqa: E501
        }
    )
    certification_details = fields.String(

View File

@@ -97,7 +97,7 @@ class UIManifestProcessor:
            # templates
            full_manifest = json.load(f)
            self.manifest = full_manifest.get("entrypoints", {})
-       except Exception:  # pylint: disable=broad-except
+       except Exception:  # pylint: disable=broad-except  # noqa: S110
            pass
    def get_manifest_files(self, bundle: str, asset_type: str) -> list[str]:
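
S110 flags try/except/pass because silently swallowing every exception can mask real bugs. A missing asset manifest is genuinely optional here; when the swallow should still leave a trace, logging at debug level is the usual compromise. A generic sketch (load_entrypoints is a hypothetical helper, not Superset's code):

    import json
    import logging

    logger = logging.getLogger(__name__)

    def load_entrypoints(path: str) -> dict:
        try:
            with open(path, encoding="utf-8") as f:
                return json.load(f).get("entrypoints", {})
        except Exception:  # broad on purpose: the manifest is optional
            logger.debug("Could not read manifest at %s", path, exc_info=True)
            return {}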

View File

@@ -33,7 +33,7 @@ and applying any filters (as well as sorting, limiting, and offsetting).
Note that no aggregation is done on the database. Aggregations and other operations like
joins and unions are done in memory, using the SQLite engine.
-"""
+"""  # noqa: E501
from __future__ import annotations
@@ -270,7 +270,7 @@ class SupersetShillelaghAdapter(Adapter):
        self.schema = parts.pop(-1) if parts else None
        self.catalog = parts.pop(-1) if parts else None
-       # If the table has a single integer primary key we use that as the row ID in order
+       # If the table has a single integer primary key we use that as the row ID in order  # noqa: E501
        # to perform updates and deletes. Otherwise we can only do inserts and selects.
        self._rowid: str | None = None

View File

@@ -581,7 +581,7 @@ class SupersetAppInitializer:  # pylint: disable=too-many-public-methods
        self.superset_app.url_map.converters["regex"] = RegexConverter
        self.superset_app.url_map.converters["object_type"] = ObjectTypeConverter
-   def configure_middlewares(self) -> None:
+   def configure_middlewares(self) -> None:  # noqa: C901
        if self.config["ENABLE_CORS"]:
            # pylint: disable=import-outside-toplevel
            from flask_cors import CORS
@@ -648,7 +648,7 @@ class SupersetAppInitializer:  # pylint: disable=too-many-public-methods
        "We haven't found any Content Security Policy (CSP) defined in "
        "the configurations. Please make sure to configure CSP using the "
        "TALISMAN_ENABLED and TALISMAN_CONFIG keys or any other external "
-       "software. Failing to configure CSP have serious security implications. "
+       "software. Failing to configure CSP have serious security implications. "  # noqa: E501
        "Check https://developer.mozilla.org/en-US/docs/Web/HTTP/CSP for more "
        "information. You can disable this warning using the "
        "CONTENT_SECURITY_POLICY_WARNING key."

View File

@@ -79,7 +79,7 @@ class PickleKeyValueCodec(KeyValueCodec):
        return pickle.dumps(value)
    def decode(self, value: bytes) -> dict[Any, Any]:
-       return pickle.loads(value)
+       return pickle.loads(value)  # noqa: S301
class MarshmallowKeyValueCodec(JsonKeyValueCodec):
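
S301 exists because pickle.loads() can execute arbitrary code when fed attacker-controlled bytes. This codec only round-trips values Superset wrote itself, so the risk is accepted; for data that crosses a trust boundary, a JSON decode is the safe counterpart (a sketch with a hypothetical helper name):

    import json
    from typing import Any

    def decode_untrusted(value: bytes) -> dict[Any, Any]:
        # json can only produce data (dicts, lists, scalars) - never code.
        return json.loads(value.decode("utf-8"))

    assert decode_untrusted(b'{"a": 1}') == {"a": 1}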

View File

@@ -61,7 +61,7 @@ def decode_permalink_id(key: str, salt: str) -> int:
def get_uuid_namespace(seed: str) -> UUID:
-   md5_obj = md5()
+   md5_obj = md5()  # noqa: S324
    md5_obj.update(seed.encode("utf-8"))
    return UUID(md5_obj.hexdigest())
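
S324 flags weak hash algorithms such as md5. Here md5 merely derives a deterministic UUID namespace from a seed; no security property depends on collision resistance. Since Python 3.9 that intent can be declared explicitly, which some linters also recognize. A sketch of that variant (an alternative, not what the commit does):

    from hashlib import md5
    from uuid import UUID

    def get_uuid_namespace(seed: str) -> UUID:
        digest = md5(seed.encode("utf-8"), usedforsecurity=False).hexdigest()
        return UUID(digest)

    print(get_uuid_namespace("superset"))  # stable across runs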

View File

@@ -150,10 +150,10 @@ def print_processed_batch(
    """
    elapsed_time = datetime.now() - start_time
    elapsed_seconds = elapsed_time.total_seconds()
-   elapsed_formatted = f"{int(elapsed_seconds // 3600):02}:{int((elapsed_seconds % 3600) // 60):02}:{int(elapsed_seconds % 60):02}"
+   elapsed_formatted = f"{int(elapsed_seconds // 3600):02}:{int((elapsed_seconds % 3600) // 60):02}:{int(elapsed_seconds % 60):02}"  # noqa: E501
    rows_processed = min(offset + batch_size, total_rows)
    logger.info(
-       f"{elapsed_formatted} - {rows_processed:,} of {total_rows:,} {model.__tablename__} rows processed "
+       f"{elapsed_formatted} - {rows_processed:,} of {total_rows:,} {model.__tablename__} rows processed "  # noqa: E501
        f"({(rows_processed / total_rows) * 100:.2f}%)"
    )
@@ -252,7 +252,7 @@ def update_schema_catalog_perms(
        catalog (str): The new catalog to set.
        downgrade (bool, optional): If True, reset the `catalog` and `catalog_perm` fields to None.
            Defaults to False.
-   """
+   """  # noqa: E501
    # Mapping of table id to schema permission
    mapping = {}

View File

@@ -62,8 +62,8 @@ class MigrateViz:
    if "viz_type" in self.data:
        self.data["viz_type"] = self.target_viz_type
-   # Sometimes visualizations have same keys in the source form_data and rename_keys
+   # Sometimes visualizations have same keys in the source form_data and rename_keys  # noqa: E501
-   # We need to remove them from data to allow the migration to work properly with rename_keys
+   # We need to remove them from data to allow the migration to work properly with rename_keys  # noqa: E501
    for source_key, target_key in self.rename_keys.items():
        if source_key in self.data and target_key in self.data:
            self.data.pop(target_key)

Some files were not shown because too many files have changed in this diff.