chore: enable ruff lint rules TRY201 and B904 to improve `raise` stack traces (#29166)

This commit is contained in:
Maxime Beauchemin 2024-06-12 13:04:42 -07:00 committed by GitHub
parent 5167d20f27
commit 4bb2e2f8af
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
50 changed files with 152 additions and 141 deletions

View File

@ -435,7 +435,14 @@ target-version = "py310"
# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
# Unlike Flake8, Ruff doesn't enable pycodestyle warnings (`W`) or
# McCabe complexity (`C901`) by default.
select = ["E4", "E7", "E9", "F"]
select = [
"B904",
"E4",
"E7",
"E9",
"F",
"TRY201",
]
ignore = []
extend-select = ["I"]

View File

@ -42,9 +42,9 @@ def create_app(superset_config_module: Optional[str] = None) -> Flask:
return app
# Make sure that bootstrap errors ALWAYS get logged
except Exception as ex:
except Exception:
logger.exception("Failed to create app")
raise ex
raise
class SupersetApp(Flask):

View File

@ -446,5 +446,6 @@ class ChartDataRestApi(ChartRestApi):
return ChartDataQueryContextSchema().load(form_data)
except KeyError as ex:
raise ValidationError("Request is incorrect") from ex
except ValidationError as error:
raise error
except ValidationError: # pylint: disable=try-except-raise
# Make sure to bubble this up
raise

View File

@ -55,14 +55,14 @@ class ImportChartsCommand(BaseCommand):
return
except IncorrectVersionError:
logger.debug("File not handled by command, skipping")
except (CommandInvalidError, ValidationError) as exc:
except (CommandInvalidError, ValidationError):
# found right version, but file is invalid
logger.info("Command failed validation")
raise exc
except Exception as exc:
raise
except Exception:
# validation succeeded but something went wrong
logger.exception("Error running import command")
raise exc
raise
raise CommandInvalidError("Could not find a valid command to import file")

View File

@ -58,14 +58,14 @@ class ImportDashboardsCommand(BaseCommand):
return
except IncorrectVersionError:
logger.debug("File not handled by command, skipping")
except (CommandInvalidError, ValidationError) as exc:
except (CommandInvalidError, ValidationError):
# found right version, but file is invalid
logger.info("Command failed validation")
raise exc
except Exception as exc:
raise
except Exception:
# validation succeeded but something went wrong
logger.exception("Error running import command")
raise exc
raise
raise CommandInvalidError("Could not find a valid command to import file")

View File

@ -68,7 +68,7 @@ class CreateDatabaseCommand(BaseCommand):
engine=self._properties.get("sqlalchemy_uri", "").split(":")[0],
)
# So we can show the original message
raise ex
raise
except Exception as ex:
event_logger.log_with_context(
action=f"db_creation_failed.{ex.__class__.__name__}",
@ -141,7 +141,7 @@ class CreateDatabaseCommand(BaseCommand):
engine=self._properties.get("sqlalchemy_uri", "").split(":")[0],
)
# So we can show the original message
raise ex
raise
except (
DAOCreateFailedError,
DatabaseInvalidError,

View File

@ -53,14 +53,14 @@ class ImportDatabasesCommand(BaseCommand):
return
except IncorrectVersionError:
logger.debug("File not handled by command, skipping")
except (CommandInvalidError, ValidationError) as exc:
except (CommandInvalidError, ValidationError):
# found right version, but file is invalid
logger.info("Command failed validation")
raise exc
except Exception as exc:
raise
except Exception:
# validation succeeded but something went wrong
logger.exception("Error running import command")
raise exc
raise
raise CommandInvalidError("Could not find a valid command to import file")

View File

@ -51,8 +51,9 @@ class CreateSSHTunnelCommand(BaseCommand):
return ssh_tunnel
except DAOCreateFailedError as ex:
raise SSHTunnelCreateFailedError() from ex
except SSHTunnelInvalidError as ex:
raise ex
except SSHTunnelInvalidError: # pylint: disable=try-except-raise
# Make sure to bubble this up
raise
def validate(self) -> None:
# TODO(hughhh): check to make sure the server port is not localhost

View File

@ -127,8 +127,8 @@ class TablesDatabaseCommand(BaseCommand):
payload = {"count": len(tables) + len(views), "result": options}
return payload
except SupersetException as ex:
raise ex
except SupersetException:
raise
except Exception as ex:
raise DatabaseTablesUnexpectedError(ex) from ex

View File

@ -212,7 +212,7 @@ class TestConnectionDatabaseCommand(BaseCommand):
engine=database.db_engine_spec.__name__,
)
# bubble up the exception to return a 408
raise ex
raise
except SSHTunnelingNotEnabledError as ex:
event_logger.log_with_context(
action=get_log_connection_action(
@ -221,7 +221,7 @@ class TestConnectionDatabaseCommand(BaseCommand):
engine=database.db_engine_spec.__name__,
)
# bubble up the exception to return a 400
raise ex
raise
except Exception as ex:
event_logger.log_with_context(
action=get_log_connection_action(

View File

@ -85,9 +85,9 @@ class UpdateDatabaseCommand(BaseCommand):
database.set_sqlalchemy_uri(database.sqlalchemy_uri)
ssh_tunnel = self._handle_ssh_tunnel(database)
self._refresh_catalogs(database, original_database_name, ssh_tunnel)
except SSHTunnelError as ex:
except SSHTunnelError: # pylint: disable=try-except-raise
# allow exception to bubble for debugging information
raise ex
raise
except (DAOUpdateFailedError, DAOCreateFailedError) as ex:
raise DatabaseUpdateFailedError() from ex

View File

@ -58,14 +58,14 @@ class ImportDatasetsCommand(BaseCommand):
return
except IncorrectVersionError:
logger.debug("File not handled by command, skipping")
except (CommandInvalidError, ValidationError) as exc:
except (CommandInvalidError, ValidationError):
# found right version, but file is invalid
logger.info("Command failed validation")
raise exc
except Exception as exc:
raise
except Exception:
# validation succeeded but something went wrong
logger.exception("Error running import command")
raise exc
raise
raise CommandInvalidError("Could not find a valid command to import file")

View File

@ -74,9 +74,9 @@ class ImportModelsCommand(BaseCommand):
try:
self._import(self._configs, self.overwrite)
db.session.commit()
except CommandException as ex:
except CommandException:
db.session.rollback()
raise ex
raise
except Exception as ex:
db.session.rollback()
raise self.import_error() from ex

View File

@ -75,7 +75,7 @@ def load_metadata(contents: dict[str, str]) -> dict[str, str]:
# otherwise we raise the validation error
ex.messages = {METADATA_FILE_NAME: ex.messages}
raise ex
raise
return metadata

View File

@ -55,10 +55,10 @@ class ImportSavedQueriesCommand(BaseCommand):
return
except IncorrectVersionError:
logger.debug("File not handled by command, skipping")
except (CommandInvalidError, ValidationError) as exc:
except (CommandInvalidError, ValidationError):
# found right version, but file is invalid
logger.exception("Error running import command")
raise exc
raise
raise CommandInvalidError("Could not find a valid command to import file")

View File

@ -599,7 +599,7 @@ class ReportNotTriggeredErrorState(BaseReportState):
self.update_report_schedule_and_log(
ReportState.ERROR, error_message=second_error_message
)
raise first_ex
raise
class ReportWorkingState(BaseReportState):
@ -662,7 +662,7 @@ class ReportSuccessState(BaseReportState):
ReportState.ERROR,
error_message=REPORT_SCHEDULE_ERROR_NOTIFICATION_MARKER,
)
raise ex
raise
try:
self.send()
@ -737,8 +737,8 @@ class AsyncExecuteReportScheduleCommand(BaseCommand):
ReportScheduleStateMachine(
self._execution_id, self._model, self._scheduled_dttm
).run()
except CommandException as ex:
raise ex
except CommandException:
raise
except Exception as ex:
raise ReportScheduleUnexpectedError(str(ex)) from ex

View File

@ -42,7 +42,7 @@ class CreateRLSRuleCommand(BaseCommand):
return RLSDAO.create(attributes=self._properties)
except DAOCreateFailedError as ex:
logger.exception(ex.exception)
raise ex
raise
def validate(self) -> None:
roles = populate_roles(self._roles)

View File

@ -47,7 +47,7 @@ class UpdateRLSRuleCommand(BaseCommand):
rule = RLSDAO.update(self._model, self._properties)
except DAOUpdateFailedError as ex:
logger.exception(ex.exception)
raise ex
raise
return rule

View File

@ -115,10 +115,10 @@ class ExecuteSqlCommand(BaseCommand):
"status": status,
"payload": self._execution_context_convertor.serialize_payload(),
}
except (SupersetErrorException, SupersetErrorsException) as ex:
except (SupersetErrorException, SupersetErrorsException):
# to make sure we raise the original
# SupersetErrorException || SupersetErrorsException
raise ex
raise
except Exception as ex:
raise SqlLabException(self._execution_context, exception=ex) from ex
@ -158,9 +158,9 @@ class ExecuteSqlCommand(BaseCommand):
return self._sql_json_executor.execute(
self._execution_context, rendered_query, self._log_params
)
except Exception as ex:
except Exception:
self._query_dao.update(query, {"status": QueryStatus.FAILED})
raise ex
raise
def _get_the_query_db(self) -> Database:
mydb: Any = self._database_dao.find_by_id(self._execution_context.database_id)

View File

@ -276,7 +276,7 @@ class QueryObject: # pylint: disable=too-many-instance-attributes
return None
except QueryObjectValidationError as ex:
if raise_exceptions:
raise ex
raise
return ex
def _validate_no_have_duplicate_labels(self) -> None:

View File

@ -1727,11 +1727,11 @@ class SqlaTable(
self.schema or None,
mutator=assign_column_label,
)
except (SupersetErrorException, SupersetErrorsException) as ex:
except (SupersetErrorException, SupersetErrorsException):
# SupersetError(s) exception should not be captured; instead, they should
# bubble up to the Flask error handler so they are returned as proper SIP-40
# errors. This is particularly important for database OAuth2, see SIP-85.
raise ex
raise
except Exception as ex: # pylint: disable=broad-except
# TODO (betodealmeida): review exception handling while querying the external
# database. Ideally we'd expect and handle external database error, but

View File

@ -123,8 +123,8 @@ def make_url_safe(raw_url: str | URL) -> URL:
url = raw_url.strip()
try:
return make_url(url) # noqa
except Exception:
raise DatabaseInvalidError() # pylint: disable=raise-missing-from
except Exception as ex:
raise DatabaseInvalidError() from ex
else:
return raw_url

View File

@ -2001,7 +2001,7 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
extra = json.loads(database.extra)
except json.JSONDecodeError as ex:
logger.error(ex, exc_info=True)
raise ex
raise
return extra
@staticmethod
@ -2022,7 +2022,7 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
params.update(encrypted_extra)
except json.JSONDecodeError as ex:
logger.error(ex, exc_info=True)
raise ex
raise
@classmethod
def is_readonly_query(cls, parsed_query: ParsedQuery) -> bool:

View File

@ -104,7 +104,7 @@ class ImpalaEngineSpec(BaseEngineSpec):
try:
cursor.execute_async(query)
except Exception as ex:
raise cls.get_dbapi_mapped_exception(ex)
raise cls.get_dbapi_mapped_exception(ex) from ex
@classmethod
def handle_cursor(cls, cursor: Any, query: Query) -> None:

View File

@ -315,12 +315,12 @@ class OcientEngineSpec(BaseEngineSpec):
) -> list[tuple[Any, ...]]:
try:
rows: list[tuple[Any, ...]] = super().fetch_data(cursor, limit)
except Exception as exception:
except Exception:
with OcientEngineSpec.query_id_mapping_lock:
del OcientEngineSpec.query_id_mapping[
getattr(cursor, "superset_query_id")
]
raise exception
raise
# TODO: Unsure if we need to verify that we are receiving rows:
if len(rows) > 0 and type(rows[0]).__name__ == "Row":

View File

@ -368,7 +368,7 @@ class SnowflakeEngineSpec(PostgresBaseEngineSpec):
encrypted_extra = json.loads(database.encrypted_extra)
except json.JSONDecodeError as ex:
logger.error(ex, exc_info=True)
raise ex
raise
auth_method = encrypted_extra.get("auth_method", None)
auth_params = encrypted_extra.get("auth_params", {})
if not auth_method:

View File

@ -342,7 +342,7 @@ class TrinoEngineSpec(PrestoBaseEngineSpec):
connect_args["auth"] = trino_auth(**auth_params)
except json.JSONDecodeError as ex:
logger.error(ex, exc_info=True)
raise ex
raise
@classmethod
def get_dbapi_exception_mapping(cls) -> dict[type[Exception], type[Exception]]:

View File

@ -118,13 +118,13 @@ def compute_time_compare(granularity, periods):
try:
obj = isodate.parse_duration(granularity) * periods
except isodate.isoerror.ISO8601Error:
except isodate.isoerror.ISO8601Error as ex:
# if parse_human_timedelta can parse it, return it directly
delta = f"{periods} {granularity}{'s' if periods > 1 else ''}"
obj = parse_human_timedelta(delta)
if obj:
return delta
raise Exception(f"Unable to parse: {granularity}")
raise Exception(f"Unable to parse: {granularity}") from ex
if isinstance(obj, isodate.duration.Duration):
return isodate_duration_to_string(obj)

View File

@ -197,9 +197,9 @@ def upgrade():
dashboard.json_metadata = json.dumps(json_metadata, sort_keys=True)
except Exception as e:
except Exception:
print(f"Parsing json_metadata for dashboard {dashboard.id} failed.")
raise e
raise
session.commit()
session.close()
@ -225,9 +225,9 @@ def downgrade():
changed_filter_sets += 1
changed_filters += downgrade_filter_set(filter_set)
dashboard.json_metadata = json.dumps(json_metadata, sort_keys=True)
except Exception as e:
except Exception:
print(f"Parsing json_metadata for dashboard {dashboard.id} failed.")
raise e
raise
session.commit()
session.close()

View File

@ -111,9 +111,9 @@ def upgrade():
changed_filters += upgrades[0]
changed_filter_sets += upgrades[1]
dashboard.json_metadata = json.dumps(json_metadata, sort_keys=True)
except Exception as e:
except Exception:
print(f"Parsing json_metadata for dashboard {dashboard.id} failed.")
raise e
raise
session.commit()
session.close()
@ -151,9 +151,9 @@ def downgrade():
changed_filters += downgrades[0]
changed_filter_sets += downgrades[1]
dashboard.json_metadata = json.dumps(json_metadata, sort_keys=True)
except Exception as e:
except Exception:
print(f"Parsing json_metadata for dashboard {dashboard.id} failed.")
raise e
raise
session.commit()
session.close()

View File

@ -92,9 +92,9 @@ def upgrade():
]
changed_slices += 1
slice.params = json.dumps(params, sort_keys=True)
except Exception as e:
except Exception:
print(f"Parsing json_metadata for slice {slice.id} failed.")
raise e
raise
session.commit()
session.close()

View File

@ -63,12 +63,12 @@ def upgrade():
if header_timestamp_format:
params["time_format"] = header_timestamp_format
slc.params = json.dumps(params, sort_keys=True)
except Exception as e:
except Exception:
logger.exception(
f"An error occurred: parsing params for slice {slc.id} failed."
f"You need to fix it before upgrading your DB."
)
raise e
raise
session.commit()
session.close()
@ -89,12 +89,12 @@ def downgrade():
if force_timestamp_formatting:
params["header_format_selector"] = force_timestamp_formatting
slc.params = json.dumps(params, sort_keys=True)
except Exception as e:
except Exception:
logger.exception(
f"An error occurred: parsing params for slice {slc.id} failed. "
"You need to fix it before downgrading your DB."
)
raise e
raise
session.commit()
session.close()

View File

@ -61,12 +61,12 @@ def upgrade():
if legacy_order_by:
params["series_limit_metric"] = legacy_order_by
slc.params = json.dumps(params, sort_keys=True)
except Exception as e:
except Exception:
logger.exception(
f"An error occurred: parsing params for slice {slc.id} failed."
f"You need to fix it before upgrading your DB."
)
raise e
raise
session.commit()
session.close()
@ -84,12 +84,12 @@ def downgrade():
if series_limit_metric:
params["legacy_order_by"] = series_limit_metric
slc.params = json.dumps(params, sort_keys=True)
except Exception as e:
except Exception:
logger.exception(
f"An error occurred: parsing params for slice {slc.id} failed. "
"You need to fix it before downgrading your DB."
)
raise e
raise
session.commit()
session.close()

View File

@ -90,9 +90,9 @@ def upgrade():
if needs_upgrade:
dashboard.json_metadata = json.dumps(json_metadata)
except Exception as e:
except Exception:
logger.exception("Failed to run up migration")
raise e
raise
session.commit()
session.close()
@ -127,9 +127,9 @@ def downgrade():
dashboard.json_metadata = json.dumps(json_metadata)
except Exception as e:
except Exception:
logger.exception("Failed to run down migration")
raise e
raise
session.commit()
session.close()

View File

@ -68,7 +68,7 @@ def upgrade():
session.commit()
except SQLAlchemyError as ex:
session.rollback()
raise Exception(f"An error occurred while upgrading permissions: {ex}")
raise Exception(f"An error occurred while upgrading permissions: {ex}") from ex
def downgrade():

View File

@ -68,7 +68,7 @@ def upgrade():
session.commit()
except SQLAlchemyError as ex:
session.rollback()
raise Exception(f"An error occurred while upgrading permissions: {ex}")
raise Exception(f"An error occurred while upgrading permissions: {ex}") from ex
def downgrade():

View File

@ -70,7 +70,7 @@ def upgrade():
session.commit()
except SQLAlchemyError as ex:
session.rollback()
raise Exception(f"An error occurred while upgrading permissions: {ex}")
raise Exception(f"An error occurred while upgrading permissions: {ex}") from ex
def downgrade():

View File

@ -71,7 +71,7 @@ def upgrade():
session.commit()
except SQLAlchemyError as ex:
session.rollback()
raise Exception(f"An error occurred while upgrading permissions: {ex}")
raise Exception(f"An error occurred while upgrading permissions: {ex}") from ex
def downgrade():

View File

@ -111,7 +111,7 @@ def upgrade():
f"An error occurred: Upgrading params for slice {slc.id} failed."
f"You need to fix it before upgrading your DB."
)
raise Exception(f"An error occurred while upgrading slice: {ex}")
raise Exception(f"An error occurred while upgrading slice: {ex}") from ex
session.commit()
session.close()
@ -206,7 +206,7 @@ def downgrade():
f"An error occurred: Downgrading params for slice {slc.id} failed."
f"You need to fix it before downgrading your DB."
)
raise Exception(f"An error occurred while downgrading slice: {ex}")
raise Exception(f"An error occurred while downgrading slice: {ex}") from ex
session.commit()
session.close()

View File

@ -538,7 +538,7 @@ class Database(Model, AuditMixinNullable, ImportExportMixin): # pylint: disable
try:
return create_engine(sqlalchemy_url, **params)
except Exception as ex:
raise self.db_engine_spec.get_dbapi_mapped_exception(ex)
raise self.db_engine_spec.get_dbapi_mapped_exception(ex) from ex
@contextmanager
def get_raw_connection(
@ -570,7 +570,7 @@ class Database(Model, AuditMixinNullable, ImportExportMixin): # pylint: disable
except Exception as ex:
if self.is_oauth2_enabled() and self.db_engine_spec.needs_oauth2(ex):
self.db_engine_spec.start_oauth2_dance(self)
raise ex
raise
def get_default_catalog(self) -> str | None:
"""
@ -769,7 +769,7 @@ class Database(Model, AuditMixinNullable, ImportExportMixin): # pylint: disable
)
}
except Exception as ex:
raise self.db_engine_spec.get_dbapi_mapped_exception(ex)
raise self.db_engine_spec.get_dbapi_mapped_exception(ex) from ex
@cache_util.memoized_func(
key="db:{self.id}:schema:{schema}:view_list",
@ -803,7 +803,7 @@ class Database(Model, AuditMixinNullable, ImportExportMixin): # pylint: disable
)
}
except Exception as ex:
raise self.db_engine_spec.get_dbapi_mapped_exception(ex)
raise self.db_engine_spec.get_dbapi_mapped_exception(ex) from ex
@contextmanager
def get_inspector(
@ -906,7 +906,7 @@ class Database(Model, AuditMixinNullable, ImportExportMixin): # pylint: disable
encrypted_extra = json.loads(self.encrypted_extra)
except json.JSONDecodeError as ex:
logger.error(ex, exc_info=True)
raise ex
raise
return encrypted_extra
# pylint: disable=invalid-name

View File

@ -310,7 +310,7 @@ class ImportExportMixin:
try:
obj_query = db.session.query(cls).filter(and_(*filters))
obj = obj_query.one_or_none()
except MultipleResultsFound as ex:
except MultipleResultsFound:
logger.error(
"Error importing %s \n %s \n %s",
cls.__name__,
@ -318,7 +318,7 @@ class ImportExportMixin:
yaml.safe_dump(dict_rep),
exc_info=True,
)
raise ex
raise
if not obj:
is_new_obj = True

View File

@ -312,9 +312,9 @@ def execute_sql_statement( # pylint: disable=too-many-statements
level=ErrorLevel.ERROR,
)
) from ex
except OAuth2RedirectError as ex:
except OAuth2RedirectError:
# user needs to authenticate with OAuth2 in order to run query
raise ex
raise
except Exception as ex:
# query is stopped in another thread/worker
# stopping raises expected exceptions which we should skip

View File

@ -138,7 +138,7 @@ class PrestoDBSQLValidator(BaseSQLValidator):
)
except Exception as ex:
logger.exception("Unexpected error running validation query: %s", str(ex))
raise ex
raise
@classmethod
def validate(

View File

@ -97,8 +97,8 @@ class SynchronousSqlJsonExecutor(SqlJsonExecutorBase):
)
self._query_dao.update_saved_query_exec_info(query_id)
execution_context.set_execution_result(data)
except SupersetTimeoutException as ex:
raise ex
except SupersetTimeoutException:
raise
except Exception as ex:
logger.exception("Query %i failed unexpectedly", query_id)
raise SupersetGenericDBErrorException(

View File

@ -55,8 +55,9 @@ def _create_query_context_from_form(form_data: dict[str, Any]) -> QueryContext:
return ChartDataQueryContextSchema().load(form_data)
except KeyError as ex:
raise ValidationError("Request is incorrect") from ex
except ValidationError as error:
raise error
except ValidationError: # pylint: disable=try-except-raise
# Make sure to bubble this up
raise
def _load_user_from_job_metadata(job_metadata: dict[str, Any]) -> User:
@ -96,7 +97,7 @@ def load_chart_data_into_cache(
)
except SoftTimeLimitExceeded as ex:
logger.warning("A timeout occurred while loading chart data, error: %s", ex)
raise ex
raise
except Exception as ex:
# TODO: QueryContext should support SIP-40 style errors
error = str(ex.message if hasattr(ex, "message") else ex)
@ -104,7 +105,7 @@ def load_chart_data_into_cache(
async_query_manager.update_job(
job_metadata, async_query_manager.STATUS_ERROR, errors=errors
)
raise ex
raise
@celery_app.task(name="load_explore_json_into_cache", soft_time_limit=query_timeout)
@ -162,7 +163,7 @@ def load_explore_json_into_cache( # pylint: disable=too-many-locals
logger.warning(
"A timeout occurred while loading explore json, error: %s", ex
)
raise ex
raise
except Exception as ex:
if isinstance(ex, SupersetVizException):
errors = ex.errors
@ -173,4 +174,4 @@ def load_explore_json_into_cache( # pylint: disable=too-many-locals
async_query_manager.update_job(
job_metadata, async_query_manager.STATUS_ERROR, errors=errors
)
raise ex
raise

View File

@ -57,7 +57,7 @@ def statsd_gauge(metric_prefix: str | None = None) -> Callable[..., Any]:
current_app.config["STATS_LOGGER"].gauge(
f"{metric_prefix_}.error", 1
)
raise ex
raise
return wrapped
@ -146,8 +146,9 @@ def stats_timing(stats_key: str, stats_logger: BaseStatsLogger) -> Iterator[floa
start_ts = now_as_float()
try:
yield start_ts
except Exception as ex:
raise ex
except Exception: # pylint: disable=try-except-raise
# Make sure to bubble this up
raise
finally:
stats_logger.timing(stats_key, now_as_float() - start_ts)

View File

@ -122,11 +122,11 @@ def json_iso_dttm_ser(obj: Any, pessimistic: bool = False) -> Any:
try:
return base_json_conv(obj)
except TypeError as ex:
except TypeError:
if pessimistic:
logger.error("Failed to serialize %s", obj)
return f"Unserializable [{type(obj)}]"
raise ex
raise
def pessimistic_json_iso_dttm_ser(obj: Any) -> Any:
@ -249,4 +249,4 @@ def loads(
)
except JSONDecodeError as ex:
logger.error("JSON is not valid %s", str(ex), exc_info=True)
raise ex
raise

View File

@ -180,9 +180,9 @@ class WebDriverPlaywright(WebDriverProxy):
)
element = page.locator(f".{element_name}")
element.wait_for()
except PlaywrightTimeout as ex:
except PlaywrightTimeout:
logger.exception("Timed out requesting url %s", url)
raise ex
raise
try:
# chart containers didn't render
@ -191,12 +191,12 @@ class WebDriverPlaywright(WebDriverProxy):
slice_container_locator.first.wait_for()
for slice_container_elem in slice_container_locator.all():
slice_container_elem.wait_for()
except PlaywrightTimeout as ex:
except PlaywrightTimeout:
logger.exception(
"Timed out waiting for chart containers to draw at url %s",
url,
)
raise ex
raise
try:
# charts took too long to load
logger.debug(
@ -204,11 +204,11 @@ class WebDriverPlaywright(WebDriverProxy):
)
for loading_element in page.locator(".loading").all():
loading_element.wait_for(state="detached")
except PlaywrightTimeout as ex:
except PlaywrightTimeout:
logger.exception(
"Timed out waiting for charts to load at url %s", url
)
raise ex
raise
selenium_animation_wait = current_app.config[
"SCREENSHOT_SELENIUM_ANIMATION_WAIT"
@ -366,9 +366,9 @@ class WebDriverSelenium(WebDriverProxy):
element = WebDriverWait(driver, self._screenshot_locate_wait).until(
EC.presence_of_element_located((By.CLASS_NAME, element_name))
)
except TimeoutException as ex:
except TimeoutException:
logger.exception("Selenium timed out requesting url %s", url)
raise ex
raise
try:
# chart containers didn't render
@ -378,12 +378,12 @@ class WebDriverSelenium(WebDriverProxy):
(By.CLASS_NAME, "chart-container")
)
)
except TimeoutException as ex:
except TimeoutException:
logger.exception(
"Selenium timed out waiting for chart containers to draw at url %s",
url,
)
raise ex
raise
try:
# charts took too long to load
@ -393,11 +393,11 @@ class WebDriverSelenium(WebDriverProxy):
WebDriverWait(driver, self._screenshot_load_wait).until_not(
EC.presence_of_all_elements_located((By.CLASS_NAME, "loading"))
)
except TimeoutException as ex:
except TimeoutException:
logger.exception(
"Selenium timed out waiting for charts to load at url %s", url
)
raise ex
raise
selenium_animation_wait = current_app.config[
"SCREENSHOT_SELENIUM_ANIMATION_WAIT"

View File

@ -124,7 +124,7 @@ def statsd_metrics(f: Callable[..., Any]) -> Callable[..., Any]:
self.incr_stats("warning", func_name)
else:
self.incr_stats("error", func_name)
raise ex
raise
self.send_stats_metrics(response, func_name, duration)
return response

View File

@ -198,15 +198,15 @@ def delete_all_inserted_dashboards():
for dashboard in dashboards_to_delete:
try:
delete_dashboard(dashboard, False)
except Exception as ex:
except Exception:
logger.error(f"failed to delete {dashboard.id}", exc_info=True)
raise ex
raise
if len(inserted_dashboards_ids) > 0:
db.session.commit()
inserted_dashboards_ids.clear()
except Exception as ex2:
except Exception:
logger.error("delete_all_inserted_dashboards failed", exc_info=True)
raise ex2
raise
def delete_dashboard(dashboard: Dashboard, do_commit: bool = False) -> None:
@ -245,15 +245,15 @@ def delete_all_inserted_slices():
for slice in slices_to_delete:
try:
delete_slice(slice, False)
except Exception as ex:
except Exception:
logger.error(f"failed to delete {slice.id}", exc_info=True)
raise ex
raise
if len(inserted_slices_ids) > 0:
db.session.commit()
inserted_slices_ids.clear()
except Exception as ex2:
except Exception:
logger.error("delete_all_inserted_slices failed", exc_info=True)
raise ex2
raise
def delete_slice(slice_: Slice, do_commit: bool = False) -> None:
@ -278,15 +278,15 @@ def delete_all_inserted_tables():
for table in tables_to_delete:
try:
delete_sqltable(table, False)
except Exception as ex:
except Exception:
logger.error(f"failed to delete {table.id}", exc_info=True)
raise ex
raise
if len(inserted_sqltables_ids) > 0:
db.session.commit()
inserted_sqltables_ids.clear()
except Exception as ex2:
except Exception:
logger.error("delete_all_inserted_tables failed", exc_info=True)
raise ex2
raise
def delete_sqltable(table: SqlaTable, do_commit: bool = False) -> None:
@ -313,15 +313,15 @@ def delete_all_inserted_dbs():
for database in databases_to_delete:
try:
delete_database(database, False)
except Exception as ex:
except Exception:
logger.error(f"failed to delete {database.id}", exc_info=True)
raise ex
raise
if len(inserted_databases_ids) > 0:
db.session.commit()
inserted_databases_ids.clear()
except Exception as ex2:
except Exception:
logger.error("delete_all_inserted_databases failed", exc_info=True)
raise ex2
raise
def delete_database(database: Database, do_commit: bool = False) -> None: