Fix a bunch of files with pylint disabled (#8743)

* Re-enable pylint for superset/jinja_context.py

* Re-enable pylint for superset/sql_lab.py

* Re-enable pylint for superset/sql_parse.py

* Re-enable pylint for superset/exceptions.py

* Re-enable lint for superset/translations/utils.py

* Re-enable pylint for superset/views/schedules.py

* Re-enable pylint for superset/views/base.py

* Re-enable pylint for superset/views/log/views.py

* Re-enable pylint for superset/views/annotations.py

* black

* PR feedback, pylint, isort fixes

* Black, one more time...

* Move ungrouped-imports to a global disable
This commit is contained in:
Will Barrett 2019-12-11 10:14:24 -08:00 committed by Maxime Beauchemin
parent 60914fa76a
commit 562aeab1aa
10 changed files with 128 additions and 127 deletions

View File

@ -81,7 +81,7 @@ confidence=
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use "--disable=all --enable=classes
# --disable=W"
disable=standarderror-builtin,long-builtin,dict-view-method,intern-builtin,suppressed-message,no-absolute-import,unpacking-in-except,apply-builtin,delslice-method,indexing-exception,old-raise-syntax,print-statement,cmp-builtin,reduce-builtin,useless-suppression,coerce-method,input-builtin,cmp-method,raw_input-builtin,nonzero-method,backtick,basestring-builtin,setslice-method,reload-builtin,oct-method,map-builtin-not-iterating,execfile-builtin,old-octal-literal,zip-builtin-not-iterating,buffer-builtin,getslice-method,metaclass-assignment,xrange-builtin,long-suffix,round-builtin,range-builtin-not-iterating,next-method-called,dict-iter-method,parameter-unpacking,unicode-builtin,unichr-builtin,import-star-module-level,raising-string,filter-builtin-not-iterating,old-ne-operator,using-cmp-argument,coerce-builtin,file-builtin,old-division,hex-method,invalid-unary-operand-type,missing-docstring,too-many-lines,duplicate-code,bad-continuation
disable=standarderror-builtin,long-builtin,dict-view-method,intern-builtin,suppressed-message,no-absolute-import,unpacking-in-except,apply-builtin,delslice-method,indexing-exception,old-raise-syntax,print-statement,cmp-builtin,reduce-builtin,useless-suppression,coerce-method,input-builtin,cmp-method,raw_input-builtin,nonzero-method,backtick,basestring-builtin,setslice-method,reload-builtin,oct-method,map-builtin-not-iterating,execfile-builtin,old-octal-literal,zip-builtin-not-iterating,buffer-builtin,getslice-method,metaclass-assignment,xrange-builtin,long-suffix,round-builtin,range-builtin-not-iterating,next-method-called,dict-iter-method,parameter-unpacking,unicode-builtin,unichr-builtin,import-star-module-level,raising-string,filter-builtin-not-iterating,old-ne-operator,using-cmp-argument,coerce-builtin,file-builtin,old-division,hex-method,invalid-unary-operand-type,missing-docstring,too-many-lines,duplicate-code,bad-continuation,ungrouped-imports
[REPORTS]

View File

@ -14,15 +14,11 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=C,R,W
class SupersetException(Exception):
status = 500
def __init__(self, msg):
super(SupersetException, self).__init__(msg)
class SupersetTimeoutException(SupersetException):
pass

View File

@ -14,7 +14,6 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=C,R,W
"""Defines the templating context for SQL Lab"""
import inspect
import json
@ -112,11 +111,11 @@ def filter_values(column: str, default: Optional[str] = None) -> List[str]:
if default:
return [default]
else:
return []
class CacheKeyWrapper:
class CacheKeyWrapper: # pylint: disable=too-few-public-methods
""" Dummy class that exposes a method used to store additional values used in
calculation of query object cache keys"""
@ -152,7 +151,7 @@ class CacheKeyWrapper:
return key
class BaseTemplateProcessor:
class BaseTemplateProcessor: # pylint: disable=too-few-public-methods
"""Base class for database-specific jinja context
There's this bit of magic in ``process_template`` that instantiates only
@ -273,5 +272,7 @@ for k in keys:
def get_template_processor(database, table=None, query=None, **kwargs):
TP = template_processors.get(database.backend, BaseTemplateProcessor)
return TP(database=database, table=table, query=query, **kwargs)
template_processor = template_processors.get(
database.backend, BaseTemplateProcessor
)
return template_processor(database=database, table=table, query=query, **kwargs)

View File

@ -14,7 +14,6 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=C,R,W
import logging
import uuid
from contextlib import closing
@ -53,6 +52,8 @@ config = app.config
stats_logger = config["STATS_LOGGER"]
SQLLAB_TIMEOUT = config["SQLLAB_ASYNC_TIME_LIMIT_SEC"]
SQLLAB_HARD_TIMEOUT = SQLLAB_TIMEOUT + 60
SQL_MAX_ROW = config["SQL_MAX_ROW"]
SQL_QUERY_MUTATOR = config["SQL_QUERY_MUTATOR"]
log_query = config["QUERY_LOGGER"]
logger = logging.getLogger(__name__)
@ -90,7 +91,7 @@ def get_query_backoff_handler(details):
logger.error(f"Query {query_id}: Sleeping for a sec before retrying...")
def get_query_giveup_handler(details):
def get_query_giveup_handler(_):
stats_logger.incr("error_failed_at_getting_orm_query")
@ -141,7 +142,7 @@ def session_scope(nullpool):
time_limit=SQLLAB_HARD_TIMEOUT,
soft_time_limit=SQLLAB_TIMEOUT,
)
def get_sql_results(
def get_sql_results( # pylint: disable=too-many-arguments
ctask,
query_id,
rendered_query,
@ -156,7 +157,6 @@ def get_sql_results(
try:
return execute_sql_statements(
ctask,
query_id,
rendered_query,
return_results,
@ -166,7 +166,7 @@ def get_sql_results(
start_time=start_time,
expand_data=expand_data,
)
except Exception as e:
except Exception as e: # pylint: disable=broad-except
logger.exception(f"Query {query_id}: {e}")
stats_logger.incr("error_sqllab_unhandled")
query = get_query(query_id, session)
@ -175,12 +175,10 @@ def get_sql_results(
def execute_sql_statement(sql_statement, query, user_name, session, cursor):
"""Executes a single SQL statement"""
query_id = query.id
database = query.database
db_engine_spec = database.db_engine_spec
parsed_query = ParsedQuery(sql_statement)
sql = parsed_query.stripped()
SQL_MAX_ROWS = app.config["SQL_MAX_ROW"]
if not parsed_query.is_readonly() and not database.allow_dml:
raise SqlLabSecurityException(
@ -202,13 +200,12 @@ def execute_sql_statement(sql_statement, query, user_name, session, cursor):
sql = parsed_query.as_create_table(query.tmp_table_name)
query.select_as_cta_used = True
if parsed_query.is_select():
if SQL_MAX_ROWS and (not query.limit or query.limit > SQL_MAX_ROWS):
query.limit = SQL_MAX_ROWS
if SQL_MAX_ROW and (not query.limit or query.limit > SQL_MAX_ROW):
query.limit = SQL_MAX_ROW
if query.limit:
sql = database.apply_limit_to_sql(sql, query.limit)
# Hook to allow environment-specific mutation (usually comments) to the SQL
SQL_QUERY_MUTATOR = config["SQL_QUERY_MUTATOR"]
if SQL_QUERY_MUTATOR:
sql = SQL_QUERY_MUTATOR(sql, user_name, security_manager, database)
@ -225,30 +222,30 @@ def execute_sql_statement(sql_statement, query, user_name, session, cursor):
query.executed_sql = sql
session.commit()
with stats_timing("sqllab.query.time_executing_query", stats_logger):
logger.info(f"Query {query_id}: Running query: \n{sql}")
logger.info(f"Query {query.id}: Running query: \n{sql}")
db_engine_spec.execute(cursor, sql, async_=True)
logger.info(f"Query {query_id}: Handling cursor")
logger.info(f"Query {query.id}: Handling cursor")
db_engine_spec.handle_cursor(cursor, query, session)
with stats_timing("sqllab.query.time_fetching_results", stats_logger):
logger.debug(
"Query {}: Fetching data for query object: {}".format(
query_id, query.to_dict()
)
"Query %d: Fetching data for query object: %s",
query.id,
str(query.to_dict()),
)
data = db_engine_spec.fetch_data(cursor, query.limit)
except SoftTimeLimitExceeded as e:
logger.exception(f"Query {query_id}: {e}")
logger.exception(f"Query {query.id}: {e}")
raise SqlLabTimeoutException(
"SQL Lab timeout. This environment's policy is to kill queries "
"after {} seconds.".format(SQLLAB_TIMEOUT)
)
except Exception as e:
logger.exception(f"Query {query_id}: {e}")
logger.exception(f"Query {query.id}: {e}")
raise SqlLabException(db_engine_spec.extract_error_message(e))
logger.debug(f"Query {query_id}: Fetching cursor description")
logger.debug(f"Query {query.id}: Fetching cursor description")
cursor_description = cursor.description
return SupersetDataFrame(data, cursor_description, db_engine_spec)
@ -259,7 +256,7 @@ def _serialize_payload(
logger.debug(f"Serializing to msgpack: {use_msgpack}")
if use_msgpack:
return msgpack.dumps(payload, default=json_iso_dttm_ser, use_bin_type=True)
else:
return json.dumps(payload, default=json_iso_dttm_ser, ignore_nan=True)
@ -298,7 +295,6 @@ def _serialize_and_expand_data(
def execute_sql_statements(
ctask,
query_id,
rendered_query,
return_results=True,
@ -307,7 +303,7 @@ def execute_sql_statements(
session=None,
start_time=None,
expand_data=False,
):
): # pylint: disable=too-many-arguments, too-many-locals, too-many-statements
"""Executes the sql query returns the results."""
if store_results and start_time:
# only asynchronous queries
@ -347,7 +343,7 @@ def execute_sql_statements(
# Check if stopped
query = get_query(query_id, session)
if query.status == QueryStatus.STOPPED:
return
return None
# Run statement
msg = f"Running statement {i+1} out of {statement_count}"
@ -358,7 +354,7 @@ def execute_sql_statements(
cdf = execute_sql_statement(
statement, query, user_name, session, cursor
)
except Exception as e:
except Exception as e: # pylint: disable=broad-except
msg = str(e)
if statement_count > 1:
msg = f"[Statement {i+1} out of {statement_count}] " + msg
@ -422,3 +418,5 @@ def execute_sql_statements(
if return_results:
return payload
return None

View File

@ -14,7 +14,6 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=C,R,W
import logging
from typing import List, Optional, Set
@ -29,6 +28,27 @@ PRECEDES_TABLE_NAME = {"FROM", "JOIN", "DESCRIBE", "WITH", "LEFT JOIN", "RIGHT J
CTE_PREFIX = "CTE__"
def _extract_limit_from_query(statement: TokenList) -> Optional[int]:
"""
Extract limit clause from SQL statement.
:param statement: SQL statement
:return: Limit extracted from query, None if no limit present in statement
"""
idx, _ = statement.token_next_by(m=(Keyword, "LIMIT"))
if idx is not None:
_, token = statement.token_next(idx=idx)
if token:
if isinstance(token, IdentifierList):
# In case of "LIMIT <offset>, <limit>", find comma and extract
# first succeeding non-whitespace token
idx, _ = token.token_next_by(m=(sqlparse.tokens.Punctuation, ","))
_, token = token.token_next(idx=idx)
if token and token.ttype == sqlparse.tokens.Literal.Number.Integer:
return int(token.value)
return None
class ParsedQuery(object):
def __init__(self, sql_statement):
self.sql: str = sql_statement
@ -36,11 +56,11 @@ class ParsedQuery(object):
self._alias_names: Set[str] = set()
self._limit: Optional[int] = None
logging.info("Parsing with sqlparse statement {}".format(self.sql))
logging.info("Parsing with sqlparse statement %s", self.sql)
self._parsed = sqlparse.parse(self.stripped())
for statement in self._parsed:
self.__extract_from_token(statement)
self._limit = self._extract_limit_from_query(statement)
self._limit = _extract_limit_from_query(statement)
self._table_names = self._table_names - self._alias_names
@property
@ -146,7 +166,7 @@ class ParsedQuery(object):
exec_sql += f"CREATE TABLE {table_name} AS \n{sql}"
return exec_sql
def __extract_from_token(self, token: Token):
def __extract_from_token(self, token: Token): # pylint: disable=too-many-branches
"""
Populate self._table_names from token
@ -176,34 +196,14 @@ class ParsedQuery(object):
if isinstance(item, Identifier):
self.__process_tokenlist(item)
elif isinstance(item, IdentifierList):
for token in item.get_identifiers():
if isinstance(token, TokenList):
self.__process_tokenlist(token)
for token2 in item.get_identifiers():
if isinstance(token2, TokenList):
self.__process_tokenlist(token2)
elif isinstance(item, IdentifierList):
for token in item.tokens:
if not self.__is_identifier(token):
for token2 in item.tokens:
if not self.__is_identifier(token2):
self.__extract_from_token(item)
def _extract_limit_from_query(self, statement: TokenList) -> Optional[int]:
"""
Extract limit clause from SQL statement.
:param statement: SQL statement
:return: Limit extracted from query, None if no limit present in statement
"""
idx, _ = statement.token_next_by(m=(Keyword, "LIMIT"))
if idx is not None:
_, token = statement.token_next(idx=idx)
if token:
if isinstance(token, IdentifierList):
# In case of "LIMIT <offset>, <limit>", find comma and extract
# first succeeding non-whitespace token
idx, _ = token.token_next_by(m=(sqlparse.tokens.Punctuation, ","))
_, token = token.token_next(idx=idx)
if token and token.ttype == sqlparse.tokens.Literal.Number.Integer:
return int(token.value)
return None
def get_query_with_new_limit(self, new_limit: int) -> str:
"""
returns the query with the specified limit.

View File

@ -14,7 +14,6 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=C,R,W
import json
import os
from typing import Any, Dict
@ -40,7 +39,7 @@ def get_language_pack(locale):
with open(filename) as f:
pack = json.load(f)
ALL_LANGUAGE_PACKS[locale] = pack
except Exception:
except Exception: # pylint: disable=broad-except
# Assuming English; the client side falls back on English
pass
return pack

View File

@ -14,7 +14,6 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=C,R,W
from flask_appbuilder.models.sqla.interface import SQLAInterface
from flask_babel import gettext as __, lazy_gettext as _
from wtforms.validators import StopValidation
@ -25,7 +24,7 @@ from superset.models.annotations import Annotation, AnnotationLayer
from .base import DeleteMixin, SupersetModelView
class StartEndDttmValidator(object):
class StartEndDttmValidator(object): # pylint: disable=too-few-public-methods
"""
Validates dttm fields.
"""
@ -43,7 +42,9 @@ class StartEndDttmValidator(object):
)
class AnnotationModelView(SupersetModelView, DeleteMixin):
class AnnotationModelView(
SupersetModelView, DeleteMixin
): # pylint: disable=too-many-ancestors
datamodel = SQLAInterface(Annotation)
list_title = _("List Annotation")
@ -79,17 +80,19 @@ class AnnotationModelView(SupersetModelView, DeleteMixin):
validators_columns = {"start_dttm": [StartEndDttmValidator()]}
def pre_add(self, obj):
if not obj.start_dttm:
obj.start_dttm = obj.end_dttm
elif not obj.end_dttm:
obj.end_dttm = obj.start_dttm
def pre_add(self, item):
if not item.start_dttm:
item.start_dttm = item.end_dttm
elif not item.end_dttm:
item.end_dttm = item.start_dttm
def pre_update(self, obj):
self.pre_add(obj)
def pre_update(self, item):
self.pre_add(item)
class AnnotationLayerModelView(SupersetModelView, DeleteMixin):
class AnnotationLayerModelView(
SupersetModelView, DeleteMixin
): # pylint: disable=too-many-ancestors
datamodel = SQLAInterface(AnnotationLayer)
list_title = _("List Annotation Layer")

View File

@ -14,7 +14,6 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=C,R,W
import functools
import logging
import traceback
@ -67,7 +66,9 @@ def get_error_msg():
def json_error_response(msg=None, status=500, stacktrace=None, payload=None, link=None):
if not payload:
payload = {"error": "{}".format(msg)}
payload["stacktrace"] = utils.get_stacktrace()
if not stacktrace:
stacktrace = utils.get_stacktrace()
payload["stacktrace"] = stacktrace
if link:
payload["link"] = link
@ -103,7 +104,7 @@ def api(f):
def wraps(self, *args, **kwargs):
try:
return f(self, *args, **kwargs)
except Exception as e:
except Exception as e: # pylint: disable=broad-except
logging.exception(e)
return json_error_response(get_error_msg())
@ -142,7 +143,7 @@ def handle_api_exception(f):
stacktrace=traceback.format_exc(),
status=e.code,
)
except Exception as e:
except Exception as e: # pylint: disable=broad-except
logging.exception(e)
return json_error_response(
utils.error_msg_from_exception(e), stacktrace=utils.get_stacktrace()
@ -163,7 +164,7 @@ def get_user_roles():
class BaseSupersetView(BaseView):
def json_response(self, obj, status=200):
def json_response(self, obj, status=200): # pylint: disable=no-self-use
return Response(
json.dumps(obj, default=utils.json_int_dttm_ser, ignore_nan=True),
status=status,
@ -232,7 +233,7 @@ def common_bootstrap_payload():
}
class SupersetListWidget(ListWidget):
class SupersetListWidget(ListWidget): # pylint: disable=too-few-public-methods
template = "superset/fab_overrides/list.html"
@ -241,7 +242,7 @@ class SupersetModelView(ModelView):
list_widget = SupersetListWidget
class ListWidgetWithCheckboxes(ListWidget):
class ListWidgetWithCheckboxes(ListWidget): # pylint: disable=too-few-public-methods
"""An alternative to list view that renders Boolean fields as checkboxes
Works in conjunction with the `checkbox` view."""
@ -249,7 +250,7 @@ class ListWidgetWithCheckboxes(ListWidget):
template = "superset/fab_overrides/list_with_checkboxes.html"
def validate_json(form, field):
def validate_json(_form, field):
try:
json.loads(field.data)
except Exception as e:
@ -257,13 +258,14 @@ def validate_json(form, field):
raise Exception(_("json isn't valid"))
class YamlExportMixin(object):
yaml_dict_key: Optional[str] = None
class YamlExportMixin(object): # pylint: disable=too-few-public-methods
"""
Override this if you want a dict response instead, with a certain key.
Used on DatabaseView for cli compatibility
"""
yaml_dict_key: Optional[str] = None
@action("yaml_export", __("Export to YAML"), __("Export to YAML?"), "fa-download")
def yaml_export(self, items):
if not isinstance(items, list):
@ -279,21 +281,21 @@ class YamlExportMixin(object):
)
class DeleteMixin(object):
def _delete(self, pk):
class DeleteMixin(object): # pylint: disable=too-few-public-methods
def _delete(self, primary_key):
"""
Delete function logic, override to implement different logic
deletes the record with primary_key = pk
deletes the record with primary_key = primary_key
:param pk:
:param primary_key:
record primary key to delete
"""
item = self.datamodel.get(pk, self._base_filters)
item = self.datamodel.get(primary_key, self._base_filters)
if not item:
abort(404)
try:
self.pre_delete(item)
except Exception as e:
except Exception as e: # pylint: disable=broad-except
flash(str(e), "danger")
else:
view_menu = security_manager.find_view_menu(item.get_perm())
@ -328,7 +330,7 @@ class DeleteMixin(object):
for item in items:
try:
self.pre_delete(item)
except Exception as e:
except Exception as e: # pylint: disable=broad-except
flash(str(e), "danger")
else:
self._delete(item.id)
@ -336,8 +338,8 @@ class DeleteMixin(object):
return redirect(self.get_redirect())
class DatasourceFilter(BaseFilter):
def apply(self, query, func): # noqa
class DatasourceFilter(BaseFilter): # pylint: disable=too-few-public-methods
def apply(self, query, value):
if security_manager.all_datasource_access():
return query
datasource_perms = security_manager.user_view_menu_names("datasource_access")
@ -350,7 +352,7 @@ class DatasourceFilter(BaseFilter):
)
class CsvResponse(Response):
class CsvResponse(Response): # pylint: disable=too-many-ancestors
"""
Override Response to take into account csv encoding from config.py
"""
@ -381,8 +383,8 @@ def check_ownership(obj, raise_if_false=True):
roles = [r.name for r in get_user_roles()]
if "Admin" in roles:
return True
session = db.create_scoped_session()
orig_obj = session.query(obj.__class__).filter_by(id=obj.id).first()
scoped_session = db.create_scoped_session()
orig_obj = scoped_session.query(obj.__class__).filter_by(id=obj.id).first()
# Making a list of owners that works across ORM models
owners = []
@ -404,7 +406,7 @@ def check_ownership(obj, raise_if_false=True):
def bind_field(
self, form: DynamicForm, unbound_field: UnboundField, options: Dict[Any, Any]
_, form: DynamicForm, unbound_field: UnboundField, options: Dict[Any, Any]
) -> Field:
"""
Customize how fields are bound by stripping all whitespace.

View File

@ -14,7 +14,6 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=C,R,W
from flask_appbuilder.models.sqla.interface import SQLAInterface
from flask_babel import gettext as __
@ -25,7 +24,7 @@ from superset.views.base import SupersetModelView
from . import LogMixin
class LogModelView(LogMixin, SupersetModelView):
class LogModelView(LogMixin, SupersetModelView): # pylint: disable=too-many-ancestors
datamodel = SQLAInterface(models.Log)

View File

@ -14,7 +14,6 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=C,R,W
import enum
from typing import Optional, Type
@ -42,7 +41,9 @@ from superset.views.core import json_success
from .base import DeleteMixin, SupersetModelView
class EmailScheduleView(SupersetModelView, DeleteMixin):
class EmailScheduleView(
SupersetModelView, DeleteMixin
): # pylint: disable=too-many-ancestors
_extra_data = {"test_email": False, "test_email_recipients": None}
schedule_type: Optional[Type] = None
schedule_type_model: Optional[Type] = None
@ -91,35 +92,35 @@ class EmailScheduleView(SupersetModelView, DeleteMixin):
self._extra_data["test_email"] = form.test_email.data
self._extra_data["test_email_recipients"] = test_email_recipients
def pre_add(self, obj):
def pre_add(self, item):
try:
recipients = get_email_address_list(obj.recipients)
obj.recipients = ", ".join(recipients)
recipients = get_email_address_list(item.recipients)
item.recipients = ", ".join(recipients)
except Exception:
raise SupersetException("Invalid email list")
obj.user = obj.user or g.user
if not croniter.is_valid(obj.crontab):
item.user = item.user or g.user
if not croniter.is_valid(item.crontab):
raise SupersetException("Invalid crontab format")
def pre_update(self, obj):
self.pre_add(obj)
def pre_update(self, item):
self.pre_add(item)
def post_add(self, obj):
def post_add(self, item):
# Schedule a test mail if the user requested for it.
if self._extra_data["test_email"]:
recipients = self._extra_data["test_email_recipients"] or obj.recipients
args = (self.schedule_type, obj.id)
recipients = self._extra_data["test_email_recipients"] or item.recipients
args = (self.schedule_type, item.id)
kwargs = dict(recipients=recipients)
schedule_email_report.apply_async(args=args, kwargs=kwargs)
# Notify the user that schedule changes will be activated only in the
# next hour
if obj.active:
if item.active:
flash("Schedule changes will get applied in one hour", "warning")
def post_update(self, obj):
self.post_add(obj)
def post_update(self, item):
self.post_add(item)
@has_access
@expose("/fetch/<int:item_id>/", methods=["GET"])
@ -149,7 +150,9 @@ class EmailScheduleView(SupersetModelView, DeleteMixin):
return json_success(json.dumps(schedules, default=json_iso_dttm_ser))
class DashboardEmailScheduleView(EmailScheduleView):
class DashboardEmailScheduleView(
EmailScheduleView
): # pylint: disable=too-many-ancestors
schedule_type = ScheduleType.dashboard.value
schedule_type_model = Dashboard
@ -202,13 +205,13 @@ class DashboardEmailScheduleView(EmailScheduleView):
"delivery_type": _("Delivery Type"),
}
def pre_add(self, obj):
if obj.dashboard is None:
def pre_add(self, item):
if item.dashboard is None:
raise SupersetException("Dashboard is mandatory")
super(DashboardEmailScheduleView, self).pre_add(obj)
super(DashboardEmailScheduleView, self).pre_add(item)
class SliceEmailScheduleView(EmailScheduleView):
class SliceEmailScheduleView(EmailScheduleView): # pylint: disable=too-many-ancestors
schedule_type = ScheduleType.slice.value
schedule_type_model = Slice
add_title = _("Schedule Email Reports for Charts")
@ -263,10 +266,10 @@ class SliceEmailScheduleView(EmailScheduleView):
"email_format": _("Email Format"),
}
def pre_add(self, obj):
if obj.slice is None:
def pre_add(self, item):
if item.slice is None:
raise SupersetException("Slice is mandatory")
super(SliceEmailScheduleView, self).pre_add(obj)
super(SliceEmailScheduleView, self).pre_add(item)
def _register_schedule_menus():