Re-enable pylint for common, connector, and test files (#8774)
* Re-enable pylint for superset/common/query_context.py
* Re-enable pylint for superset/connectors/base/views.py
* Re-enable pylint for superset/connectors/connector_registry.py
* Re-enable pylint for tests/fixtures/pyodbcRow.py
* Re-enable pylint for superset/cli.py
* Fix for mypy
* Improve variable naming
* A few more naming fixes
parent 9f16d053e1
commit 7f8db7ab8d
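The blanket `# pylint: disable=C,R,W` header that this commit deletes from each file suppresses every convention (C), refactoring (R), and warning (W) message for the whole module. The replacement pattern throughout the diff is a named, line-scoped disable, so only the one acknowledged check is silenced. A minimal sketch of the two styles (the function below is hypothetical, not from the diff):

    # Module-wide blanket disable (removed by this commit) sits at the top
    # of a file and silences all C, R, and W messages everywhere in it:
    #
    #   # pylint: disable=C,R,W
    #
    # Line-scoped disable (added by this commit) names one check and
    # applies to a single line, leaving every other check active:
    def parse_floats(values):
        try:
            return [float(v) for v in values]
        except Exception:  # pylint: disable=broad-except
            return []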
superset/cli.py
@@ -15,7 +15,6 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-# pylint: disable=C,R,W
 import logging
 from datetime import datetime
 from subprocess import Popen
@@ -41,7 +40,7 @@ def superset():
     """This is a management script for the Superset application."""

     @app.shell_context_processor
-    def make_shell_context():
+    def make_shell_context():  # pylint: disable=unused-variable
         return dict(app=app, db=db)


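Context for the `unused-variable` disable above: `make_shell_context` is consumed by the `@app.shell_context_processor` decorator at definition time and is never called by name, a registration pattern pylint cannot follow. A self-contained sketch of the same false positive (the registry here is hypothetical):

    _hooks = []

    def register(func):
        # store the function for later; the caller never references it again
        _hooks.append(func)
        return func

    def setup():
        @register
        def on_start():  # pylint: disable=unused-variable
            return "started"

    setup()
    print([hook() for hook in _hooks])  # ['started']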
@@ -179,7 +178,7 @@ def refresh_druid(datasource, merge):
     for cluster in session.query(DruidCluster).all():
         try:
             cluster.refresh_datasources(datasource_name=datasource, merge_flag=merge)
-        except Exception as e:
+        except Exception as e:  # pylint: disable=broad-except
             print("Error while processing cluster '{}'\n{}".format(cluster, str(e)))
             logging.exception(e)
         cluster.metadata_last_refreshed = datetime.now()
@@ -212,23 +211,23 @@ def import_dashboards(path, recursive, username):
     """Import dashboards from JSON"""
     from superset.utils import dashboard_import_export

-    p = Path(path)
+    path_object = Path(path)
     files = []
-    if p.is_file():
-        files.append(p)
-    elif p.exists() and not recursive:
-        files.extend(p.glob("*.json"))
-    elif p.exists() and recursive:
-        files.extend(p.rglob("*.json"))
+    if path_object.is_file():
+        files.append(path_object)
+    elif path_object.exists() and not recursive:
+        files.extend(path_object.glob("*.json"))
+    elif path_object.exists() and recursive:
+        files.extend(path_object.rglob("*.json"))
     if username is not None:
         g.user = security_manager.find_user(username=username)
-    for f in files:
-        logging.info("Importing dashboard from file %s", f)
+    for file_ in files:
+        logging.info("Importing dashboard from file %s", file_)
         try:
-            with f.open() as data_stream:
+            with file_.open() as data_stream:
                 dashboard_import_export.import_dashboards(db.session, data_stream)
-        except Exception as e:
-            logging.error("Error when importing dashboard from file %s", f)
+        except Exception as e:  # pylint: disable=broad-except
+            logging.error("Error when importing dashboard from file %s", file_)
             logging.error(e)


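The renamed `path_object` logic accepts a single file, a flat directory, or a recursive scan: `Path.glob` matches direct children only, while `Path.rglob` descends into subdirectories. A standalone sketch of the same discovery logic (the example path in the comment is hypothetical):

    from pathlib import Path
    from typing import List

    def find_json_files(path: str, recursive: bool) -> List[Path]:
        path_object = Path(path)
        files: List[Path] = []
        if path_object.is_file():
            files.append(path_object)
        elif path_object.exists() and not recursive:
            files.extend(path_object.glob("*.json"))   # direct children only
        elif path_object.exists() and recursive:
            files.extend(path_object.rglob("*.json"))  # walks subdirectories too
        return files

    # e.g. find_json_files("dashboards", recursive=True)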
@@ -282,25 +281,25 @@ def import_datasources(path, sync, recursive):
     from superset.utils import dict_import_export

     sync_array = sync.split(",")
-    p = Path(path)
+    path_object = Path(path)
     files = []
-    if p.is_file():
-        files.append(p)
-    elif p.exists() and not recursive:
-        files.extend(p.glob("*.yaml"))
-        files.extend(p.glob("*.yml"))
-    elif p.exists() and recursive:
-        files.extend(p.rglob("*.yaml"))
-        files.extend(p.rglob("*.yml"))
-    for f in files:
-        logging.info("Importing datasources from file %s", f)
+    if path_object.is_file():
+        files.append(path_object)
+    elif path_object.exists() and not recursive:
+        files.extend(path_object.glob("*.yaml"))
+        files.extend(path_object.glob("*.yml"))
+    elif path_object.exists() and recursive:
+        files.extend(path_object.rglob("*.yaml"))
+        files.extend(path_object.rglob("*.yml"))
+    for file_ in files:
+        logging.info("Importing datasources from file %s", file_)
         try:
-            with f.open() as data_stream:
+            with file_.open() as data_stream:
                 dict_import_export.import_from_dict(
                     db.session, yaml.safe_load(data_stream), sync=sync_array
                 )
-        except Exception as e:
-            logging.error("Error when importing datasources from file %s", f)
+        except Exception as e:  # pylint: disable=broad-except
+            logging.error("Error when importing datasources from file %s", file_)
             logging.error(e)


@@ -379,7 +378,7 @@ def update_datasources_cache():
             database.get_all_view_names_in_database(
                 force=True, cache=True, cache_timeout=24 * 60 * 60
             )
-        except Exception as e:
+        except Exception as e:  # pylint: disable=broad-except
             print("{}".format(str(e)))


@@ -401,8 +400,8 @@ def worker(workers):
             CELERYD_CONCURRENCY=app.config["SUPERSET_CELERY_WORKERS"]
         )

-    worker = celery_app.Worker(optimization="fair")
-    worker.start()
+    local_worker = celery_app.Worker(optimization="fair")
+    local_worker.start()


 @superset.command()
@@ -418,10 +417,10 @@ def flower(port, address):

     Celery Flower is a UI to monitor the Celery operation on a given
     broker"""
-    BROKER_URL = celery_app.conf.BROKER_URL
+    broker_url = celery_app.conf.BROKER_URL
     cmd = (
         "celery flower "
-        f"--broker={BROKER_URL} "
+        f"--broker={broker_url} "
         f"--port={port} "
         f"--address={address} "
     )
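Both renames in the last two hunks are naming fixes: the local `worker` shadowed the enclosing function's own name, and `BROKER_URL` looked like a module-level constant. Pylint's `invalid-name` check reserves UPPER_CASE for constants at module scope and expects snake_case for locals. A small sketch of the convention (names below are illustrative, not from the diff):

    DEFAULT_PORT = 5555  # module level: UPPER_CASE reads as a constant

    def build_flower_command(broker_url: str, port: int = DEFAULT_PORT) -> str:
        # locals are snake_case; an UPPER_CASE name here would be flagged
        cmd = f"celery flower --broker={broker_url} --port={port}"
        return cmd

    print(build_flower_command("redis://localhost:6379/0"))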
superset/common/query_context.py
@@ -14,7 +14,6 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-# pylint: disable=C,R,W
 import logging
 import pickle as pkl
 from datetime import datetime, timedelta
@@ -116,14 +115,18 @@ class QueryContext:
             "df": df,
         }

-    def df_metrics_to_num(self, df: pd.DataFrame, query_object: QueryObject) -> None:
+    def df_metrics_to_num(  # pylint: disable=invalid-name,no-self-use
+        self, df: pd.DataFrame, query_object: QueryObject
+    ) -> None:
         """Converting metrics to numeric when pandas.read_sql cannot"""
         metrics = [metric for metric in query_object.metrics]
         for col, dtype in df.dtypes.items():
             if dtype.type == np.object_ and col in metrics:
                 df[col] = pd.to_numeric(df[col], errors="coerce")

-    def get_data(self, df: pd.DataFrame) -> List[Dict]:
+    def get_data(  # pylint: disable=invalid-name,no-self-use
+        self, df: pd.DataFrame
+    ) -> List[Dict]:
         return df.to_dict(orient="records")

     def get_single_payload(self, query_obj: QueryObject) -> Dict[str, Any]:
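`df_metrics_to_num` exists because some database drivers hand metric columns back as strings when `pandas.read_sql` cannot infer a numeric dtype; `errors="coerce"` turns any unparseable value into NaN instead of raising. A self-contained sketch of the coercion (the column names are hypothetical):

    import numpy as np
    import pandas as pd

    df = pd.DataFrame({"metric": ["1.5", "2", "oops"], "label": ["a", "b", "c"]})
    metrics = ["metric"]

    for col, dtype in df.dtypes.items():
        if dtype.type == np.object_ and col in metrics:
            # unparseable values become NaN rather than raising
            df[col] = pd.to_numeric(df[col], errors="coerce")

    print(df["metric"].tolist())  # [1.5, 2.0, nan]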
@@ -157,7 +160,9 @@ class QueryContext:
             return self.datasource.database.cache_timeout
         return config["CACHE_DEFAULT_TIMEOUT"]

-    def get_df_payload(self, query_obj: QueryObject, **kwargs) -> Dict[str, Any]:
+    def get_df_payload(  # pylint: disable=too-many-locals,too-many-statements
+        self, query_obj: QueryObject, **kwargs
+    ) -> Dict[str, Any]:
         """Handles caching around the df payload retrieval"""
         extra_cache_keys = self.datasource.get_extra_cache_keys(query_obj.to_dict())
         cache_key = (
@@ -169,7 +174,7 @@ class QueryContext:
             if query_obj
             else None
         )
-        logging.info("Cache key: {}".format(cache_key))
+        logging.info("Cache key: %s", cache_key)
         is_loaded = False
         stacktrace = None
         df = None
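The switch from `"Cache key: {}".format(cache_key)` to `%s` arguments is the lazy style pylint prefers for logging calls: the logging module formats the message only if the record is actually emitted, so a filtered-out call costs almost nothing. A minimal demonstration:

    import logging

    logging.basicConfig(level=logging.WARNING)
    cache_key = "abc123"

    # the .format() runs even though INFO is filtered out at WARNING level
    logging.info("Cache key: {}".format(cache_key))
    # formatting is deferred and skipped entirely for this filtered call
    logging.info("Cache key: %s", cache_key)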
@@ -188,10 +193,10 @@ class QueryContext:
                     query = cache_value["query"]
                     status = utils.QueryStatus.SUCCESS
                     is_loaded = True
-                except Exception as e:
+                except Exception as e:  # pylint: disable=broad-except
                     logging.exception(e)
                     logging.error(
-                        "Error reading cache: " + utils.error_msg_from_exception(e)
+                        "Error reading cache: %s", utils.error_msg_from_exception(e)
                     )
                 logging.info("Serving from cache")

@@ -205,7 +210,7 @@ class QueryContext:
                 if status != utils.QueryStatus.FAILED:
                     stats_logger.incr("loaded_from_source")
                     is_loaded = True
-            except Exception as e:
+            except Exception as e:  # pylint: disable=broad-except
                 logging.exception(e)
                 if not error_message:
                     error_message = "{}".format(e)
@@ -218,17 +223,15 @@ class QueryContext:
                 cache_binary = pkl.dumps(cache_value, protocol=pkl.HIGHEST_PROTOCOL)

                 logging.info(
-                    "Caching {} chars at key {}".format(
-                        len(cache_binary), cache_key
-                    )
+                    "Caching %d chars at key %s", len(cache_binary), cache_key
                 )

                 stats_logger.incr("set_cache_key")
                 cache.set(cache_key, cache_binary, timeout=self.cache_timeout)
-            except Exception as e:
+            except Exception as e:  # pylint: disable=broad-except
                 # cache.set call can fail if the backend is down or if
                 # the key is too large or whatever other reasons
-                logging.warning("Could not cache key {}".format(cache_key))
+                logging.warning("Could not cache key %s", cache_key)
                 logging.exception(e)
                 cache.delete(cache_key)
             return {
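The write path above follows a guarded pattern: serialize the payload, log its size lazily, write with a timeout, and delete the key if anything fails so a half-written entry can never be served. A self-contained sketch, with a plain dict standing in for the real cache backend (the dict and the helper name are assumptions for illustration):

    import logging
    import pickle as pkl

    cache = {}  # stand-in for the configured cache backend; ignores timeouts

    def write_cache(cache_key, cache_value, timeout):
        try:
            cache_binary = pkl.dumps(cache_value, protocol=pkl.HIGHEST_PROTOCOL)
            logging.info("Caching %d chars at key %s", len(cache_binary), cache_key)
            cache[cache_key] = cache_binary  # real backend: cache.set(key, value, timeout=timeout)
        except Exception as e:  # pylint: disable=broad-except
            # never leave a partial entry behind on failure
            logging.warning("Could not cache key %s", cache_key)
            logging.exception(e)
            cache.pop(cache_key, None)  # real backend: cache.delete(cache_key)

    write_cache("demo-key", {"df": None, "query": "SELECT 1"}, timeout=60)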
superset/connectors/base/views.py
@@ -14,7 +14,6 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-# pylint: disable=C,R,W
 from flask import Markup

 from superset.exceptions import SupersetException
@@ -22,12 +21,12 @@ from superset.views.base import SupersetModelView


 class DatasourceModelView(SupersetModelView):
-    def pre_delete(self, obj):
-        if obj.slices:
+    def pre_delete(self, item):
+        if item.slices:
             raise SupersetException(
                 Markup(
                     "Cannot delete a datasource that has slices attached to it."
                     "Here's the list of associated charts: "
-                    + "".join([o.slice_name for o in obj.slices])
+                    + "".join([i.slice_name for i in item.slices])
                 )
             )
superset/connectors/connector_registry.py
@@ -14,7 +14,6 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-# pylint: disable=C,R,W
 from collections import OrderedDict
 from typing import Dict, List, Optional, Set, Type, TYPE_CHECKING

@@ -22,6 +21,7 @@ from sqlalchemy import or_
 from sqlalchemy.orm import Session, subqueryload

 if TYPE_CHECKING:
+    # pylint: disable=unused-import
     from superset.models.core import Database
     from superset.connectors.base.models import BaseDatasource

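`TYPE_CHECKING` is `False` at runtime, so the imports in that block never execute: they exist only for type annotations, which also sidesteps circular imports between the registry and the model modules. Pylint still parses them as imports it never sees used, hence the scoped `unused-import` disable. A minimal sketch of the pattern (the `Decimal` dependency here is a stand-in, not from the diff):

    from typing import TYPE_CHECKING, List

    if TYPE_CHECKING:
        # pylint: disable=unused-import
        from decimal import Decimal  # type-only dependency; never imported at runtime

    def total(amounts: List["Decimal"]) -> "Decimal":
        result = amounts[0]
        for amount in amounts[1:]:
            result += amount
        return result

    # e.g. total([Decimal("1.5"), Decimal("2")]) once the caller imports Decimal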
@@ -61,7 +61,7 @@ class ConnectorRegistry(object):
         return datasources

     @classmethod
-    def get_datasource_by_name(
+    def get_datasource_by_name(  # pylint: disable=too-many-arguments
         cls,
         session: Session,
         datasource_type: str,
@@ -75,7 +75,7 @@ class ConnectorRegistry(object):
         )

     @classmethod
-    def query_datasources_by_permissions(
+    def query_datasources_by_permissions(  # pylint: disable=invalid-name
         cls,
         session: Session,
         database: "Database",
tests/fixtures/pyodbcRow.py
@@ -14,14 +14,14 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-# pylint: disable=C,R,W
+# pylint: disable=invalid-name


 class Row(object):
     def __init__(self, values):
         self.values = values

-    def __name__(self):
+    def __name__(self):  # pylint: disable=no-self-use
         return "Row"

     def __iter__(self):
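This fixture mimics a `pyodbc` result row for tests, presumably so results can be iterated and pickled in ways the real driver rows do not support. A hedged sketch of how a test might exercise it (`__iter__` is assumed to yield the stored values; the import path matches the fixture file):

    import pickle

    from tests.fixtures.pyodbcRow import Row

    row = Row((1, "alice", 3.14))
    assert list(row) == [1, "alice", 3.14]  # assumes __iter__ walks self.values
    assert pickle.loads(pickle.dumps(row)).values == row.values  # round-trips cleanly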