Improve code quality (#3480)
* add landscape?
* add code climate badges
* pylint first pass
* Try out yapf
* merge
* merge
* lint
* more yapf
* removed unnecessary corrections
This commit is contained in:
parent c3c9ceb1cc
commit ede1432936
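Most of the hunks below are mechanical: yapf rewraps long call sites and literals, and the remaining edits are manual pylint/flake8 fixes (no-else-return, isinstance() instead of type comparisons, import ordering, blank-line normalization). As a rough sketch of how such a pass can be reproduced (assuming yapf is installed; the exact style options used for this PR are not recorded here), the formatter can be driven from Python:

# Minimal sketch, not the exact tooling from this commit: reformat a snippet
# with yapf's Python API and report whether anything changed.
# Assumes `pip install yapf`; 'pep8' is a built-in yapf style name.
from yapf.yapflib.yapf_api import FormatCode

SOURCE = (
    "handler = TimedRotatingFileHandler(app.config.get('FILENAME'),\n"
    "                                   when=app.config.get('ROLLOVER'),\n"
    "                                   interval=app.config.get('INTERVAL'),\n"
    "                                   backupCount=app.config.get('BACKUP_COUNT'))\n"
)

formatted, changed = FormatCode(SOURCE, style_config='pep8')
print(changed)    # True when yapf had to rewrap the call
print(formatted)  # long argument list reflowed to fit the column limit

Running the files through the formatter and linters this way is what keeps the hunks below limited to wrapping, whitespace, and small idiom fixes.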
@@ -20,14 +20,12 @@ from werkzeug.contrib.fixers import ProxyFix
 from superset.connectors.connector_registry import ConnectorRegistry
 from superset import utils, config  # noqa
 
-
 APP_DIR = os.path.dirname(__file__)
 CONFIG_MODULE = os.environ.get('SUPERSET_CONFIG', 'superset.config')
 
 with open(APP_DIR + '/static/assets/backendSync.json', 'r') as f:
     frontend_config = json.load(f)
 
-
 app = Flask(__name__)
 app.config.from_object(CONFIG_MODULE)
 conf = app.config
@@ -53,14 +51,16 @@ def get_manifest_file(filename):
         parse_manifest_json()
     return '/static/assets/dist/' + manifest.get(filename, '')
 
 
 parse_manifest_json()
 
 
 @app.context_processor
 def get_js_manifest():
     return dict(js_manifest=get_manifest_file)
 
 #################################################################
 
 #################################################################
 
 for bp in conf.get('BLUEPRINTS'):
     try:
@@ -100,10 +100,11 @@ logging.getLogger().setLevel(app.config.get('LOG_LEVEL'))
 
 if app.config.get('ENABLE_TIME_ROTATE'):
     logging.getLogger().setLevel(app.config.get('TIME_ROTATE_LOG_LEVEL'))
-    handler = TimedRotatingFileHandler(app.config.get('FILENAME'),
-                                       when=app.config.get('ROLLOVER'),
-                                       interval=app.config.get('INTERVAL'),
-                                       backupCount=app.config.get('BACKUP_COUNT'))
+    handler = TimedRotatingFileHandler(
+        app.config.get('FILENAME'),
+        when=app.config.get('ROLLOVER'),
+        interval=app.config.get('INTERVAL'),
+        backupCount=app.config.get('BACKUP_COUNT'))
     logging.getLogger().addHandler(handler)
 
 if app.config.get('ENABLE_CORS'):
@@ -114,17 +115,18 @@ if app.config.get('ENABLE_PROXY_FIX'):
     app.wsgi_app = ProxyFix(app.wsgi_app)
 
 if app.config.get('ENABLE_CHUNK_ENCODING'):
-    class ChunkedEncodingFix(object):
+
+    class ChunkedEncodingFix(object):
         def __init__(self, app):
             self.app = app
 
         def __call__(self, environ, start_response):
             # Setting wsgi.input_terminated tells werkzeug.wsgi to ignore
             # content-length and read the stream till the end.
-            if 'chunked' == environ.get('HTTP_TRANSFER_ENCODING', '').lower():
+            if environ.get('HTTP_TRANSFER_ENCODING', '').lower() == u'chunked':
                 environ['wsgi.input_terminated'] = True
             return self.app(environ, start_response)
 
     app.wsgi_app = ChunkedEncodingFix(app.wsgi_app)
 
 if app.config.get('UPLOAD_FOLDER'):
@@ -142,8 +144,10 @@ class MyIndexView(IndexView):
     def index(self):
         return redirect('/superset/welcome')
 
+
 appbuilder = AppBuilder(
-    app, db.session,
+    app,
+    db.session,
     base_template='superset/base.html',
     indexview=MyIndexView,
     security_manager_class=app.config.get("CUSTOM_SECURITY_MANAGER"))
@@ -439,7 +439,7 @@ class PrestoEngineSpec(BaseEngineSpec):
         result_set_df = db.get_df(
             """SELECT table_schema, table_name FROM INFORMATION_SCHEMA.{}S
             ORDER BY concat(table_schema, '.', table_name)""".format(
-                datasource_type.upper()), None)
+                datasource_type.upper()), None)
         result_sets = defaultdict(list)
         for unused, row in result_set_df.iterrows():
             result_sets[row['table_schema']].append(row['table_name'])
@@ -1003,8 +1003,7 @@ class BQEngineSpec(BaseEngineSpec):
         tt = target_type.upper()
         if tt == 'DATE':
             return "'{}'".format(dttm.strftime('%Y-%m-%d'))
-        else:
-            return "'{}'".format(dttm.strftime('%Y-%m-%d %H:%M:%S'))
+        return "'{}'".format(dttm.strftime('%Y-%m-%d %H:%M:%S'))
 
 
 class ImpalaEngineSpec(BaseEngineSpec):
@@ -1028,8 +1027,7 @@ class ImpalaEngineSpec(BaseEngineSpec):
         tt = target_type.upper()
         if tt == 'DATE':
             return "'{}'".format(dttm.strftime('%Y-%m-%d'))
-        else:
-            return "'{}'".format(dttm.strftime('%Y-%m-%d %H:%M:%S'))
+        return "'{}'".format(dttm.strftime('%Y-%m-%d %H:%M:%S'))
 
 
 engines = {
@@ -4,8 +4,8 @@ from __future__ import division
 from __future__ import print_function
 from __future__ import unicode_literals
 
-from superset import frontend_config
 import re
+from superset import frontend_config
 
 FORM_DATA_KEY_WHITELIST = list(frontend_config.get('controls').keys()) + ['slice_id']
 
@@ -79,5 +79,3 @@ def cast_form_data(form_data):
         if k not in FORM_DATA_KEY_WHITELIST:
             del d[k]
     return d
-
-
@@ -16,8 +16,7 @@ from flask_appbuilder import Model
 import sqlalchemy as sqla
 from sqlalchemy import (
     Column, Integer, String, ForeignKey, Text, Boolean,
-    DateTime, Numeric,
-)
+    DateTime, Numeric, )
 from sqlalchemy.orm import backref, relationship
 
 from superset import sm
@@ -28,7 +27,6 @@ install_aliases()
 
 
 class Query(Model):
-
     """ORM model for SQL query"""
 
     __tablename__ = 'query'
@@ -39,8 +37,7 @@ class Query(Model):
 
     # Store the tmp table into the DB only if the user asks for it.
     tmp_table_name = Column(String(256))
-    user_id = Column(
-        Integer, ForeignKey('ab_user.id'), nullable=True)
+    user_id = Column(Integer, ForeignKey('ab_user.id'), nullable=True)
     status = Column(String(16), default=QueryStatus.PENDING)
     tab_name = Column(String(256))
     sql_editor_id = Column(String(256))
@@ -80,14 +77,11 @@ class Query(Model):
     database = relationship(
         'Database',
         foreign_keys=[database_id],
-        backref=backref('queries', cascade='all, delete-orphan')
-    )
-    user = relationship(
-        sm.user_model,
-        foreign_keys=[user_id])
+        backref=backref('queries', cascade='all, delete-orphan'))
+    user = relationship(sm.user_model, foreign_keys=[user_id])
 
     __table_args__ = (
-        sqla.Index('ti_user_id_changed_on', user_id, changed_on),
+        sqla.Index('ti_user_id_changed_on', user_id, changed_on),
     )
 
     @property
@@ -128,25 +122,19 @@ class Query(Model):
         """Name property"""
         ts = datetime.now().isoformat()
         ts = ts.replace('-', '').replace(':', '').split('.')[0]
-        tab = (
-            self.tab_name.replace(' ', '_').lower()
-            if self.tab_name
-            else 'notab'
-        )
+        tab = (self.tab_name.replace(' ', '_').lower()
+               if self.tab_name else 'notab')
         tab = re.sub(r'\W+', '', tab)
         return "sqllab_{tab}_{ts}".format(**locals())
 
 
 class SavedQuery(Model, AuditMixinNullable):
-
     """ORM model for SQL query"""
 
     __tablename__ = 'saved_query'
     id = Column(Integer, primary_key=True)
-    user_id = Column(
-        Integer, ForeignKey('ab_user.id'), nullable=True)
-    db_id = Column(
-        Integer, ForeignKey('dbs.id'), nullable=True)
+    user_id = Column(Integer, ForeignKey('ab_user.id'), nullable=True)
+    db_id = Column(Integer, ForeignKey('dbs.id'), nullable=True)
     schema = Column(String(128))
     label = Column(String(256))
     description = Column(Text)
@@ -158,8 +146,7 @@ class SavedQuery(Model, AuditMixinNullable):
     database = relationship(
         'Database',
         foreign_keys=[db_id],
-        backref=backref('saved_queries', cascade='all, delete-orphan')
-    )
+        backref=backref('saved_queries', cascade='all, delete-orphan'))
 
     @property
     def pop_tab_link(self):
@@ -11,7 +11,6 @@ from superset import conf, db, sm
 from superset.models import core as models
 from superset.connectors.connector_registry import ConnectorRegistry
 
-
 READ_ONLY_MODEL_VIEWS = {
     'DatabaseAsync',
     'DatabaseView',
@@ -104,16 +103,16 @@ def get_or_create_main_db():
 
 def is_admin_only(pvm):
     # not readonly operations on read only model views allowed only for admins
-    if (pvm.view_menu.name in READ_ONLY_MODEL_VIEWS and
-            pvm.permission.name not in READ_ONLY_PERMISSION):
+    if (pvm.view_menu.name in READ_ONLY_MODEL_VIEWS
+            and pvm.permission.name not in READ_ONLY_PERMISSION):
         return True
-    return (pvm.view_menu.name in ADMIN_ONLY_VIEW_MENUS or
-            pvm.permission.name in ADMIN_ONLY_PERMISSIONS)
+    return (pvm.view_menu.name in ADMIN_ONLY_VIEW_MENUS
+            or pvm.permission.name in ADMIN_ONLY_PERMISSIONS)
 
 
 def is_alpha_only(pvm):
-    if (pvm.view_menu.name in GAMMA_READ_ONLY_MODEL_VIEWS and
-            pvm.permission.name not in READ_ONLY_PERMISSION):
+    if (pvm.view_menu.name in GAMMA_READ_ONLY_MODEL_VIEWS
+            and pvm.permission.name not in READ_ONLY_PERMISSION):
         return True
     return pvm.permission.name in ALPHA_ONLY_PERMISSIONS
 
@@ -133,12 +132,14 @@ def is_gamma_pvm(pvm):
 
 def is_sql_lab_pvm(pvm):
     return pvm.view_menu.name in {'SQL Lab'} or pvm.permission.name in {
-        'can_sql_json', 'can_csv', 'can_search_queries'}
+        'can_sql_json', 'can_csv', 'can_search_queries'
+    }
 
 
 def is_granter_pvm(pvm):
-    return pvm.permission.name in {'can_override_role_permissions',
-                                   'can_approve'}
+    return pvm.permission.name in {
+        'can_override_role_permissions', 'can_approve'
+    }
 
 
 def set_role(role_name, pvm_check):
@@ -191,7 +192,7 @@ def create_missing_perms():
         metrics += list(db.session.query(datasource_class.metric_class).all())
 
     for metric in metrics:
-        if (metric.is_restricted):
+        if metric.is_restricted:
             merge_pv('metric_access', metric.perm)
 
 
@@ -4,16 +4,15 @@ from time import sleep
 from datetime import datetime
 import json
 import logging
-import uuid
 import pandas as pd
 import sqlalchemy
+import uuid
 
-from celery.exceptions import SoftTimeLimitExceeded
 from sqlalchemy.pool import NullPool
 from sqlalchemy.orm import sessionmaker
+from celery.exceptions import SoftTimeLimitExceeded
 
-from superset import (
-    app, db, utils, dataframe, results_backend)
+from superset import (app, db, utils, dataframe, results_backend)
 from superset.models.sql_lab import Query
 from superset.sql_parse import SupersetQuery
 from superset.db_engine_specs import LimitMethod
@@ -78,10 +77,9 @@ def get_session(nullpool):
         session_class = sessionmaker()
         session_class.configure(bind=engine)
         return session_class()
-    else:
-        session = db.session()
-        session.commit()  # HACK
-        return session
+    session = db.session()
+    session.commit()  # HACK
+    return session
 
 
 @celery_app.task(bind=True, soft_time_limit=SQLLAB_TIMEOUT)
@@ -103,7 +101,8 @@ def get_sql_results(
         raise
 
 
-def execute_sql(ctask, query_id, return_results=True, store_results=False, user_name=None):
+def execute_sql(
+        ctask, query_id, return_results=True, store_results=False, user_name=None):
     """Executes the sql query returns the results."""
     session = get_session(not ctask.request.called_directly)
 
@@ -144,13 +143,11 @@ def execute_sql(ctask, query_id, return_results=True, store_results=False, user_
         if not query.tmp_table_name:
             start_dttm = datetime.fromtimestamp(query.start_time)
             query.tmp_table_name = 'tmp_{}_table_{}'.format(
-                query.user_id,
-                start_dttm.strftime('%Y_%m_%d_%H_%M_%S'))
+                query.user_id, start_dttm.strftime('%Y_%m_%d_%H_%M_%S'))
         executed_sql = superset_query.as_create_table(query.tmp_table_name)
         query.select_as_cta_used = True
-    elif (
-            query.limit and superset_query.is_select() and
-            db_engine_spec.limit_method == LimitMethod.WRAP_SQL):
+    elif (query.limit and superset_query.is_select()
+          and db_engine_spec.limit_method == LimitMethod.WRAP_SQL):
         executed_sql = database.wrap_sql_limit(executed_sql, query.limit)
         query.limit_used = True
     try:
@@ -170,7 +167,7 @@ def execute_sql(ctask, query_id, return_results=True, store_results=False, user_
     logging.info("Set query to 'running'")
 
     engine = database.get_sqla_engine(
-        schema=query.schema, nullpool=not ctask.request.called_directly, user_name=user_name)
+        schema=query.schema, nullpool=not ctask.request.called_directly, user_name=user_name)
     try:
         engine = database.get_sqla_engine(
             schema=query.schema, nullpool=not ctask.request.called_directly, user_name=user_name)
@@ -178,8 +175,8 @@ def execute_sql(ctask, query_id, return_results=True, store_results=False, user_
         cursor = conn.cursor()
         logging.info("Running query: \n{}".format(executed_sql))
         logging.info(query.executed_sql)
-        cursor.execute(
-            query.executed_sql, **db_engine_spec.cursor_execute_kwargs)
+        cursor.execute(query.executed_sql,
+                       **db_engine_spec.cursor_execute_kwargs)
         logging.info("Handling cursor")
         db_engine_spec.handle_cursor(cursor, query, session)
         logging.info("Fetching data: {}".format(query.to_dict()))
@@ -202,29 +199,31 @@ def execute_sql(ctask, query_id, return_results=True, store_results=False, user_
         conn.close()
 
     if query.status == utils.QueryStatus.STOPPED:
-        return json.dumps({
-            'query_id': query.id,
-            'status': query.status,
-            'query': query.to_dict(),
-        }, default=utils.json_iso_dttm_ser)
+        return json.dumps(
+            {
+                'query_id': query.id,
+                'status': query.status,
+                'query': query.to_dict(),
+            },
+            default=utils.json_iso_dttm_ser)
 
     column_names = (
         [col[0] for col in cursor_description] if cursor_description else [])
     column_names = dedup(column_names)
-    cdf = dataframe.SupersetDataFrame(pd.DataFrame(
-        list(data), columns=column_names))
+    cdf = dataframe.SupersetDataFrame(
+        pd.DataFrame(list(data), columns=column_names))
 
     query.rows = cdf.size
     query.progress = 100
     query.status = QueryStatus.SUCCESS
     if query.select_as_cta:
-        query.select_sql = '{}'.format(database.select_star(
-            query.tmp_table_name,
-            limit=query.limit,
-            schema=database.force_ctas_schema,
-            show_cols=False,
-            latest_partition=False,
-        ))
+        query.select_sql = '{}'.format(
+            database.select_star(
+                query.tmp_table_name,
+                limit=query.limit,
+                schema=database.force_ctas_schema,
+                show_cols=False,
+                latest_partition=False, ))
     query.end_time = utils.now_as_float()
     session.merge(query)
     session.flush()
@@ -1,5 +1,5 @@
-from colorama import Fore, Style
 import logging
+from colorama import Fore, Style
 
 
 class BaseStatsLogger(object):
@@ -27,20 +27,18 @@ class BaseStatsLogger(object):
 
 
 class DummyStatsLogger(BaseStatsLogger):
-
     def incr(self, key):
         logging.info(
             Fore.CYAN + "[stats_logger] (incr) " + key + Style.RESET_ALL)
 
     def decr(self, key):
-        logging.info(
-            Fore.CYAN + "[stats_logger] (decr) " + key + Style.RESET_ALL)
+        logging.info(Fore.CYAN + "[stats_logger] (decr) " + key +
+                     Style.RESET_ALL)
 
     def gauge(self, key, value):
         logging.info((
             Fore.CYAN + "[stats_logger] (gauge) "
-            "{key} | {value}" + Style.RESET_ALL).format(**locals())
-        )
+            "{key} | {value}" + Style.RESET_ALL).format(**locals()))
 
 
 try:
@@ -48,10 +46,8 @@ try:
 
     class StatsdStatsLogger(BaseStatsLogger):
         def __init__(self, host, port, prefix='superset'):
-            self.client = StatsClient(
-                host=host,
-                port=port,
-                prefix=prefix)
+            super()
+            self.client = StatsClient(host=host, port=port, prefix=prefix)
 
         def incr(self, key):
             self.client.incr(key)
@@ -8,16 +8,16 @@ import decimal
 import functools
 import json
 import logging
-import numpy
 import os
-import parsedatetime
-import pytz
-import smtplib
-import sqlalchemy as sa
 import signal
+import parsedatetime
+import smtplib
+import pytz
+import sqlalchemy as sa
 import uuid
 import sys
 import zlib
+import numpy
 
 from builtins import object
 from datetime import date, datetime, time, timedelta
@@ -28,15 +28,16 @@ from email.mime.text import MIMEText
 from email.mime.multipart import MIMEMultipart
 from email.mime.application import MIMEApplication
 from email.utils import formatdate
 
 from flask import flash, Markup, render_template, url_for, redirect, request
 from flask_appbuilder.const import (
     LOGMSG_ERR_SEC_ACCESS_DENIED,
     FLAMSG_ERR_SEC_ACCESS_DENIED,
     PERMISSION_PREFIX
 )
-from flask_cache import Cache
 from flask_appbuilder._compat import as_unicode
 from flask_babel import gettext as __
+from flask_cache import Cache
 import markdown as md
 from past.builtins import basestring
 from pydruid.utils.having import Having
@@ -78,8 +79,7 @@ def can_access(sm, permission_name, view_name, user):
     """Protecting from has_access failing from missing perms/view"""
     if user.is_anonymous():
         return sm.is_item_public(permission_name, view_name)
-    else:
-        return sm._has_view_access(user, permission_name, view_name)
+    return sm._has_view_access(user, permission_name, view_name)
 
 
 def flasher(msg, severity=None):
@@ -94,7 +94,6 @@ def flasher(msg, severity=None):
 
 
 class memoized(object):  # noqa
-
     """Decorator that caches a function's return value each time it is called
 
     If called later with the same arguments, the cached value is returned, and
@@ -161,11 +160,13 @@ class DimSelector(Having):
         # Just a hack to prevent any exceptions
         Having.__init__(self, type='equalTo', aggregation=None, value=None)
 
-        self.having = {'having': {
-            'type': 'dimSelector',
-            'dimension': args['dimension'],
-            'value': args['value'],
-        }}
+        self.having = {
+            'having': {
+                'type': 'dimSelector',
+                'dimension': args['dimension'],
+                'value': args['value'],
+            }
+        }
 
 
 def list_minus(l, minus):
@@ -231,13 +232,11 @@ def parse_human_timedelta(s):
     cal = parsedatetime.Calendar()
     dttm = dttm_from_timtuple(datetime.now().timetuple())
     d = cal.parse(s, dttm)[0]
-    d = datetime(
-        d.tm_year, d.tm_mon, d.tm_mday, d.tm_hour, d.tm_min, d.tm_sec)
+    d = datetime(d.tm_year, d.tm_mon, d.tm_mday, d.tm_hour, d.tm_min, d.tm_sec)
     return d - dttm
 
 
 class JSONEncodedDict(TypeDecorator):
-
     """Represents an immutable structure as a json-encoded string."""
 
     impl = TEXT
@@ -301,8 +300,7 @@ def json_iso_dttm_ser(obj):
         obj = obj.isoformat()
     else:
         raise TypeError(
-            "Unserializable object {} of type {}".format(obj, type(obj))
-        )
+            "Unserializable object {} of type {}".format(obj, type(obj)))
     return obj
 
 
@@ -328,8 +326,7 @@ def json_int_dttm_ser(obj):
         obj = (obj - EPOCH.date()).total_seconds() * 1000
     else:
         raise TypeError(
-            "Unserializable object {} of type {}".format(obj, type(obj))
-        )
+            "Unserializable object {} of type {}".format(obj, type(obj)))
     return obj
 
 
@@ -353,7 +350,7 @@ def error_msg_from_exception(e):
     """
     msg = ''
    if hasattr(e, 'message'):
-        if type(e.message) is dict:
+        if isinstance(e.message, dict):
             msg = e.message.get('message')
         elif e.message:
             msg = "{}".format(e.message)
@@ -382,9 +379,8 @@ def generic_find_constraint_name(table, columns, referenced, db):
     t = sa.Table(table, db.metadata, autoload=True, autoload_with=db.engine)
 
     for fk in t.foreign_key_constraints:
-        if (
-                fk.referred_table.name == referenced and
-                set(fk.column_keys) == columns):
+        if (fk.referred_table.name == referenced
+                and set(fk.column_keys) == columns):
             return fk.name
 
 
@@ -421,6 +417,7 @@ class timeout(object):
     """
     To be used in a ``with`` block and timeout its content.
     """
+
     def __init__(self, seconds=1, error_message='Timeout'):
         self.seconds = seconds
         self.error_message = error_message
@@ -483,7 +480,6 @@ def pessimistic_connection_handling(some_engine):
 
 
 class QueryStatus(object):
-
     """Enum-type class for query statuses"""
 
     STOPPED = 'stopped'
@@ -539,11 +535,11 @@ def send_email_smtp(to, subject, html_content, config, files=None,
     for fname in files or []:
         basename = os.path.basename(fname)
         with open(fname, "rb") as f:
-            msg.attach(MIMEApplication(
-                f.read(),
-                Content_Disposition='attachment; filename="%s"' % basename,
-                Name=basename
-            ))
+            msg.attach(
+                MIMEApplication(
+                    f.read(),
+                    Content_Disposition='attachment; filename="%s"' % basename,
+                    Name=basename))
 
     send_MIME_email(smtp_mail_from, recipients, msg, config, dryrun=dryrun)
 
@@ -600,17 +596,20 @@ def has_access(f):
 
     def wraps(self, *args, **kwargs):
         permission_str = PERMISSION_PREFIX + f._permission_name
-        if self.appbuilder.sm.has_access(
-                permission_str, self.__class__.__name__):
+        if self.appbuilder.sm.has_access(permission_str,
+                                         self.__class__.__name__):
             return f(self, *args, **kwargs)
         else:
-            logging.warning(LOGMSG_ERR_SEC_ACCESS_DENIED.format(
-                permission_str, self.__class__.__name__))
+            logging.warning(
+                LOGMSG_ERR_SEC_ACCESS_DENIED.format(permission_str,
+                                                    self.__class__.__name__))
             flash(as_unicode(FLAMSG_ERR_SEC_ACCESS_DENIED), "danger")
             # adds next arg to forward to the original path once user is logged in.
-            return redirect(url_for(
-                self.appbuilder.sm.auth_view.__class__.__name__ + ".login",
-                next=request.path))
+            return redirect(
+                url_for(
+                    self.appbuilder.sm.auth_view.__class__.__name__ + ".login",
+                    next=request.path))
+
     f._permission_name = permission_str
     return functools.update_wrapper(wraps, f)
 
@@ -656,6 +655,7 @@ def zlib_decompress_to_string(blob):
         return decompressed.decode("utf-8")
     return zlib.decompress(blob)
 
+
 _celery_app = None
 
 