[flake8] Resolving Q??? errors (#3847)

parent 630604bc6b
commit ac57780607
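For context: the Q-prefixed codes in the commit title come from the flake8-quotes plugin; Q000 is "Double quotes found but single quotes preferred". The diff below accordingly rewrites double-quoted string literals to single quotes, leaving a string double-quoted when it contains a single quote (so no escaping is needed) and leaving docstrings untouched. A minimal sketch of the rule's effect, assuming flake8-quotes is installed and configured with inline-quotes = single — this snippet is illustrative and not part of the commit:

    # Q000: flagged when inline-quotes = single
    print("Loading [Birth names]")

    # compliant rewrite, as done throughout this commit
    print('Loading [Birth names]')

    # still double-quoted: the string contains a single quote,
    # which flake8-quotes accepts because switching would force an escape
    reload_help = "Don't use the reloader in debug mode"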
setup.py (14 changed lines)

@@ -16,7 +16,7 @@ def get_git_sha():
         s = str(subprocess.check_output(['git', 'rev-parse', 'HEAD']))
         return s.strip()
     except Exception:
-        return ""
+        return ''


 GIT_SHA = get_git_sha()
@@ -24,10 +24,10 @@ version_info = {
     'GIT_SHA': GIT_SHA,
     'version': version_string,
 }
-print("-==-" * 15)
-print("VERSION: " + version_string)
-print("GIT SHA: " + GIT_SHA)
-print("-==-" * 15)
+print('-==-' * 15)
+print('VERSION: ' + version_string)
+print('GIT SHA: ' + GIT_SHA)
+print('-==-' * 15)

 with open(os.path.join(PACKAGE_DIR, 'version_info.json'), 'w') as version_file:
     json.dump(version_info, version_file)
@@ -36,8 +36,8 @@ with open(os.path.join(PACKAGE_DIR, 'version_info.json'), 'w') as version_file:
 setup(
     name='superset',
     description=(
-        "A interactive data visualization platform build on SqlAlchemy "
-        "and druid.io"),
+        'A interactive data visualization platform build on SqlAlchemy '
+        'and druid.io'),
     version=version_string,
     packages=find_packages(),
     include_package_data=True,
superset/__init__.py

@@ -42,7 +42,7 @@ def parse_manifest_json():
         with open(MANIFEST_FILE, 'r') as f:
             manifest = json.load(f)
     except Exception:
-        print("no manifest file found at " + MANIFEST_FILE)
+        print('no manifest file found at ' + MANIFEST_FILE)


 def get_manifest_file(filename):
@@ -66,7 +66,7 @@ for bp in conf.get('BLUEPRINTS'):
         print("Registering blueprint: '{}'".format(bp.name))
         app.register_blueprint(bp)
     except Exception as e:
-        print("blueprint registration failed")
+        print('blueprint registration failed')
         logging.exception(e)

 if conf.get('SILENCE_FAB'):
@@ -91,7 +91,7 @@ utils.pessimistic_connection_handling(db.engine)
 cache = utils.setup_cache(app, conf.get('CACHE_CONFIG'))
 tables_cache = utils.setup_cache(app, conf.get('TABLE_NAMES_CACHE_CONFIG'))

-migrate = Migrate(app, db, directory=APP_DIR + "/migrations")
+migrate = Migrate(app, db, directory=APP_DIR + '/migrations')

 # Logging configuration
 logging.basicConfig(format=app.config.get('LOG_FORMAT'))
@@ -149,15 +149,15 @@ appbuilder = AppBuilder(
     db.session,
     base_template='superset/base.html',
     indexview=MyIndexView,
-    security_manager_class=app.config.get("CUSTOM_SECURITY_MANAGER"))
+    security_manager_class=app.config.get('CUSTOM_SECURITY_MANAGER'))

 sm = appbuilder.sm

-results_backend = app.config.get("RESULTS_BACKEND")
+results_backend = app.config.get('RESULTS_BACKEND')

 # Registering sources
-module_datasource_map = app.config.get("DEFAULT_MODULE_DS_MAP")
-module_datasource_map.update(app.config.get("ADDITIONAL_MODULE_DS_MAP"))
+module_datasource_map = app.config.get('DEFAULT_MODULE_DS_MAP')
+module_datasource_map.update(app.config.get('ADDITIONAL_MODULE_DS_MAP'))
 ConnectorRegistry.register_sources(module_datasource_map)

 from superset import views  # noqa
superset/cli.py (106 changed lines)

@@ -29,38 +29,38 @@ def init():

 @manager.option(
     '-d', '--debug', action='store_true',
-    help="Start the web server in debug mode")
+    help='Start the web server in debug mode')
 @manager.option(
     '-n', '--no-reload', action='store_false', dest='no_reload',
-    default=config.get("FLASK_USE_RELOAD"),
+    default=config.get('FLASK_USE_RELOAD'),
     help="Don't use the reloader in debug mode")
 @manager.option(
-    '-a', '--address', default=config.get("SUPERSET_WEBSERVER_ADDRESS"),
-    help="Specify the address to which to bind the web server")
+    '-a', '--address', default=config.get('SUPERSET_WEBSERVER_ADDRESS'),
+    help='Specify the address to which to bind the web server')
 @manager.option(
-    '-p', '--port', default=config.get("SUPERSET_WEBSERVER_PORT"),
-    help="Specify the port on which to run the web server")
+    '-p', '--port', default=config.get('SUPERSET_WEBSERVER_PORT'),
+    help='Specify the port on which to run the web server')
 @manager.option(
     '-w', '--workers',
-    default=config.get("SUPERSET_WORKERS", 2),
-    help="Number of gunicorn web server workers to fire up")
+    default=config.get('SUPERSET_WORKERS', 2),
+    help='Number of gunicorn web server workers to fire up')
 @manager.option(
-    '-t', '--timeout', default=config.get("SUPERSET_WEBSERVER_TIMEOUT"),
-    help="Specify the timeout (seconds) for the gunicorn web server")
+    '-t', '--timeout', default=config.get('SUPERSET_WEBSERVER_TIMEOUT'),
+    help='Specify the timeout (seconds) for the gunicorn web server')
 @manager.option(
-    '-s', '--socket', default=config.get("SUPERSET_WEBSERVER_SOCKET"),
-    help="Path to a UNIX socket as an alternative to address:port, e.g. "
-         "/var/run/superset.sock. "
-         "Will override the address and port values.")
+    '-s', '--socket', default=config.get('SUPERSET_WEBSERVER_SOCKET'),
+    help='Path to a UNIX socket as an alternative to address:port, e.g. '
+         '/var/run/superset.sock. '
+         'Will override the address and port values.')
 def runserver(debug, no_reload, address, port, timeout, workers, socket):
     """Starts a Superset web server."""
-    debug = debug or config.get("DEBUG")
+    debug = debug or config.get('DEBUG')
     if debug:
         print(Fore.BLUE + '-=' * 20)
         print(
-            Fore.YELLOW + "Starting Superset server in " +
-            Fore.RED + "DEBUG" +
-            Fore.YELLOW + " mode")
+            Fore.YELLOW + 'Starting Superset server in ' +
+            Fore.RED + 'DEBUG' +
+            Fore.YELLOW + ' mode')
         print(Fore.BLUE + '-=' * 20)
         print(Style.RESET_ALL)
         app.run(
@@ -70,16 +70,16 @@ def runserver(debug, no_reload, address, port, timeout, workers, socket):
             debug=True,
             use_reloader=no_reload)
     else:
-        addr_str = " unix:{socket} " if socket else" {address}:{port} "
+        addr_str = ' unix:{socket} ' if socket else' {address}:{port} '
         cmd = (
-            "gunicorn "
-            "-w {workers} "
-            "--timeout {timeout} "
-            "-b " + addr_str +
-            "--limit-request-line 0 "
-            "--limit-request-field_size 0 "
-            "superset:app").format(**locals())
-        print(Fore.GREEN + "Starting server with command: ")
+            'gunicorn '
+            '-w {workers} '
+            '--timeout {timeout} '
+            '-b ' + addr_str +
+            '--limit-request-line 0 '
+            '--limit-request-field_size 0 '
+            'superset:app').format(**locals())
+        print(Fore.GREEN + 'Starting server with command: ')
         print(Fore.YELLOW + cmd)
         print(Style.RESET_ALL)
         Popen(cmd, shell=True).wait()
@@ -87,69 +87,69 @@ def runserver(debug, no_reload, address, port, timeout, workers, socket):

 @manager.option(
     '-v', '--verbose', action='store_true',
-    help="Show extra information")
+    help='Show extra information')
 def version(verbose):
     """Prints the current version number"""
     print(Fore.BLUE + '-=' * 15)
-    print(Fore.YELLOW + "Superset " + Fore.CYAN + "{version}".format(
+    print(Fore.YELLOW + 'Superset ' + Fore.CYAN + '{version}'.format(
         version=config.get('VERSION_STRING')))
     print(Fore.BLUE + '-=' * 15)
     if verbose:
-        print("[DB] : " + "{}".format(db.engine))
+        print('[DB] : ' + '{}'.format(db.engine))
     print(Style.RESET_ALL)


 @manager.option(
     '-t', '--load-test-data', action='store_true',
-    help="Load additional test data")
+    help='Load additional test data')
 def load_examples(load_test_data):
     """Loads a set of Slices and Dashboards and a supporting dataset """
     from superset import data
-    print("Loading examples into {}".format(db))
+    print('Loading examples into {}'.format(db))

     data.load_css_templates()

-    print("Loading energy related dataset")
+    print('Loading energy related dataset')
     data.load_energy()

     print("Loading [World Bank's Health Nutrition and Population Stats]")
     data.load_world_bank_health_n_pop()

-    print("Loading [Birth names]")
+    print('Loading [Birth names]')
     data.load_birth_names()

-    print("Loading [Random time series data]")
+    print('Loading [Random time series data]')
     data.load_random_time_series_data()

-    print("Loading [Random long/lat data]")
+    print('Loading [Random long/lat data]')
     data.load_long_lat_data()

-    print("Loading [Country Map data]")
+    print('Loading [Country Map data]')
     data.load_country_map_data()

-    print("Loading [Multiformat time series]")
+    print('Loading [Multiformat time series]')
     data.load_multiformat_time_series_data()

-    print("Loading [Misc Charts] dashboard")
+    print('Loading [Misc Charts] dashboard')
     data.load_misc_dashboard()

     if load_test_data:
-        print("Loading [Unicode test data]")
+        print('Loading [Unicode test data]')
         data.load_unicode_test_data()


 @manager.option(
     '-d', '--datasource',
     help=(
-        "Specify which datasource name to load, if omitted, all "
-        "datasources will be refreshed"
+        'Specify which datasource name to load, if omitted, all '
+        'datasources will be refreshed'
     ),
 )
 @manager.option(
     '-m', '--merge',
     help=(
         "Specify using 'merge' property during operation. "
-        "Default value is False "
+        'Default value is False '
     ),
 )
 def refresh_druid(datasource, merge):
@@ -167,8 +167,8 @@ def refresh_druid(datasource, merge):
            logging.exception(e)
        cluster.metadata_last_refreshed = datetime.now()
        print(
-            'Refreshed metadata from cluster '
-            '[' + cluster.cluster_name + ']')
+            'Refreshed metadata from cluster '
+            '[' + cluster.cluster_name + ']')
    session.commit()


@@ -188,14 +188,14 @@ def update_datasources_cache():
 @manager.option(
     '-w', '--workers',
     type=int,
-    help="Number of celery server workers to fire up")
+    help='Number of celery server workers to fire up')
 def worker(workers):
     """Starts a Superset worker for async SQL query execution."""
     if workers:
         celery_app.conf.update(CELERYD_CONCURRENCY=workers)
-    elif config.get("SUPERSET_CELERY_WORKERS"):
+    elif config.get('SUPERSET_CELERY_WORKERS'):
         celery_app.conf.update(
-            CELERYD_CONCURRENCY=config.get("SUPERSET_CELERY_WORKERS"))
+            CELERYD_CONCURRENCY=config.get('SUPERSET_CELERY_WORKERS'))

     worker = celery_app.Worker(optimization='fair')
     worker.start()
@@ -216,12 +216,12 @@ def flower(port, address):
     broker"""
     BROKER_URL = celery_app.conf.BROKER_URL
     cmd = (
-        "celery flower "
-        "--broker={BROKER_URL} "
-        "--port={port} "
-        "--address={address} "
+        'celery flower '
+        '--broker={BROKER_URL} '
+        '--port={port} '
+        '--address={address} '
     ).format(**locals())
-    print(Fore.GREEN + "Starting a Celery Flower instance")
+    print(Fore.GREEN + 'Starting a Celery Flower instance')
     print(Fore.BLUE + '-=' * 40)
     print(Fore.YELLOW + cmd)
     print(Fore.BLUE + '-=' * 40)
superset/config.py

@@ -92,10 +92,10 @@ ENABLE_PROXY_FIX = False
 # GLOBALS FOR APP Builder
 # ------------------------------
 # Uncomment to setup Your App name
-APP_NAME = "Superset"
+APP_NAME = 'Superset'

 # Uncomment to setup an App icon
-APP_ICON = "/static/assets/images/superset-logo@2x.png"
+APP_ICON = '/static/assets/images/superset-logo@2x.png'

 # Druid query timezone
 # tz.tzutc() : Using utc timezone
@@ -239,7 +239,7 @@ INTERVAL = 1
 BACKUP_COUNT = 30

 # Set this API key to enable Mapbox visualizations
-MAPBOX_API_KEY = ""
+MAPBOX_API_KEY = ''

 # Maximum number of rows returned in the SQL editor
 SQL_MAX_ROW = 1000000
@@ -329,7 +329,7 @@ SILENCE_FAB = True

 # The link to a page containing common errors and their resolutions
 # It will be appended at the bottom of sql_lab errors.
-TROUBLESHOOTING_LINK = ""
+TROUBLESHOOTING_LINK = ''


 # Integrate external Blueprints to the app by passing them to your
superset/connectors/base/models.py

@@ -60,7 +60,7 @@ class BaseDatasource(AuditMixinNullable, ImportMixin):
     @property
     def uid(self):
         """Unique id across datasource types"""
-        return "{self.id}__{self.type}".format(**locals())
+        return '{self.id}__{self.type}'.format(**locals())

     @property
     def column_names(self):
@@ -72,7 +72,7 @@ class BaseDatasource(AuditMixinNullable, ImportMixin):

     @property
     def main_dttm_col(self):
-        return "timestamp"
+        return 'timestamp'

     @property
     def connection(self):
@@ -105,7 +105,7 @@ class BaseDatasource(AuditMixinNullable, ImportMixin):
         if self.default_endpoint:
             return self.default_endpoint
         else:
-            return "/superset/explore/{obj.type}/{obj.id}/".format(obj=self)
+            return '/superset/explore/{obj.type}/{obj.id}/'.format(obj=self)

     @property
     def column_formats(self):
superset/connectors/base/views.py

@@ -8,6 +8,6 @@ class DatasourceModelView(SupersetModelView):
     def pre_delete(self, obj):
         if obj.slices:
             raise SupersetException(Markup(
-                "Cannot delete a datasource that has slices attached to it."
+                'Cannot delete a datasource that has slices attached to it.'
                 "Here's the list of associated slices: " +
-                "".join([o.slice_link for o in obj.slices])))
+                ''.join([o.slice_link for o in obj.slices])))
superset/connectors/druid/models.py

@@ -33,7 +33,7 @@ from superset.utils import (
     DimSelector, DTTM_ALIAS, flasher, MetricPermException,
 )

-DRUID_TZ = conf.get("DRUID_TZ")
+DRUID_TZ = conf.get('DRUID_TZ')


 # Function wrapper because bound methods cannot
@@ -65,7 +65,7 @@ class DruidCluster(Model, AuditMixinNullable):
     """ORM object referencing the Druid clusters"""

     __tablename__ = 'clusters'
-    type = "druid"
+    type = 'druid'

     id = Column(Integer, primary_key=True)
     verbose_name = Column(String(250), unique=True)
@@ -86,21 +86,21 @@ class DruidCluster(Model, AuditMixinNullable):

     def get_pydruid_client(self):
         cli = PyDruid(
-            "http://{0}:{1}/".format(self.broker_host, self.broker_port),
+            'http://{0}:{1}/'.format(self.broker_host, self.broker_port),
             self.broker_endpoint)
         return cli

     def get_datasources(self):
         endpoint = (
-            "http://{obj.coordinator_host}:{obj.coordinator_port}/"
-            "{obj.coordinator_endpoint}/datasources"
+            'http://{obj.coordinator_host}:{obj.coordinator_port}/'
+            '{obj.coordinator_endpoint}/datasources'
         ).format(obj=self)

         return json.loads(requests.get(endpoint).text)

     def get_druid_version(self):
         endpoint = (
-            "http://{obj.coordinator_host}:{obj.coordinator_port}/status"
+            'http://{obj.coordinator_host}:{obj.coordinator_port}/status'
         ).format(obj=self)
         return json.loads(requests.get(endpoint).text)['version']

@@ -144,11 +144,11 @@ class DruidCluster(Model, AuditMixinNullable):
                 with session.no_autoflush:
                     session.add(datasource)
                 flasher(
-                    "Adding new datasource [{}]".format(ds_name), 'success')
+                    'Adding new datasource [{}]'.format(ds_name), 'success')
                 ds_map[ds_name] = datasource
             elif refreshAll:
                 flasher(
-                    "Refreshing datasource [{}]".format(ds_name), 'info')
+                    'Refreshing datasource [{}]'.format(ds_name), 'info')
             else:
                 del ds_map[ds_name]
                 continue
@@ -200,7 +200,7 @@ class DruidCluster(Model, AuditMixinNullable):

     @property
     def perm(self):
-        return "[{obj.cluster_name}].(id:{obj.id})".format(obj=self)
+        return '[{obj.cluster_name}].(id:{obj.id})'.format(obj=self)

     def get_perm(self):
         return self.perm
@@ -390,7 +390,7 @@ class DruidMetric(Model, BaseMetric):
     @property
     def perm(self):
         return (
-            "{parent_name}.[{obj.metric_name}](id:{obj.id})"
+            '{parent_name}.[{obj.metric_name}](id:{obj.id})'
         ).format(obj=self,
                  parent_name=self.datasource.full_name,
                  ) if self.datasource else None
@@ -410,13 +410,13 @@ class DruidDatasource(Model, BaseDatasource):

     __tablename__ = 'datasources'

-    type = "druid"
-    query_langtage = "json"
+    type = 'druid'
+    query_langtage = 'json'
     cluster_class = DruidCluster
     metric_class = DruidMetric
     column_class = DruidColumn

-    baselink = "druiddatasourcemodelview"
+    baselink = 'druiddatasourcemodelview'

     # Columns
     datasource_name = Column(String(255), unique=True)
@@ -469,8 +469,8 @@ class DruidDatasource(Model, BaseDatasource):

     def get_perm(self):
         return (
-            "[{obj.cluster_name}].[{obj.datasource_name}]"
-            "(id:{obj.id})").format(obj=self)
+            '[{obj.cluster_name}].[{obj.datasource_name}]'
+            '(id:{obj.id})').format(obj=self)

     @property
     def link(self):
@@ -485,13 +485,13 @@ class DruidDatasource(Model, BaseDatasource):
     @property
     def time_column_grains(self):
         return {
-            "time_columns": [
+            'time_columns': [
                 'all', '5 seconds', '30 seconds', '1 minute',
                 '5 minutes', '1 hour', '6 hour', '1 day', '7 days',
                 'week', 'week_starting_sunday', 'week_ending_saturday',
                 'month',
             ],
-            "time_grains": ['now'],
+            'time_grains': ['now'],
         }

     def __repr__(self):
@@ -499,7 +499,7 @@ class DruidDatasource(Model, BaseDatasource):

     @renders('datasource_name')
     def datasource_link(self):
-        url = "/superset/explore/{obj.type}/{obj.id}/".format(obj=self)
+        url = '/superset/explore/{obj.type}/{obj.id}/'.format(obj=self)
         name = escape(self.datasource_name)
         return Markup('<a href="{url}">{name}</a>'.format(**locals()))

@@ -561,7 +561,7 @@ class DruidDatasource(Model, BaseDatasource):

     def latest_metadata(self):
         """Returns segment metadata from the latest segment"""
-        logging.info("Syncing datasource [{}]".format(self.datasource_name))
+        logging.info('Syncing datasource [{}]'.format(self.datasource_name))
         client = self.cluster.get_pydruid_client()
         results = client.time_boundary(datasource=self.datasource_name)
         if not results:
@@ -585,7 +585,7 @@ class DruidDatasource(Model, BaseDatasource):
                 merge=self.merge_flag,
                 analysisTypes=[])
         except Exception as e:
-            logging.warning("Failed first attempt to get latest segment")
+            logging.warning('Failed first attempt to get latest segment')
             logging.exception(e)
         if not segment_metadata:
             # if no segments in the past 7 days, look at all segments
@@ -601,7 +601,7 @@ class DruidDatasource(Model, BaseDatasource):
                     merge=self.merge_flag,
                     analysisTypes=[])
             except Exception as e:
-                logging.warning("Failed 2nd attempt to get latest segment")
+                logging.warning('Failed 2nd attempt to get latest segment')
                 logging.exception(e)
         if segment_metadata:
             return segment_metadata[-1]['columns']
@@ -669,7 +669,7 @@ class DruidDatasource(Model, BaseDatasource):
                     groupby=True,
                     filterable=True,
                     # TODO: fetch type from Hive.
-                    type="STRING",
+                    type='STRING',
                     datasource=datasource,
                 )
                 session.add(col_obj)
@@ -678,20 +678,20 @@ class DruidDatasource(Model, BaseDatasource):
             session.query(DruidMetric)
             .filter(DruidMetric.datasource_name == druid_config['name'])
             .filter(or_(DruidMetric.metric_name == spec['name']
-                    for spec in druid_config["metrics_spec"]))
+                    for spec in druid_config['metrics_spec']))
         )
         metric_objs = {metric.metric_name: metric for metric in metric_objs}
-        for metric_spec in druid_config["metrics_spec"]:
-            metric_name = metric_spec["name"]
-            metric_type = metric_spec["type"]
+        for metric_spec in druid_config['metrics_spec']:
+            metric_name = metric_spec['name']
+            metric_type = metric_spec['type']
             metric_json = json.dumps(metric_spec)

-            if metric_type == "count":
-                metric_type = "longSum"
+            if metric_type == 'count':
+                metric_type = 'longSum'
                 metric_json = json.dumps({
-                    "type": "longSum",
-                    "name": metric_name,
-                    "fieldName": metric_name,
+                    'type': 'longSum',
+                    'name': metric_name,
+                    'fieldName': metric_name,
                 })

             metric_obj = metric_objs.get(metric_name, None)
@@ -699,11 +699,11 @@ class DruidDatasource(Model, BaseDatasource):
                 metric_obj = DruidMetric(
                     metric_name=metric_name,
                     metric_type=metric_type,
-                    verbose_name="%s(%s)" % (metric_type, metric_name),
+                    verbose_name='%s(%s)' % (metric_type, metric_name),
                     datasource=datasource,
                     json=metric_json,
                     description=(
-                        "Imported from the airolap config dir for %s" %
+                        'Imported from the airolap config dir for %s' %
                         druid_config['name']),
                 )
                 session.add(metric_obj)
@@ -823,7 +823,7 @@ class DruidDatasource(Model, BaseDatasource):
                 )
             elif mconf.get('type') == 'arithmetic':
                 post_aggs[metric_name] = Postaggregator(
-                    mconf.get('fn', "/"),
+                    mconf.get('fn', '/'),
                     mconf.get('fields', []),
                     mconf.get('name', ''))
             else:
@@ -844,11 +844,11 @@ class DruidDatasource(Model, BaseDatasource):

         qry = dict(
             datasource=self.datasource_name,
-            granularity="all",
+            granularity='all',
             intervals=from_dttm.isoformat() + '/' + datetime.now().isoformat(),
-            aggregations=dict(count=count("count")),
+            aggregations=dict(count=count('count')),
             dimension=column_name,
-            metric="count",
+            metric='count',
             threshold=limit,
         )

@@ -870,16 +870,16 @@ class DruidDatasource(Model, BaseDatasource):
                     f = Dimension(dim) == row[dim]
                     fields.append(f)
                 if len(fields) > 1:
-                    term = Filter(type="and", fields=fields)
+                    term = Filter(type='and', fields=fields)
                     new_filters.append(term)
                 elif fields:
                     new_filters.append(fields[0])
             if new_filters:
-                ff = Filter(type="or", fields=new_filters)
+                ff = Filter(type='or', fields=new_filters)
                 if not dim_filter:
                     ret = ff
                 else:
-                    ret = Filter(type="and", fields=[ff, dim_filter])
+                    ret = Filter(type='and', fields=[ff, dim_filter])
             return ret

     def run_query(  # noqa / druid
@@ -913,7 +913,7 @@ class DruidDatasource(Model, BaseDatasource):
         to_dttm = to_dttm.replace(tzinfo=DRUID_TZ)
         timezone = from_dttm.tzname()

-        query_str = ""
+        query_str = ''
         metrics_dict = {m.metric_name: m for m in self.metrics}

         columns_dict = {c.column_name: c for c in self.columns}
@@ -936,7 +936,7 @@ class DruidDatasource(Model, BaseDatasource):

         if rejected_metrics:
             raise MetricPermException(
-                "Access to the metrics denied: " + ', '.join(rejected_metrics),
+                'Access to the metrics denied: ' + ', '.join(rejected_metrics),
             )

         # the dimensions list with dimensionSpecs expanded
@@ -969,7 +969,7 @@ class DruidDatasource(Model, BaseDatasource):
         having_filters = self.get_having_filters(extras.get('having_druid'))
         if having_filters:
             qry['having'] = having_filters
-        order_direction = "descending" if order_desc else "ascending"
+        order_direction = 'descending' if order_desc else 'ascending'
         if len(groupby) == 0 and not having_filters:
             del qry['dimensions']
             client.timeseries(**qry)
@@ -987,17 +987,17 @@ class DruidDatasource(Model, BaseDatasource):
                 order_by = list(qry['aggregations'].keys())[0]
             # Limit on the number of timeseries, doing a two-phases query
             pre_qry = deepcopy(qry)
-            pre_qry['granularity'] = "all"
+            pre_qry['granularity'] = 'all'
             pre_qry['threshold'] = min(row_limit,
                                        timeseries_limit or row_limit)
             pre_qry['metric'] = order_by
             pre_qry['dimension'] = dim
             del pre_qry['dimensions']
             client.topn(**pre_qry)
-            query_str += "// Two phase query\n// Phase 1\n"
+            query_str += '// Two phase query\n// Phase 1\n'
             query_str += json.dumps(
                 client.query_builder.last_query.query_dict, indent=2)
-            query_str += "\n"
+            query_str += '\n'
             if phase == 1:
                 return query_str
             query_str += (
@@ -1023,23 +1023,23 @@ class DruidDatasource(Model, BaseDatasource):
                 order_by = timeseries_limit_metric
             # Limit on the number of timeseries, doing a two-phases query
             pre_qry = deepcopy(qry)
-            pre_qry['granularity'] = "all"
+            pre_qry['granularity'] = 'all'
             pre_qry['limit_spec'] = {
-                "type": "default",
-                "limit": min(timeseries_limit, row_limit),
+                'type': 'default',
+                'limit': min(timeseries_limit, row_limit),
                 'intervals': (
                     inner_from_dttm.isoformat() + '/' +
                     inner_to_dttm.isoformat()),
-                "columns": [{
-                    "dimension": order_by,
-                    "direction": order_direction,
+                'columns': [{
+                    'dimension': order_by,
+                    'direction': order_direction,
                 }],
             }
             client.groupby(**pre_qry)
-            query_str += "// Two phase query\n// Phase 1\n"
+            query_str += '// Two phase query\n// Phase 1\n'
             query_str += json.dumps(
                 client.query_builder.last_query.query_dict, indent=2)
-            query_str += "\n"
+            query_str += '\n'
             if phase == 1:
                 return query_str
             query_str += (
@@ -1053,12 +1053,12 @@ class DruidDatasource(Model, BaseDatasource):
             qry['limit_spec'] = None
             if row_limit:
                 qry['limit_spec'] = {
-                    "type": "default",
-                    "limit": row_limit,
-                    "columns": [{
-                        "dimension": (
+                    'type': 'default',
+                    'limit': row_limit,
+                    'columns': [{
+                        'dimension': (
                             metrics[0] if metrics else self.metrics[0]),
-                        "direction": order_direction,
+                        'direction': order_direction,
                     }],
                 }
             client.groupby(**qry)
@@ -1074,7 +1074,7 @@ class DruidDatasource(Model, BaseDatasource):
         df = client.export_pandas()

         if df is None or df.size == 0:
-            raise Exception(_("No data was returned."))
+            raise Exception(_('No data was returned.'))
         df.columns = [
             DTTM_ALIAS if c == 'timestamp' else c for c in df.columns]

@@ -1120,7 +1120,7 @@ class DruidDatasource(Model, BaseDatasource):
         cond = None
         if op in ('in', 'not in'):
             eq = [
-                types.replace("'", '').strip()
+                types.replace('"', '').strip()
                 if isinstance(types, string_types)
                 else types
                 for types in eq]
@@ -1149,13 +1149,13 @@ class DruidDatasource(Model, BaseDatasource):
             else:
                 for s in eq:
                     fields.append(Dimension(col) == s)
-                cond = Filter(type="or", fields=fields)
+                cond = Filter(type='or', fields=fields)

             if op == 'not in':
                 cond = ~cond

         elif op == 'regex':
-            cond = Filter(type="regex", pattern=eq, dimension=col)
+            cond = Filter(type='regex', pattern=eq, dimension=col)
         elif op == '>=':
             cond = Bound(col, eq, None, alphaNumeric=is_numeric_col)
         elif op == '<=':
@@ -1172,7 +1172,7 @@ class DruidDatasource(Model, BaseDatasource):
             )

         if filters:
-            filters = Filter(type="and", fields=[
+            filters = Filter(type='and', fields=[
                 cond,
                 filters,
             ])
superset/connectors/druid/views.py

@@ -37,27 +37,27 @@ class DruidColumnInlineView(CompactCRUDMixin, SupersetModelView):  # noqa
     can_delete = False
     page_size = 500
     label_columns = {
-        'column_name': _("Column"),
-        'type': _("Type"),
-        'datasource': _("Datasource"),
-        'groupby': _("Groupable"),
-        'filterable': _("Filterable"),
-        'count_distinct': _("Count Distinct"),
-        'sum': _("Sum"),
-        'min': _("Min"),
-        'max': _("Max"),
+        'column_name': _('Column'),
+        'type': _('Type'),
+        'datasource': _('Datasource'),
+        'groupby': _('Groupable'),
+        'filterable': _('Filterable'),
+        'count_distinct': _('Count Distinct'),
+        'sum': _('Sum'),
+        'min': _('Min'),
+        'max': _('Max'),
     }
     description_columns = {
         'filterable': _(
-            "Whether this column is exposed in the `Filters` section "
-            "of the explore view."),
+            'Whether this column is exposed in the `Filters` section '
+            'of the explore view.'),
         'dimension_spec_json': utils.markdown(
-            "this field can be used to specify "
-            "a `dimensionSpec` as documented [here]"
-            "(http://druid.io/docs/latest/querying/dimensionspecs.html). "
-            "Make sure to input valid JSON and that the "
-            "`outputName` matches the `column_name` defined "
-            "above.",
+            'this field can be used to specify '
+            'a `dimensionSpec` as documented [here]'
+            '(http://druid.io/docs/latest/querying/dimensionspecs.html). '
+            'Make sure to input valid JSON and that the '
+            '`outputName` matches the `column_name` defined '
+            'above.',
             True),
     }

@@ -91,23 +91,23 @@ class DruidMetricInlineView(CompactCRUDMixin, SupersetModelView):  # noqa
     }
     description_columns = {
         'metric_type': utils.markdown(
-            "use `postagg` as the metric type if you are defining a "
-            "[Druid Post Aggregation]"
-            "(http://druid.io/docs/latest/querying/post-aggregations.html)",
+            'use `postagg` as the metric type if you are defining a '
+            '[Druid Post Aggregation]'
+            '(http://druid.io/docs/latest/querying/post-aggregations.html)',
             True),
-        'is_restricted': _("Whether the access to this metric is restricted "
-                           "to certain roles. Only roles with the permission "
+        'is_restricted': _('Whether the access to this metric is restricted '
+                           'to certain roles. Only roles with the permission '
                           "'metric access on XXX (the name of this metric)' "
-                           "are allowed to access this metric"),
+                           'are allowed to access this metric'),
     }
     label_columns = {
-        'metric_name': _("Metric"),
-        'description': _("Description"),
-        'verbose_name': _("Verbose Name"),
-        'metric_type': _("Type"),
-        'json': _("JSON"),
-        'datasource': _("Druid Datasource"),
-        'warning_text': _("Warning Message"),
+        'metric_name': _('Metric'),
+        'description': _('Description'),
+        'verbose_name': _('Verbose Name'),
+        'metric_type': _('Type'),
+        'json': _('JSON'),
+        'datasource': _('Druid Datasource'),
+        'warning_text': _('Warning Message'),
     }

     def post_add(self, metric):
@@ -139,13 +139,13 @@ class DruidClusterModelView(SupersetModelView, DeleteMixin):  # noqa
     list_columns = ['cluster_name', 'metadata_last_refreshed']
     search_columns = ('cluster_name',)
     label_columns = {
-        'cluster_name': _("Cluster"),
-        'coordinator_host': _("Coordinator Host"),
-        'coordinator_port': _("Coordinator Port"),
-        'coordinator_endpoint': _("Coordinator Endpoint"),
-        'broker_host': _("Broker Host"),
-        'broker_port': _("Broker Port"),
-        'broker_endpoint': _("Broker Endpoint"),
+        'cluster_name': _('Cluster'),
+        'coordinator_host': _('Coordinator Host'),
+        'coordinator_port': _('Coordinator Port'),
+        'coordinator_endpoint': _('Coordinator Endpoint'),
+        'broker_host': _('Broker Host'),
+        'broker_port': _('Broker Port'),
+        'broker_endpoint': _('Broker Endpoint'),
     }

     def pre_add(self, cluster):
@@ -160,11 +160,11 @@ class DruidClusterModelView(SupersetModelView, DeleteMixin):  # noqa

 appbuilder.add_view(
     DruidClusterModelView,
-    name="Druid Clusters",
-    label=__("Druid Clusters"),
-    icon="fa-cubes",
-    category="Sources",
-    category_label=__("Sources"),
+    name='Druid Clusters',
+    label=__('Druid Clusters'),
+    icon='fa-cubes',
+    category='Sources',
+    category_label=__('Sources'),
     category_icon='fa-database',)


@@ -195,44 +195,44 @@ class DruidDatasourceModelView(DatasourceModelView, DeleteMixin):  # noqa
     base_order = ('datasource_name', 'asc')
     description_columns = {
         'slices': _(
-            "The list of slices associated with this table. By "
-            "altering this datasource, you may change how these associated "
-            "slices behave. "
-            "Also note that slices need to point to a datasource, so "
-            "this form will fail at saving if removing slices from a "
-            "datasource. If you want to change the datasource for a slice, "
+            'The list of slices associated with this table. By '
+            'altering this datasource, you may change how these associated '
+            'slices behave. '
+            'Also note that slices need to point to a datasource, so '
+            'this form will fail at saving if removing slices from a '
+            'datasource. If you want to change the datasource for a slice, '
             "overwrite the slice from the 'explore view'"),
-        'offset': _("Timezone offset (in hours) for this datasource"),
+        'offset': _('Timezone offset (in hours) for this datasource'),
         'description': Markup(
-            "Supports <a href='"
-            "https://daringfireball.net/projects/markdown/'>markdown</a>"),
+            'Supports <a href="'
+            'https://daringfireball.net/projects/markdown/">markdown</a>'),
         'fetch_values_from': _(
-            "Time expression to use as a predicate when retrieving "
-            "distinct values to populate the filter component. "
-            "Only applies when `Enable Filter Select` is on. If "
-            "you enter `7 days ago`, the distinct list of values in "
-            "the filter will be populated based on the distinct value over "
-            "the past week"),
+            'Time expression to use as a predicate when retrieving '
+            'distinct values to populate the filter component. '
+            'Only applies when `Enable Filter Select` is on. If '
+            'you enter `7 days ago`, the distinct list of values in '
+            'the filter will be populated based on the distinct value over '
+            'the past week'),
         'filter_select_enabled': _(
             "Whether to populate the filter's dropdown in the explore "
             "view's filter section with a list of distinct values fetched "
-            "from the backend on the fly"),
+            'from the backend on the fly'),
         'default_endpoint': _(
-            "Redirects to this endpoint when clicking on the datasource "
-            "from the datasource list"),
+            'Redirects to this endpoint when clicking on the datasource '
+            'from the datasource list'),
     }
     base_filters = [['id', DatasourceFilter, lambda: []]]
     label_columns = {
-        'slices': _("Associated Slices"),
-        'datasource_link': _("Data Source"),
-        'cluster': _("Cluster"),
-        'description': _("Description"),
-        'owner': _("Owner"),
-        'is_hidden': _("Is Hidden"),
-        'filter_select_enabled': _("Enable Filter Select"),
-        'default_endpoint': _("Default Endpoint"),
-        'offset': _("Time Offset"),
-        'cache_timeout': _("Cache Timeout"),
+        'slices': _('Associated Slices'),
+        'datasource_link': _('Data Source'),
+        'cluster': _('Cluster'),
+        'description': _('Description'),
+        'owner': _('Owner'),
+        'is_hidden': _('Is Hidden'),
+        'filter_select_enabled': _('Enable Filter Select'),
+        'default_endpoint': _('Default Endpoint'),
+        'offset': _('Time Offset'),
+        'cache_timeout': _('Cache Timeout'),
     }

     def pre_add(self, datasource):
@@ -263,18 +263,18 @@ class DruidDatasourceModelView(DatasourceModelView, DeleteMixin):  # noqa

 appbuilder.add_view(
     DruidDatasourceModelView,
-    "Druid Datasources",
-    label=__("Druid Datasources"),
-    category="Sources",
-    category_label=__("Sources"),
-    icon="fa-cube")
+    'Druid Datasources',
+    label=__('Druid Datasources'),
+    category='Sources',
+    category_label=__('Sources'),
+    icon='fa-cube')


 class Druid(BaseSupersetView):
     """The base views for Superset!"""

     @has_access
-    @expose("/refresh_datasources/")
+    @expose('/refresh_datasources/')
     def refresh_datasources(self, refreshAll=True):
         """endpoint that refreshes druid datasources metadata"""
         session = db.session()
@@ -287,19 +287,19 @@ class Druid(BaseSupersetView):
                 flash(
                     "Error while processing cluster '{}'\n{}".format(
                         cluster_name, utils.error_msg_from_exception(e)),
-                    "danger")
+                    'danger')
                 logging.exception(e)
                 return redirect('/druidclustermodelview/list/')
             cluster.metadata_last_refreshed = datetime.now()
             flash(
-                "Refreshed metadata from cluster "
-                "[" + cluster.cluster_name + "]",
+                'Refreshed metadata from cluster '
+                '[' + cluster.cluster_name + ']',
                 'info')
         session.commit()
-        return redirect("/druiddatasourcemodelview/list/")
+        return redirect('/druiddatasourcemodelview/list/')

     @has_access
-    @expose("/scan_new_datasources/")
+    @expose('/scan_new_datasources/')
     def scan_new_datasources(self):
         """
         Calling this endpoint will cause a scan for new
@@ -311,21 +311,21 @@ class Druid(BaseSupersetView):
 appbuilder.add_view_no_menu(Druid)

 appbuilder.add_link(
-    "Scan New Datasources",
-    label=__("Scan New Datasources"),
+    'Scan New Datasources',
+    label=__('Scan New Datasources'),
     href='/druid/scan_new_datasources/',
     category='Sources',
-    category_label=__("Sources"),
+    category_label=__('Sources'),
     category_icon='fa-database',
-    icon="fa-refresh")
+    icon='fa-refresh')
 appbuilder.add_link(
-    "Refresh Druid Metadata",
-    label=__("Refresh Druid Metadata"),
+    'Refresh Druid Metadata',
+    label=__('Refresh Druid Metadata'),
     href='/druid/refresh_datasources/',
     category='Sources',
-    category_label=__("Sources"),
+    category_label=__('Sources'),
     category_icon='fa-database',
-    icon="fa-cog")
+    icon='fa-cog')


-appbuilder.add_separator("Sources", )
+appbuilder.add_separator('Sources', )
superset/connectors/sqla/models.py

@@ -138,7 +138,7 @@ class SqlMetric(Model, BaseMetric):
     @property
     def perm(self):
         return (
-            "{parent_name}.[{obj.metric_name}](id:{obj.id})"
+            '{parent_name}.[{obj.metric_name}](id:{obj.id})'
         ).format(obj=self,
                  parent_name=self.table.full_name) if self.table else None

@@ -155,7 +155,7 @@ class SqlaTable(Model, BaseDatasource):

     """An ORM object for SqlAlchemy table references"""

-    type = "table"
+    type = 'table'
     query_language = 'sql'
     metric_class = SqlMetric
     column_class = TableColumn
@@ -177,7 +177,7 @@ class SqlaTable(Model, BaseDatasource):
     schema = Column(String(255))
     sql = Column(Text)

-    baselink = "tablemodelview"
+    baselink = 'tablemodelview'
     export_fields = (
         'table_name', 'main_dttm_col', 'description', 'default_endpoint',
         'database_id', 'offset', 'cache_timeout', 'schema',
@@ -212,14 +212,14 @@ class SqlaTable(Model, BaseDatasource):

     def get_perm(self):
         return (
-            "[{obj.database}].[{obj.table_name}]"
-            "(id:{obj.id})").format(obj=self)
+            '[{obj.database}].[{obj.table_name}]'
+            '(id:{obj.id})').format(obj=self)

     @property
     def name(self):
         if not self.schema:
             return self.table_name
-        return "{}.{}".format(self.schema, self.table_name)
+        return '{}.{}'.format(self.schema, self.table_name)

     @property
     def full_name(self):
@@ -251,18 +251,18 @@ class SqlaTable(Model, BaseDatasource):
         return df.to_html(
             index=False,
             classes=(
-                "dataframe table table-striped table-bordered "
-                "table-condensed"))
+                'dataframe table table-striped table-bordered '
+                'table-condensed'))

     @property
     def sql_url(self):
-        return self.database.sql_url + "?table_name=" + str(self.table_name)
+        return self.database.sql_url + '?table_name=' + str(self.table_name)

     @property
     def time_column_grains(self):
         return {
-            "time_columns": self.dttm_cols,
-            "time_grains": [grain.name for grain in self.database.grains()],
+            'time_columns': self.dttm_cols,
+            'time_grains': [grain.name for grain in self.database.grains()],
         }

     def get_col(self, col_name):
@@ -304,9 +304,9 @@ class SqlaTable(Model, BaseDatasource):
             qry = qry.where(tp.process_template(self.fetch_values_predicate))

         engine = self.database.get_sqla_engine()
-        sql = "{}".format(
+        sql = '{}'.format(
             qry.compile(
-                engine, compile_kwargs={"literal_binds": True}, ),
+                engine, compile_kwargs={'literal_binds': True}, ),
         )

         df = pd.read_sql_query(sql=sql, con=engine)
@@ -322,7 +322,7 @@ class SqlaTable(Model, BaseDatasource):
         sql = str(
             qry.compile(
                 engine,
-                compile_kwargs={"literal_binds": True},
+                compile_kwargs={'literal_binds': True},
             ),
         )
         logging.info(sql)
@@ -389,10 +389,10 @@ class SqlaTable(Model, BaseDatasource):

         if not granularity and is_timeseries:
             raise Exception(_(
-                "Datetime column not provided as part table configuration "
-                "and is required by this type of chart"))
+                'Datetime column not provided as part table configuration '
+                'and is required by this type of chart'))
         if not groupby and not metrics and not columns:
-            raise Exception(_("Empty query?"))
+            raise Exception(_('Empty query?'))
         for m in metrics:
             if m not in metrics_dict:
                 raise Exception(_("Metric '{}' is not valid".format(m)))
@@ -400,7 +400,7 @@ class SqlaTable(Model, BaseDatasource):
         if metrics_exprs:
             main_metric_expr = metrics_exprs[0]
         else:
-            main_metric_expr = literal_column("COUNT(*)").label("ccount")
+            main_metric_expr = literal_column('COUNT(*)').label('ccount')

         select_exprs = []
         groupby_exprs = []
@@ -465,7 +465,7 @@ class SqlaTable(Model, BaseDatasource):
                         # For backwards compatibility and edge cases
                         # where a column data type might have changed
                         if isinstance(v, basestring):
-                            v = v.strip("'").strip('"')
+                            v = v.strip(""").strip(""")
                             if col_obj.is_num:
                                 v = utils.string_to_num(v)

@@ -600,9 +600,9 @@ class SqlaTable(Model, BaseDatasource):
             try:
                 datatype = col.type.compile(dialect=db_dialect).upper()
             except Exception as e:
-                datatype = "UNKNOWN"
+                datatype = 'UNKNOWN'
                 logging.error(
-                    "Unrecognized data type in {}.{}".format(table, col.name))
+                    'Unrecognized data type in {}.{}'.format(table, col.name))
                 logging.exception(e)
             dbcol = dbcols.get(col.name, None)
             if not dbcol:
@@ -622,35 +622,35 @@ class SqlaTable(Model, BaseDatasource):
                     metric_name='sum__' + dbcol.column_name,
                     verbose_name='sum__' + dbcol.column_name,
                     metric_type='sum',
-                    expression="SUM({})".format(quoted),
+                    expression='SUM({})'.format(quoted),
                 ))
             if dbcol.avg:
                 metrics.append(M(
                     metric_name='avg__' + dbcol.column_name,
                     verbose_name='avg__' + dbcol.column_name,
                     metric_type='avg',
-                    expression="AVG({})".format(quoted),
+                    expression='AVG({})'.format(quoted),
                 ))
             if dbcol.max:
                 metrics.append(M(
                     metric_name='max__' + dbcol.column_name,
                     verbose_name='max__' + dbcol.column_name,
                     metric_type='max',
-                    expression="MAX({})".format(quoted),
+                    expression='MAX({})'.format(quoted),
                 ))
             if dbcol.min:
                 metrics.append(M(
                     metric_name='min__' + dbcol.column_name,
                     verbose_name='min__' + dbcol.column_name,
                     metric_type='min',
-                    expression="MIN({})".format(quoted),
+                    expression='MIN({})'.format(quoted),
                 ))
             if dbcol.count_distinct:
                 metrics.append(M(
                     metric_name='count_distinct__' + dbcol.column_name,
                     verbose_name='count_distinct__' + dbcol.column_name,
                     metric_type='count_distinct',
-                    expression="COUNT(DISTINCT {})".format(quoted),
+                    expression='COUNT(DISTINCT {})'.format(quoted),
                 ))
             dbcol.type = datatype

@@ -658,7 +658,7 @@ class SqlaTable(Model, BaseDatasource):
             metric_name='count',
             verbose_name='COUNT(*)',
             metric_type='count',
-            expression="COUNT(*)",
+            expression='COUNT(*)',
         ))

         dbmetrics = db.session.query(M).filter(M.table_id == self.id).filter(
@ -39,54 +39,54 @@ class TableColumnInlineView(CompactCRUDMixin, SupersetModelView): # noqa
|
|||
page_size = 500
|
||||
description_columns = {
|
||||
'is_dttm': _(
|
||||
"Whether to make this column available as a "
|
||||
"[Time Granularity] option, column has to be DATETIME or "
|
||||
"DATETIME-like"),
|
||||
'Whether to make this column available as a '
|
||||
'[Time Granularity] option, column has to be DATETIME or '
|
||||
'DATETIME-like'),
|
||||
'filterable': _(
|
||||
"Whether this column is exposed in the `Filters` section "
|
||||
"of the explore view."),
|
||||
'Whether this column is exposed in the `Filters` section '
|
||||
'of the explore view.'),
|
||||
'type': _(
|
||||
"The data type that was inferred by the database. "
|
||||
"It may be necessary to input a type manually for "
|
||||
"expression-defined columns in some cases. In most case "
|
||||
"users should not need to alter this."),
|
||||
'The data type that was inferred by the database. '
|
||||
'It may be necessary to input a type manually for '
|
||||
'expression-defined columns in some cases. In most case '
|
||||
'users should not need to alter this.'),
|
||||
'expression': utils.markdown(
|
||||
"a valid SQL expression as supported by the underlying backend. "
|
||||
"Example: `substr(name, 1, 1)`", True),
|
||||
'a valid SQL expression as supported by the underlying backend. '
|
||||
'Example: `substr(name, 1, 1)`', True),
|
||||
'python_date_format': utils.markdown(Markup(
|
||||
"The pattern of timestamp format, use "
|
||||
"<a href='https://docs.python.org/2/library/"
|
||||
"datetime.html#strftime-strptime-behavior'>"
|
||||
"python datetime string pattern</a> "
|
||||
"expression. If time is stored in epoch "
|
||||
"format, put `epoch_s` or `epoch_ms`. Leave `Database Expression` "
|
||||
"below empty if timestamp is stored in "
|
||||
"String or Integer(epoch) type"), True),
|
||||
'The pattern of timestamp format, use '
|
||||
'<a href="https://docs.python.org/2/library/'
|
||||
'datetime.html#strftime-strptime-behavior">'
|
||||
'python datetime string pattern</a> '
|
||||
'expression. If time is stored in epoch '
|
||||
'format, put `epoch_s` or `epoch_ms`. Leave `Database Expression` '
|
||||
'below empty if timestamp is stored in '
|
||||
'String or Integer(epoch) type'), True),
|
||||
'database_expression': utils.markdown(
|
||||
"The database expression to cast internal datetime "
|
||||
"constants to database date/timestamp type according to the DBAPI. "
|
||||
"The expression should follow the pattern of "
|
||||
"%Y-%m-%d %H:%M:%S, based on different DBAPI. "
|
||||
"The string should be a python string formatter \n"
|
||||
"`Ex: TO_DATE('{}', 'YYYY-MM-DD HH24:MI:SS')` for Oracle"
|
||||
"Superset uses default expression based on DB URI if this "
|
||||
"field is blank.", True),
|
||||
'The database expression to cast internal datetime '
|
||||
'constants to database date/timestamp type according to the DBAPI. '
|
||||
'The expression should follow the pattern of '
|
||||
'%Y-%m-%d %H:%M:%S, based on different DBAPI. '
|
||||
'The string should be a python string formatter \n'
|
||||
"`Ex: TO_DATE('{}', 'YYYY-MM-DD HH24:MI:SS')` for Oracle "
|
||||
'Superset uses default expression based on DB URI if this '
|
||||
'field is blank.', True),
|
||||
}
|
||||
label_columns = {
|
||||
'column_name': _("Column"),
|
||||
'verbose_name': _("Verbose Name"),
|
||||
'description': _("Description"),
|
||||
'groupby': _("Groupable"),
|
||||
'filterable': _("Filterable"),
|
||||
'table': _("Table"),
|
||||
'count_distinct': _("Count Distinct"),
|
||||
'sum': _("Sum"),
|
||||
'min': _("Min"),
|
||||
'max': _("Max"),
|
||||
'expression': _("Expression"),
|
||||
'is_dttm': _("Is temporal"),
|
||||
'python_date_format': _("Datetime Format"),
|
||||
'database_expression': _("Database Expression"),
|
||||
'column_name': _('Column'),
|
||||
'verbose_name': _('Verbose Name'),
|
||||
'description': _('Description'),
|
||||
'groupby': _('Groupable'),
|
||||
'filterable': _('Filterable'),
|
||||
'table': _('Table'),
|
||||
'count_distinct': _('Count Distinct'),
|
||||
'sum': _('Sum'),
|
||||
'min': _('Min'),
|
||||
'max': _('Max'),
|
||||
'expression': _('Expression'),
|
||||
'is_dttm': _('Is temporal'),
|
||||
'python_date_format': _('Datetime Format'),
|
||||
'database_expression': _('Database Expression'),
|
||||
'type': _('Type'),
|
||||
}
|
||||
|
||||
|
|
@ -108,30 +108,30 @@ class SqlMetricInlineView(CompactCRUDMixin, SupersetModelView): # noqa
|
|||
'expression', 'table', 'd3format', 'is_restricted', 'warning_text']
|
||||
description_columns = {
|
||||
'expression': utils.markdown(
|
||||
"a valid SQL expression as supported by the underlying backend. "
|
||||
"Example: `count(DISTINCT userid)`", True),
|
||||
'is_restricted': _("Whether the access to this metric is restricted "
|
||||
"to certain roles. Only roles with the permission "
|
||||
'a valid SQL expression as supported by the underlying backend. '
|
||||
'Example: `count(DISTINCT userid)`', True),
|
||||
'is_restricted': _('Whether the access to this metric is restricted '
|
||||
'to certain roles. Only roles with the permission '
|
||||
"'metric access on XXX (the name of this metric)' "
|
||||
"are allowed to access this metric"),
|
||||
'are allowed to access this metric'),
|
||||
'd3format': utils.markdown(
|
||||
"d3 formatting string as defined [here]"
|
||||
"(https://github.com/d3/d3-format/blob/master/README.md#format). "
|
||||
"For instance, this default formatting applies in the Table "
|
||||
"visualization and allow for different metric to use different "
|
||||
"formats", True,
|
||||
'd3 formatting string as defined [here]'
|
||||
'(https://github.com/d3/d3-format/blob/master/README.md#format). '
|
||||
'For instance, this default formatting applies in the Table '
|
||||
'visualization and allow for different metric to use different '
|
||||
'formats', True,
|
||||
),
|
||||
}
|
||||
add_columns = edit_columns
|
||||
page_size = 500
|
||||
label_columns = {
|
||||
'metric_name': _("Metric"),
|
||||
'description': _("Description"),
|
||||
'verbose_name': _("Verbose Name"),
|
||||
'metric_type': _("Type"),
|
||||
'expression': _("SQL Expression"),
|
||||
'table': _("Table"),
|
||||
'd3format': _("D3 Format"),
|
||||
'metric_name': _('Metric'),
|
||||
'description': _('Description'),
|
||||
'verbose_name': _('Verbose Name'),
|
||||
'metric_type': _('Type'),
|
||||
'expression': _('SQL Expression'),
|
||||
'table': _('Table'),
|
||||
'd3format': _('D3 Format'),
|
||||
'is_restricted': _('Is Restricted'),
|
||||
'warning_text': _('Warning Message'),
|
||||
}
|
||||
@@ -174,56 +174,56 @@ class TableModelView(DatasourceModelView, DeleteMixin):  # noqa
     )
     description_columns = {
         'slices': _(
-            "The list of slices associated with this table. By "
-            "altering this datasource, you may change how these associated "
-            "slices behave. "
-            "Also note that slices need to point to a datasource, so "
-            "this form will fail at saving if removing slices from a "
-            "datasource. If you want to change the datasource for a slice, "
+            'The list of slices associated with this table. By '
+            'altering this datasource, you may change how these associated '
+            'slices behave. '
+            'Also note that slices need to point to a datasource, so '
+            'this form will fail at saving if removing slices from a '
+            'datasource. If you want to change the datasource for a slice, '
             "overwrite the slice from the 'explore view'"),
-        'offset': _("Timezone offset (in hours) for this datasource"),
+        'offset': _('Timezone offset (in hours) for this datasource'),
         'table_name': _(
-            "Name of the table that exists in the source database"),
+            'Name of the table that exists in the source database'),
         'schema': _(
-            "Schema, as used only in some databases like Postgres, Redshift "
-            "and DB2"),
+            'Schema, as used only in some databases like Postgres, Redshift '
+            'and DB2'),
         'description': Markup(
-            "Supports <a href='https://daringfireball.net/projects/markdown/'>"
-            "markdown</a>"),
+            'Supports <a href="https://daringfireball.net/projects/markdown/">'
+            'markdown</a>'),
         'sql': _(
-            "This fields acts a Superset view, meaning that Superset will "
-            "run a query against this string as a subquery.",
+            'This fields acts a Superset view, meaning that Superset will '
+            'run a query against this string as a subquery.',
         ),
         'fetch_values_predicate': _(
-            "Predicate applied when fetching distinct value to "
-            "populate the filter control component. Supports "
-            "jinja template syntax. Applies only when "
-            "`Enable Filter Select` is on.",
+            'Predicate applied when fetching distinct value to '
+            'populate the filter control component. Supports '
+            'jinja template syntax. Applies only when '
+            '`Enable Filter Select` is on.',
         ),
         'default_endpoint': _(
-            "Redirects to this endpoint when clicking on the table "
-            "from the table list"),
+            'Redirects to this endpoint when clicking on the table '
+            'from the table list'),
         'filter_select_enabled': _(
             "Whether to populate the filter's dropdown in the explore "
             "view's filter section with a list of distinct values fetched "
-            "from the backend on the fly"),
+            'from the backend on the fly'),
     }
     base_filters = [['id', DatasourceFilter, lambda: []]]
     label_columns = {
-        'slices': _("Associated Slices"),
-        'link': _("Table"),
-        'changed_by_': _("Changed By"),
-        'database': _("Database"),
-        'changed_on_': _("Last Changed"),
-        'filter_select_enabled': _("Enable Filter Select"),
-        'schema': _("Schema"),
+        'slices': _('Associated Slices'),
+        'link': _('Table'),
+        'changed_by_': _('Changed By'),
+        'database': _('Database'),
+        'changed_on_': _('Last Changed'),
+        'filter_select_enabled': _('Enable Filter Select'),
+        'schema': _('Schema'),
         'default_endpoint': _('Default Endpoint'),
-        'offset': _("Offset"),
-        'cache_timeout': _("Cache Timeout"),
-        'table_name': _("Table Name"),
+        'offset': _('Offset'),
+        'cache_timeout': _('Cache Timeout'),
+        'table_name': _('Table Name'),
         'fetch_values_predicate': _('Fetch Values Predicate'),
-        'owner': _("Owner"),
-        'main_dttm_col': _("Main Datetime Column"),
+        'owner': _('Owner'),
+        'main_dttm_col': _('Main Datetime Column'),
         'description': _('Description'),
     }

@@ -240,10 +240,10 @@ class TableModelView(DatasourceModelView, DeleteMixin):  # noqa
         # Fail before adding if the table can't be found
         if not table.database.has_table(table):
             raise Exception(_(
-                "Table [{}] could not be found, "
-                "please double check your "
-                "database connection, schema, and "
-                "table name").format(table.name))
+                'Table [{}] could not be found, '
+                'please double check your '
+                'database connection, schema, and '
+                'table name').format(table.name))

     def post_add(self, table, flash_message=True):
         table.fetch_metadata()

@@ -253,10 +253,10 @@ class TableModelView(DatasourceModelView, DeleteMixin):  # noqa
         if flash_message:
             flash(_(
-                "The table was created. "
-                "As part of this two phase configuration "
-                "process, you should now click the edit button by "
-                "the new table to configure it."), "info")
+                'The table was created. '
+                'As part of this two phase configuration '
+                'process, you should now click the edit button by '
+                'the new table to configure it.'), 'info')

     def post_update(self, table):
         self.post_add(table, flash_message=False)

@@ -274,26 +274,26 @@ class TableModelView(DatasourceModelView, DeleteMixin):  # noqa
         return redirect('/superset/explore/table/{}/'.format(pk))

     @action(
-        "refresh",
-        __("Refresh Metadata"),
-        __("Refresh column metadata"),
-        "fa-refresh")
+        'refresh',
+        __('Refresh Metadata'),
+        __('Refresh column metadata'),
+        'fa-refresh')
     def refresh(self, tables):
         for t in tables:
             t.fetch_metadata()
         msg = _(
-            "Metadata refreshed for the following table(s): %(tables)s",
-            tables=", ".join([t.table_name for t in tables]))
+            'Metadata refreshed for the following table(s): %(tables)s',
+            tables=', '.join([t.table_name for t in tables]))
         flash(msg, 'info')
         return redirect('/tablemodelview/list/')


 appbuilder.add_view(
     TableModelView,
-    "Tables",
-    label=__("Tables"),
-    category="Sources",
-    category_label=__("Sources"),
+    'Tables',
+    label=__('Tables'),
+    category='Sources',
+    category_label=__('Sources'),
     icon='fa-table',)

-appbuilder.add_separator("Sources")
+appbuilder.add_separator('Sources')
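For reference, the rule family being resolved here is flake8-quotes (Q000–Q003) with a single-quote preference: plain string literals become single-quoted, while strings that already contain a single quote keep their double quotes so that no escaping is introduced. A minimal illustration of the convention as applied above (the snippet is illustrative, not part of the commit):

    label = "Tables"    # Q000: double quotes where single quotes would do
    label = 'Tables'    # after

    # Left unchanged: switching quote style here would force an escape
    hint = "overwrite the slice from the 'explore view'"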
@@ -106,7 +106,7 @@ class BaseEngineSpec(object):
             all_result_sets += [
                 '{}.{}'.format(schema, t) for t in result_sets[schema]]
         if all_result_sets:
-            result_sets[""] = all_result_sets
+            result_sets[''] = all_result_sets
         return result_sets

     @classmethod

@@ -139,7 +139,7 @@ class BaseEngineSpec(object):
         For those it's probably better to not alter the database
         component of the URI with the schema name, it won't work.

-        Some database drivers like presto accept "{catalog}/{schema}" in
+        Some database drivers like presto accept '{catalog}/{schema}' in
         the database component of the URL, that can be handled here.
         """
         return uri

@@ -211,15 +211,15 @@ class PostgresEngineSpec(BaseEngineSpec):
     engine = 'postgresql'

     time_grains = (
-        Grain("Time Column", _('Time Column'), "{col}"),
-        Grain("second", _('second'), "DATE_TRUNC('second', \"{col}\")"),
-        Grain("minute", _('minute'), "DATE_TRUNC('minute', \"{col}\")"),
-        Grain("hour", _('hour'), "DATE_TRUNC('hour', \"{col}\")"),
-        Grain("day", _('day'), "DATE_TRUNC('day', \"{col}\")"),
-        Grain("week", _('week'), "DATE_TRUNC('week', \"{col}\")"),
-        Grain("month", _('month'), "DATE_TRUNC('month', \"{col}\")"),
-        Grain("quarter", _('quarter'), "DATE_TRUNC('quarter', \"{col}\")"),
-        Grain("year", _('year'), "DATE_TRUNC('year', \"{col}\")"),
+        Grain('Time Column', _('Time Column'), '{col}'),
+        Grain('second', _('second'), "DATE_TRUNC('second', '{col}')"),
+        Grain('minute', _('minute'), "DATE_TRUNC('minute', '{col}')"),
+        Grain('hour', _('hour'), "DATE_TRUNC('hour', '{col}')"),
+        Grain('day', _('day'), "DATE_TRUNC('day', '{col}')"),
+        Grain('week', _('week'), "DATE_TRUNC('week', '{col}')"),
+        Grain('month', _('month'), "DATE_TRUNC('month', '{col}')"),
+        Grain('quarter', _('quarter'), "DATE_TRUNC('quarter', '{col}')"),
+        Grain('year', _('year'), "DATE_TRUNC('year', '{col}')"),
     )

     @classmethod
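As a reminder of how these tuples are consumed (a sketch; it assumes the Grain namedtuple defined earlier in this module, with name, label, and function fields): the third field is a SQL fragment template into which the time column is substituted at query-build time.

    from collections import namedtuple

    Grain = namedtuple('Grain', ['name', 'label', 'function'])  # assumed definition

    grain = Grain('hour', 'hour', "DATE_TRUNC('hour', {col})")
    print(grain.function.format(col='created_on'))
    # DATE_TRUNC('hour', created_on)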
@@ -295,9 +295,9 @@ class SqliteEngineSpec(BaseEngineSpec):
     time_grains = (
         Grain('Time Column', _('Time Column'), '{col}'),
         Grain('day', _('day'), 'DATE({col})'),
-        Grain("week", _('week'),
+        Grain('week', _('week'),
               "DATE({col}, -strftime('%w', {col}) || ' days')"),
-        Grain("month", _('month'),
+        Grain('month', _('month'),
               "DATE({col}, -strftime('%d', {col}) || ' days', '+1 day')"),
     )

@@ -321,7 +321,7 @@ class SqliteEngineSpec(BaseEngineSpec):
             all_result_sets += [
                 '{}.{}'.format(schema, t) for t in result_sets[schema]]
         if all_result_sets:
-            result_sets[""] = all_result_sets
+            result_sets[''] = all_result_sets
         return result_sets

     @classmethod

@@ -342,25 +342,25 @@ class MySQLEngineSpec(BaseEngineSpec):
     cursor_execute_kwargs = {'args': {}}
     time_grains = (
         Grain('Time Column', _('Time Column'), '{col}'),
-        Grain("second", _('second'), "DATE_ADD(DATE({col}), "
-              "INTERVAL (HOUR({col})*60*60 + MINUTE({col})*60"
-              " + SECOND({col})) SECOND)"),
-        Grain("minute", _('minute'), "DATE_ADD(DATE({col}), "
-              "INTERVAL (HOUR({col})*60 + MINUTE({col})) MINUTE)"),
-        Grain("hour", _('hour'), "DATE_ADD(DATE({col}), "
-              "INTERVAL HOUR({col}) HOUR)"),
+        Grain('second', _('second'), 'DATE_ADD(DATE({col}), '
+              'INTERVAL (HOUR({col})*60*60 + MINUTE({col})*60'
+              ' + SECOND({col})) SECOND)'),
+        Grain('minute', _('minute'), 'DATE_ADD(DATE({col}), '
+              'INTERVAL (HOUR({col})*60 + MINUTE({col})) MINUTE)'),
+        Grain('hour', _('hour'), 'DATE_ADD(DATE({col}), '
+              'INTERVAL HOUR({col}) HOUR)'),
         Grain('day', _('day'), 'DATE({col})'),
-        Grain("week", _('week'), "DATE(DATE_SUB({col}, "
-              "INTERVAL DAYOFWEEK({col}) - 1 DAY))"),
-        Grain("month", _('month'), "DATE(DATE_SUB({col}, "
-              "INTERVAL DAYOFMONTH({col}) - 1 DAY))"),
-        Grain("quarter", _('quarter'), "MAKEDATE(YEAR({col}), 1) "
-              "+ INTERVAL QUARTER({col}) QUARTER - INTERVAL 1 QUARTER"),
-        Grain("year", _('year'), "DATE(DATE_SUB({col}, "
-              "INTERVAL DAYOFYEAR({col}) - 1 DAY))"),
-        Grain("week_start_monday", _('week_start_monday'),
-              "DATE(DATE_SUB({col}, "
-              "INTERVAL DAYOFWEEK(DATE_SUB({col}, INTERVAL 1 DAY)) - 1 DAY))"),
+        Grain('week', _('week'), 'DATE(DATE_SUB({col}, '
+              'INTERVAL DAYOFWEEK({col}) - 1 DAY))'),
+        Grain('month', _('month'), 'DATE(DATE_SUB({col}, '
+              'INTERVAL DAYOFMONTH({col}) - 1 DAY))'),
+        Grain('quarter', _('quarter'), 'MAKEDATE(YEAR({col}), 1) '
+              '+ INTERVAL QUARTER({col}) QUARTER - INTERVAL 1 QUARTER'),
+        Grain('year', _('year'), 'DATE(DATE_SUB({col}, '
+              'INTERVAL DAYOFYEAR({col}) - 1 DAY))'),
+        Grain('week_start_monday', _('week_start_monday'),
+              'DATE(DATE_SUB({col}, '
+              'INTERVAL DAYOFWEEK(DATE_SUB({col}, INTERVAL 1 DAY)) - 1 DAY))'),
     )

     @classmethod

@@ -378,7 +378,7 @@ class MySQLEngineSpec(BaseEngineSpec):
     @classmethod
     def epoch_to_dttm(cls):
-        return "from_unixtime({col})"
+        return 'from_unixtime({col})'

     @classmethod
     def extract_error_message(cls, e):

@@ -412,10 +412,10 @@ class PrestoEngineSpec(BaseEngineSpec):
               "date_trunc('month', CAST({col} AS TIMESTAMP))"),
         Grain('quarter', _('quarter'),
               "date_trunc('quarter', CAST({col} AS TIMESTAMP))"),
-        Grain("week_ending_saturday", _('week_ending_saturday'),
+        Grain('week_ending_saturday', _('week_ending_saturday'),
               "date_add('day', 5, date_trunc('week', date_add('day', 1, "
-              "CAST({col} AS TIMESTAMP))))"),
-        Grain("week_start_sunday", _('week_start_sunday'),
+              'CAST({col} AS TIMESTAMP))))'),
+        Grain('week_start_sunday', _('week_start_sunday'),
               "date_add('day', -1, date_trunc('week', "
               "date_add('day', 1, CAST({col} AS TIMESTAMP))))"),
     )

@@ -439,7 +439,7 @@ class PrestoEngineSpec(BaseEngineSpec):
     @classmethod
     def escape_sql(cls, sql):
-        return re.sub(r'%%|%', "%%", sql)
+        return re.sub(r'%%|%', '%%', sql)

     @classmethod
     def convert_dttm(cls, target_type, dttm):

@@ -452,7 +452,7 @@ class PrestoEngineSpec(BaseEngineSpec):
     @classmethod
     def epoch_to_dttm(cls):
-        return "from_unixtime({col})"
+        return 'from_unixtime({col})'

     @classmethod
     @cache_util.memoized_func(

@@ -474,7 +474,7 @@ class PrestoEngineSpec(BaseEngineSpec):
         result_sets = defaultdict(list)
         for unused, row in result_set_df.iterrows():
             result_sets[row['table_schema']].append(row['table_name'])
-            result_sets[""].append('{}.{}'.format(
+            result_sets[''].append('{}.{}'.format(
                 row['table_schema'], row['table_name']))
         return result_sets

@@ -486,7 +486,7 @@ class PrestoEngineSpec(BaseEngineSpec):
         cols = indexes[0].get('column_names', [])
         full_table_name = table_name
         if schema_name and '.' not in table_name:
-            full_table_name = "{}.{}".format(schema_name, table_name)
+            full_table_name = '{}.{}'.format(schema_name, table_name)
         pql = cls._partition_query(full_table_name)
         col_name, latest_part = cls.latest_partition(
             table_name, schema_name, database, show_first=True)

@@ -561,7 +561,7 @@ class PrestoEngineSpec(BaseEngineSpec):
         :param filters: a list of filters to apply
         :param filters: dict of field name and filter value combinations
         """
-        limit_clause = "LIMIT {}".format(limit) if limit else ''
+        limit_clause = 'LIMIT {}'.format(limit) if limit else ''
         order_by_clause = ''
         if order_by:
             l = []  # noqa: E741

@@ -610,12 +610,12 @@ class PrestoEngineSpec(BaseEngineSpec):
         indexes = database.get_indexes(table_name, schema)
         if len(indexes[0]['column_names']) < 1:
             raise SupersetTemplateException(
-                "The table should have one partitioned field")
+                'The table should have one partitioned field')
         elif not show_first and len(indexes[0]['column_names']) > 1:
             raise SupersetTemplateException(
-                "The table should have a single partitioned field "
-                "to use this function. You may want to use "
-                "`presto.latest_sub_partition`")
+                'The table should have a single partitioned field '
+                'to use this function. You may want to use '
+                '`presto.latest_sub_partition`')
         part_field = indexes[0]['column_names'][0]
         sql = cls._partition_query(table_name, 1, [(part_field, True)])
         df = database.get_df(sql, schema)

@@ -652,12 +652,12 @@ class PrestoEngineSpec(BaseEngineSpec):
         part_fields = indexes[0]['column_names']
         for k in kwargs.keys():
             if k not in k in part_fields:
-                msg = "Field [{k}] is not part of the portioning key"
+                msg = 'Field [{k}] is not part of the portioning key'
                 raise SupersetTemplateException(msg)
         if len(kwargs.keys()) != len(part_fields) - 1:
             msg = (
-                "A filter needs to be specified for {} out of the "
-                "{} fields."
+                'A filter needs to be specified for {} out of the '
+                '{} fields.'
             ).format(len(part_fields) - 1, len(part_fields))
             raise SupersetTemplateException(msg)

@@ -762,9 +762,9 @@ class HiveEngineSpec(PrestoEngineSpec):
             reduce_progress = int(match.groupdict()['reduce_progress'])
             stages[stage_number] = (map_progress + reduce_progress) / 2
         logging.info(
-            "Progress detail: {}, "
-            "current job {}, "
-            "total jobs: {}".format(stages, current_job, total_jobs))
+            'Progress detail: {}, '
+            'current job {}, '
+            'total jobs: {}'.format(stages, current_job, total_jobs))

         stage_progress = sum(
             stages.values()) / len(stages.values()) if stages else 0

@@ -776,7 +776,7 @@ class HiveEngineSpec(PrestoEngineSpec):
     @classmethod
     def get_tracking_url(cls, log_lines):
-        lkp = "Tracking URL = "
+        lkp = 'Tracking URL = '
         for line in log_lines:
             if lkp in line:
                 return line.split(lkp)[1]

@@ -803,7 +803,7 @@ class HiveEngineSpec(PrestoEngineSpec):
             if log:
                 log_lines = log.splitlines()
                 progress = cls.progress(log_lines)
-                logging.info("Progress total: {}".format(progress))
+                logging.info('Progress total: {}'.format(progress))
                 needs_commit = False
                 if progress > query.progress:
                     query.progress = progress

@@ -813,19 +813,19 @@ class HiveEngineSpec(PrestoEngineSpec):
                 if tracking_url:
                     job_id = tracking_url.split('/')[-2]
                     logging.info(
-                        "Found the tracking url: {}".format(tracking_url))
+                        'Found the tracking url: {}'.format(tracking_url))
                     tracking_url = tracking_url_trans(tracking_url)
                     logging.info(
-                        "Transformation applied: {}".format(tracking_url))
+                        'Transformation applied: {}'.format(tracking_url))
                     query.tracking_url = tracking_url
-                    logging.info("Job id: {}".format(job_id))
+                    logging.info('Job id: {}'.format(job_id))
                     needs_commit = True
                 if job_id and len(log_lines) > last_log_line:
                     # Wait for job id before logging things out
                     # this allows for prefixing all log lines and becoming
                     # searchable in something like Kibana
                     for l in log_lines[last_log_line:]:
-                        logging.info("[{}] {}".format(job_id, l))
+                        logging.info('[{}] {}'.format(job_id, l))
                     last_log_line = len(log_lines)
                 if needs_commit:
                     session.commit()
@@ -859,7 +859,7 @@ class HiveEngineSpec(PrestoEngineSpec):
     @classmethod
     def _partition_query(
             cls, table_name, limit=0, order_by=None, filters=None):
-        return "SHOW PARTITIONS {table_name}".format(**locals())
+        return 'SHOW PARTITIONS {table_name}'.format(**locals())

     @classmethod
     def modify_url_for_impersonation(cls, url, impersonate_user, username):

@@ -888,9 +888,9 @@ class HiveEngineSpec(PrestoEngineSpec):
         backend_name = url.get_backend_name()

         # Must be Hive connection, enable impersonation, and set param auth=LDAP|KERBEROS
-        if (backend_name == "hive" and "auth" in url.query.keys() and
+        if (backend_name == 'hive' and 'auth' in url.query.keys() and
                 impersonate_user is True and username is not None):
-            configuration["hive.server2.proxy.user"] = username
+            configuration['hive.server2.proxy.user'] = username
         return configuration


@@ -899,27 +899,27 @@ class MssqlEngineSpec(BaseEngineSpec):
     epoch_to_dttm = "dateadd(S, {col}, '1970-01-01')"

     time_grains = (
-        Grain("Time Column", _('Time Column'), "{col}"),
-        Grain("second", _('second'), "DATEADD(second, "
+        Grain('Time Column', _('Time Column'), '{col}'),
+        Grain('second', _('second'), 'DATEADD(second, '
              "DATEDIFF(second, '2000-01-01', {col}), '2000-01-01')"),
-        Grain("minute", _('minute'), "DATEADD(minute, "
-              "DATEDIFF(minute, 0, {col}), 0)"),
-        Grain("5 minute", _('5 minute'), "DATEADD(minute, "
-              "DATEDIFF(minute, 0, {col}) / 5 * 5, 0)"),
-        Grain("half hour", _('half hour'), "DATEADD(minute, "
-              "DATEDIFF(minute, 0, {col}) / 30 * 30, 0)"),
-        Grain("hour", _('hour'), "DATEADD(hour, "
-              "DATEDIFF(hour, 0, {col}), 0)"),
-        Grain("day", _('day'), "DATEADD(day, "
-              "DATEDIFF(day, 0, {col}), 0)"),
-        Grain("week", _('week'), "DATEADD(week, "
-              "DATEDIFF(week, 0, {col}), 0)"),
-        Grain("month", _('month'), "DATEADD(month, "
-              "DATEDIFF(month, 0, {col}), 0)"),
-        Grain("quarter", _('quarter'), "DATEADD(quarter, "
-              "DATEDIFF(quarter, 0, {col}), 0)"),
-        Grain("year", _('year'), "DATEADD(year, "
-              "DATEDIFF(year, 0, {col}), 0)"),
+        Grain('minute', _('minute'), 'DATEADD(minute, '
+              'DATEDIFF(minute, 0, {col}), 0)'),
+        Grain('5 minute', _('5 minute'), 'DATEADD(minute, '
+              'DATEDIFF(minute, 0, {col}) / 5 * 5, 0)'),
+        Grain('half hour', _('half hour'), 'DATEADD(minute, '
+              'DATEDIFF(minute, 0, {col}) / 30 * 30, 0)'),
+        Grain('hour', _('hour'), 'DATEADD(hour, '
+              'DATEDIFF(hour, 0, {col}), 0)'),
+        Grain('day', _('day'), 'DATEADD(day, '
+              'DATEDIFF(day, 0, {col}), 0)'),
+        Grain('week', _('week'), 'DATEADD(week, '
+              'DATEDIFF(week, 0, {col}), 0)'),
+        Grain('month', _('month'), 'DATEADD(month, '
+              'DATEDIFF(month, 0, {col}), 0)'),
+        Grain('quarter', _('quarter'), 'DATEADD(quarter, '
+              'DATEDIFF(quarter, 0, {col}), 0)'),
+        Grain('year', _('year'), 'DATEADD(year, '
+              'DATEDIFF(year, 0, {col}), 0)'),
     )

     @classmethod

@@ -955,7 +955,7 @@ class OracleEngineSpec(PostgresEngineSpec):
     @classmethod
     def convert_dttm(cls, target_type, dttm):
         return (
-            """TO_TIMESTAMP('{}', 'YYYY-MM-DD"T"HH24:MI:SS.ff6')"""
+            """TO_TIMESTAMP('{}', 'YYYY-MM-DD'T'HH24:MI:SS.ff6')"""
         ).format(dttm.isoformat())


@@ -982,10 +982,10 @@ class AthenaEngineSpec(BaseEngineSpec):
               "date_trunc('month', CAST({col} AS TIMESTAMP))"),
         Grain('quarter', _('quarter'),
               "date_trunc('quarter', CAST({col} AS TIMESTAMP))"),
-        Grain("week_ending_saturday", _('week_ending_saturday'),
+        Grain('week_ending_saturday', _('week_ending_saturday'),
               "date_add('day', 5, date_trunc('week', date_add('day', 1, "
-              "CAST({col} AS TIMESTAMP))))"),
-        Grain("week_start_sunday", _('week_start_sunday'),
+              'CAST({col} AS TIMESTAMP))))'),
+        Grain('week_start_sunday', _('week_start_sunday'),
               "date_add('day', -1, date_trunc('week', "
               "date_add('day', 1, CAST({col} AS TIMESTAMP))))"),
     )

@@ -1002,7 +1002,7 @@ class AthenaEngineSpec(BaseEngineSpec):
     @classmethod
     def epoch_to_dttm(cls):
-        return "from_unixtime({col})"
+        return 'from_unixtime({col})'


 class ClickHouseEngineSpec(BaseEngineSpec):

@@ -1015,21 +1015,21 @@ class ClickHouseEngineSpec(BaseEngineSpec):
     time_grains = (
         Grain('Time Column', _('Time Column'), '{col}'),
         Grain('minute', _('minute'),
-              "toStartOfMinute(toDateTime({col}))"),
+              'toStartOfMinute(toDateTime({col}))'),
         Grain('5 minute', _('5 minute'),
-              "toDateTime(intDiv(toUInt32(toDateTime({col})), 300)*300)"),
+              'toDateTime(intDiv(toUInt32(toDateTime({col})), 300)*300)'),
         Grain('10 minute', _('10 minute'),
-              "toDateTime(intDiv(toUInt32(toDateTime({col})), 600)*600)"),
+              'toDateTime(intDiv(toUInt32(toDateTime({col})), 600)*600)'),
         Grain('hour', _('hour'),
-              "toStartOfHour(toDateTime({col}))"),
+              'toStartOfHour(toDateTime({col}))'),
         Grain('day', _('day'),
-              "toStartOfDay(toDateTime({col}))"),
+              'toStartOfDay(toDateTime({col}))'),
         Grain('month', _('month'),
-              "toStartOfMonth(toDateTime({col}))"),
+              'toStartOfMonth(toDateTime({col}))'),
         Grain('quarter', _('quarter'),
-              "toStartOfQuarter(toDateTime({col}))"),
+              'toStartOfQuarter(toDateTime({col}))'),
         Grain('year', _('year'),
-              "toStartOfYear(toDateTime({col}))"),
+              'toStartOfYear(toDateTime({col}))'),
     )

     @classmethod
@@ -1050,22 +1050,22 @@ class BQEngineSpec(BaseEngineSpec):
     engine = 'bigquery'

     time_grains = (
-        Grain("Time Column", _('Time Column'), "{col}"),
-        Grain("second", _('second'), "TIMESTAMP_TRUNC({col}, SECOND)"),
-        Grain("minute", _('minute'), "TIMESTAMP_TRUNC({col}, MINUTE)"),
-        Grain("hour", _('hour'), "TIMESTAMP_TRUNC({col}, HOUR)"),
-        Grain("day", _('day'), "TIMESTAMP_TRUNC({col}, DAY)"),
-        Grain("week", _('week'), "TIMESTAMP_TRUNC({col}, WEEK)"),
-        Grain("month", _('month'), "TIMESTAMP_TRUNC({col}, MONTH)"),
-        Grain("quarter", _('quarter'), "TIMESTAMP_TRUNC({col}, QUARTER)"),
-        Grain("year", _('year'), "TIMESTAMP_TRUNC({col}, YEAR)"),
+        Grain('Time Column', _('Time Column'), '{col}'),
+        Grain('second', _('second'), 'TIMESTAMP_TRUNC({col}, SECOND)'),
+        Grain('minute', _('minute'), 'TIMESTAMP_TRUNC({col}, MINUTE)'),
+        Grain('hour', _('hour'), 'TIMESTAMP_TRUNC({col}, HOUR)'),
+        Grain('day', _('day'), 'TIMESTAMP_TRUNC({col}, DAY)'),
+        Grain('week', _('week'), 'TIMESTAMP_TRUNC({col}, WEEK)'),
+        Grain('month', _('month'), 'TIMESTAMP_TRUNC({col}, MONTH)'),
+        Grain('quarter', _('quarter'), 'TIMESTAMP_TRUNC({col}, QUARTER)'),
+        Grain('year', _('year'), 'TIMESTAMP_TRUNC({col}, YEAR)'),
     )

     @classmethod
     def convert_dttm(cls, target_type, dttm):
         tt = target_type.upper()
         if tt == 'DATE':
-            return "'{}'".format(dttm.strftime("%Y-%m-%d"))
+            return "'{}'".format(dttm.strftime('%Y-%m-%d'))
         return "'{}'".format(dttm.strftime('%Y-%m-%d %H:%M:%S'))


@@ -1075,21 +1075,21 @@ class ImpalaEngineSpec(BaseEngineSpec):
     engine = 'impala'

     time_grains = (
-        Grain("Time Column", _('Time Column'), "{col}"),
-        Grain("minute", _('minute'), "TRUNC({col}, 'MI')"),
-        Grain("hour", _('hour'), "TRUNC({col}, 'HH')"),
-        Grain("day", _('day'), "TRUNC({col}, 'DD')"),
-        Grain("week", _('week'), "TRUNC({col}, 'WW')"),
-        Grain("month", _('month'), "TRUNC({col}, 'MONTH')"),
-        Grain("quarter", _('quarter'), "TRUNC({col}, 'Q')"),
-        Grain("year", _('year'), "TRUNC({col}, 'YYYY')"),
+        Grain('Time Column', _('Time Column'), '{col}'),
+        Grain('minute', _('minute'), "TRUNC({col}, 'MI')"),
+        Grain('hour', _('hour'), "TRUNC({col}, 'HH')"),
+        Grain('day', _('day'), "TRUNC({col}, 'DD')"),
+        Grain('week', _('week'), "TRUNC({col}, 'WW')"),
+        Grain('month', _('month'), "TRUNC({col}, 'MONTH')"),
+        Grain('quarter', _('quarter'), "TRUNC({col}, 'Q')"),
+        Grain('year', _('year'), "TRUNC({col}, 'YYYY')"),
     )

     @classmethod
     def convert_dttm(cls, target_type, dttm):
         tt = target_type.upper()
         if tt == 'DATE':
-            return "'{}'".format(dttm.strftime("%Y-%m-%d"))
+            return "'{}'".format(dttm.strftime('%Y-%m-%d'))
         return "'{}'".format(dttm.strftime('%Y-%m-%d %H:%M:%S'))
@@ -22,7 +22,7 @@ def fetch_logs(self, max_rows=1024,
     except (ttypes.TApplicationException,
             Thrift.TApplicationException):
         if self._state == self._STATE_NONE:
-            raise hive.ProgrammingError("No query yet")
+            raise hive.ProgrammingError('No query yet')
        logs = []
        while True:
            req = ttypes.TFetchResultsReq(


@@ -4,17 +4,17 @@ from pyhive import presto
 # TODO(bogdan): Remove this when new pyhive release will be available.
 def cancel(self):
     if self._state == self._STATE_NONE:
-        raise presto.ProgrammingError("No query yet")
+        raise presto.ProgrammingError('No query yet')
     if self._nextUri is None:
         assert self._state == self._STATE_FINISHED, \
-            "Should be finished if nextUri is None"
+            'Should be finished if nextUri is None'
         return

     response = presto.requests.delete(self._nextUri)

     # pylint: disable=no-member
     if response.status_code != presto.requests.codes.no_content:
-        fmt = "Unexpected status code after cancel {}\n{}"
+        fmt = 'Unexpected status code after cancel {}\n{}'
         raise presto.OperationalError(
             fmt.format(response.status_code, response.content))
     self._state = self._STATE_FINISHED
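The cancel function above is written as a free function because, per the TODO, it is meant to be grafted onto pyhive's cursor until an upstream release ships it. A sketch of how such a monkey-patch is typically attached (the assignment is an assumption, not shown in this diff):

    from pyhive import presto

    # Replace the cursor method with the patched implementation above
    presto.Cursor.cancel = cancel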
@@ -16,7 +16,7 @@ def cast_filter_data(form_data):
     flts = []
     having_flts = []
     fd = form_data
-    filter_pattern = re.compile(r'''((?:[^,"']|"[^"]*"|'[^']*')+)''')
+    filter_pattern = re.compile(r"""((?:[^,"']|"[^"]*"|'[^']*')+)""")
     for i in range(0, 10):
         for prefix in ['flt', 'having']:
             col_str = '{}_col_{}'.format(prefix, i)


@@ -43,7 +43,7 @@ config = app.config
 stats_logger = config.get('STATS_LOGGER')
 metadata = Model.metadata  # pylint: disable=no-member

-PASSWORD_MASK = "X" * 10
+PASSWORD_MASK = 'X' * 10


 def set_related_perm(mapper, connection, target):  # noqa
     src_class = target.cls_model
|
@@ -209,19 +209,19 @@ class Slice(Model, AuditMixinNullable, ImportMixin):
     def slice_url(self):
         """Defines the url to access the slice"""
         return (
-            "/superset/explore/{obj.datasource_type}/"
-            "{obj.datasource_id}/?form_data={params}".format(
+            '/superset/explore/{obj.datasource_type}/'
+            '{obj.datasource_id}/?form_data={params}'.format(
                 obj=self, params=parse.quote(json.dumps(self.form_data))))

     @property
     def slice_id_url(self):
         return (
-            "/superset/{slc.datasource_type}/{slc.datasource_id}/{slc.id}/"
+            '/superset/{slc.datasource_type}/{slc.datasource_id}/{slc.id}/'
         ).format(slc=self)

     @property
     def edit_url(self):
-        return "/slicemodelview/edit/{}".format(self.id)
+        return '/slicemodelview/edit/{}'.format(self.id)

     @property
     def slice_link(self):

@@ -238,9 +238,9 @@ class Slice(Model, AuditMixinNullable, ImportMixin):
         """
         slice_params = json.loads(self.params)
         slice_params['slice_id'] = self.id
-        slice_params['json'] = "false"
+        slice_params['json'] = 'false'
         slice_params['slice_name'] = self.slice_name
-        slice_params['viz_type'] = self.viz_type if self.viz_type else "table"
+        slice_params['viz_type'] = self.viz_type if self.viz_type else 'table'

         return viz_types[slice_params.get('viz_type')](
             self.datasource,

@@ -327,8 +327,8 @@ class Dashboard(Model, AuditMixinNullable, ImportMixin):
     @property
     def table_names(self):
         # pylint: disable=no-member
-        return ", ".join(
-            {"{}".format(s.datasource.full_name) for s in self.slices})
+        return ', '.join(
+            {'{}'.format(s.datasource.full_name) for s in self.slices})

     @property
     def url(self):

@@ -338,9 +338,9 @@ class Dashboard(Model, AuditMixinNullable, ImportMixin):
         default_filters = json_metadata.get('default_filters')
         if default_filters:
             filters = parse.quote(default_filters.encode('utf8'))
-            return "/superset/dashboard/{}/?preselect_filters={}".format(
+            return '/superset/dashboard/{}/?preselect_filters={}'.format(
                 self.slug or self.id, filters)
-        return "/superset/dashboard/{}/".format(self.slug or self.id)
+        return '/superset/dashboard/{}/'.format(self.slug or self.id)

     @property
     def datasources(self):

@@ -538,7 +538,7 @@ class Database(Model, AuditMixinNullable):
     """An ORM object that stores Database related information"""

     __tablename__ = 'dbs'
-    type = "table"
+    type = 'table'

     id = Column(Integer, primary_key=True)
     verbose_name = Column(String(250), unique=True)

@@ -633,7 +633,7 @@ class Database(Model, AuditMixinNullable):
             effective_username)

         masked_url = self.get_password_masked_url(url)
-        logging.info("Database.get_sqla_engine(). Masked URL: {0}".format(masked_url))
+        logging.info('Database.get_sqla_engine(). Masked URL: {0}'.format(masked_url))

         params = extra.get('engine_params', {})
         if nullpool:

@@ -647,7 +647,7 @@ class Database(Model, AuditMixinNullable):
                 self.impersonate_user,
                 effective_username))
         if configuration:
-            params["connect_args"] = {"configuration": configuration}
+            params['connect_args'] = {'configuration': configuration}

         return create_engine(url, **params)

@@ -676,7 +676,7 @@ class Database(Model, AuditMixinNullable):
     def compile_sqla_query(self, qry, schema=None):
         eng = self.get_sqla_engine(schema=schema)
-        compiled = qry.compile(eng, compile_kwargs={"literal_binds": True})
+        compiled = qry.compile(eng, compile_kwargs={'literal_binds': True})
         return '{}'.format(compiled)

     def select_star(

@@ -709,7 +709,7 @@ class Database(Model, AuditMixinNullable):
         if not schema:
             tables_dict = self.db_engine_spec.fetch_result_sets(
                 self, 'table', force=force)
-            return tables_dict.get("", [])
+            return tables_dict.get('', [])
         return sorted(
             self.db_engine_spec.get_table_names(schema, self.inspector))

@@ -717,7 +717,7 @@ class Database(Model, AuditMixinNullable):
         if not schema:
             views_dict = self.db_engine_spec.fetch_result_sets(
                 self, 'view', force=force)
-            return views_dict.get("", [])
+            return views_dict.get('', [])
         views = []
         try:
             views = self.inspector.get_view_names(schema)

@@ -796,7 +796,7 @@ class Database(Model, AuditMixinNullable):
     def get_perm(self):
         return (
-            "[{obj.database_name}].(id:{obj.id})").format(obj=self)
+            '[{obj.database_name}].(id:{obj.id})').format(obj=self)

     def has_table(self, table):
         engine = self.get_sqla_engine()

@@ -851,7 +851,7 @@ class Log(Model):
         except (ValueError, TypeError):
             slice_id = 0

-        params = ""
+        params = ''
         try:
             params = json.dumps(d)
         except Exception:

@@ -948,6 +948,6 @@ class DatasourceAccessRequest(Model, AuditMixinNullable):
             )
             href = '<a href="{}">Extend {} Role</a>'.format(url, r.name)
             if r.name in self.ROLES_BLACKLIST:
-                href = "{} Role".format(r.name)
+                href = '{} Role'.format(r.name)
             action_list = action_list + '<li>' + href + '</li>'
         return '<ul>' + action_list + '</ul>'
@@ -39,8 +39,8 @@ class ImportMixin(object):
     @property
     def params_dict(self):
         if self.params:
-            params = re.sub(",[ \t\r\n]+}", "}", self.params)
-            params = re.sub(",[ \t\r\n]+\]", "]", params)
+            params = re.sub(',[ \t\r\n]+}', '}', self.params)
+            params = re.sub(',[ \t\r\n]+\]', ']', params)
            return json.loads(params)
        else:
            return {}
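The two substitutions above strip trailing commas before } and ] so that otherwise-invalid JSON parses; a quick illustration with a hypothetical params value:

    import json
    import re

    params = '{"metrics": ["count", ],  }'
    params = re.sub(',[ \t\r\n]+}', '}', params)
    params = re.sub(',[ \t\r\n]+\]', ']', params)
    print(json.loads(params))  # {'metrics': ['count']}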
@@ -122,7 +122,7 @@ class Query(Model):
         tab = (self.tab_name.replace(' ', '_').lower()
                if self.tab_name else 'notab')
         tab = re.sub(r'\W+', '', tab)
-        return "sqllab_{tab}_{ts}".format(**locals())
+        return 'sqllab_{tab}_{ts}'.format(**locals())


 class SavedQuery(Model, AuditMixinNullable):


@@ -86,15 +86,15 @@ def is_user_defined_permission(perm):


 def get_or_create_main_db():
-    logging.info("Creating database reference")
+    logging.info('Creating database reference')
     dbobj = (
         db.session.query(models.Database)
         .filter_by(database_name='main')
         .first()
     )
     if not dbobj:
-        dbobj = models.Database(database_name="main")
-    dbobj.set_sqlalchemy_uri(conf.get("SQLALCHEMY_DATABASE_URI"))
+        dbobj = models.Database(database_name='main')
+    dbobj.set_sqlalchemy_uri(conf.get('SQLALCHEMY_DATABASE_URI'))
     dbobj.expose_in_sqllab = True
     dbobj.allow_run_sync = True
     db.session.add(dbobj)
@@ -146,7 +146,7 @@ def is_granter_pvm(pvm):


 def set_role(role_name, pvm_check):
-    logging.info("Syncing {} perms".format(role_name))
+    logging.info('Syncing {} perms'.format(role_name))
     sesh = sm.get_session()
     pvms = sesh.query(ab_models.PermissionView).all()
     pvms = [p for p in pvms if p.permission and p.view_menu]

@@ -167,7 +167,7 @@ def create_missing_perms():
     """Creates missing perms for datasources, schemas and metrics"""

     logging.info(
-        "Fetching a set of all perms to lookup which ones are missing")
+        'Fetching a set of all perms to lookup which ones are missing')
     all_pvs = set()
     for pv in sm.get_session.query(sm.permissionview_model).all():
         if pv.permission and pv.view_menu:

@@ -178,18 +178,18 @@ def create_missing_perms():
         if view_menu and perm and (view_menu, perm) not in all_pvs:
             merge_perm(sm, view_menu, perm)

-    logging.info("Creating missing datasource permissions.")
+    logging.info('Creating missing datasource permissions.')
     datasources = ConnectorRegistry.get_all_datasources(db.session)
     for datasource in datasources:
         merge_pv('datasource_access', datasource.get_perm())
         merge_pv('schema_access', datasource.schema_perm)

-    logging.info("Creating missing database permissions.")
+    logging.info('Creating missing database permissions.')
     databases = db.session.query(models.Database).all()
     for database in databases:
         merge_pv('database_access', database.perm)

-    logging.info("Creating missing metrics permissions")
+    logging.info('Creating missing metrics permissions')
     metrics = []
     for datasource_class in ConnectorRegistry.sources.values():
         metrics += list(db.session.query(datasource_class.metric_class).all())

@@ -201,7 +201,7 @@ def create_missing_perms():

 def sync_role_definitions():
     """Inits the Superset application with security roles and such"""
-    logging.info("Syncing role definition")
+    logging.info('Syncing role definition')

     get_or_create_main_db()
     create_custom_permissions()
@@ -63,13 +63,13 @@ def get_query(query_id, session, retry_count=5):
         except Exception:
             attempt += 1
             logging.error(
-                "Query with id `{}` could not be retrieved".format(query_id))
+                'Query with id `{}` could not be retrieved'.format(query_id))
             stats_logger.incr('error_attempting_orm_query_' + str(attempt))
-            logging.error("Sleeping for a sec before retrying...")
+            logging.error('Sleeping for a sec before retrying...')
             sleep(1)
     if not query:
         stats_logger.incr('error_failed_at_getting_orm_query')
-        raise SqlLabException("Failed at getting query")
+        raise SqlLabException('Failed at getting query')
     return query


@@ -119,9 +119,9 @@ def execute_sql(
     def handle_error(msg):
         """Local method handling error while processing the SQL"""
-        troubleshooting_link = config["TROUBLESHOOTING_LINK"]
-        msg = "Error: {}. You can find common superset errors and their \
-resolutions at: {}".format(msg, troubleshooting_link) \
+        troubleshooting_link = config['TROUBLESHOOTING_LINK']
+        msg = 'Error: {}. You can find common superset errors and their \
+resolutions at: {}'.format(msg, troubleshooting_link) \
             if troubleshooting_link else msg
         query.error_message = msg
         query.status = QueryStatus.FAILED

@@ -141,12 +141,12 @@ def execute_sql(
     executed_sql = superset_query.stripped()
     if not superset_query.is_select() and not database.allow_dml:
         return handle_error(
-            "Only `SELECT` statements are allowed against this database")
+            'Only `SELECT` statements are allowed against this database')
     if query.select_as_cta:
         if not superset_query.is_select():
             return handle_error(
-                "Only `SELECT` statements can be used with the CREATE TABLE "
-                "feature.")
+                'Only `SELECT` statements can be used with the CREATE TABLE '
+                'feature.')
             return
         if not query.tmp_table_name:
             start_dttm = datetime.fromtimestamp(query.start_time)

@@ -164,7 +164,7 @@ def execute_sql(
         executed_sql = template_processor.process_template(executed_sql)
     except Exception as e:
         logging.exception(e)
-        msg = "Template rendering failed: " + utils.error_msg_from_exception(e)
+        msg = 'Template rendering failed: ' + utils.error_msg_from_exception(e)
         return handle_error(msg)

     query.executed_sql = executed_sql

@@ -182,13 +182,13 @@ def execute_sql(
         )
         conn = engine.raw_connection()
         cursor = conn.cursor()
-        logging.info("Running query: \n{}".format(executed_sql))
+        logging.info('Running query: \n{}'.format(executed_sql))
         logging.info(query.executed_sql)
         cursor.execute(query.executed_sql,
                        **db_engine_spec.cursor_execute_kwargs)
-        logging.info("Handling cursor")
+        logging.info('Handling cursor')
         db_engine_spec.handle_cursor(cursor, query, session)
-        logging.info("Fetching data: {}".format(query.to_dict()))
+        logging.info('Fetching data: {}'.format(query.to_dict()))
         data = db_engine_spec.fetch_data(cursor, query.limit)
     except SoftTimeLimitExceeded as e:
         logging.exception(e)

@@ -196,14 +196,14 @@ def execute_sql(
             conn.close()
         return handle_error(
             "SQL Lab timeout. This environment's policy is to kill queries "
-            "after {} seconds.".format(SQLLAB_TIMEOUT))
+            'after {} seconds.'.format(SQLLAB_TIMEOUT))
     except Exception as e:
         logging.exception(e)
         if conn is not None:
             conn.close()
         return handle_error(db_engine_spec.extract_error_message(e))

-    logging.info("Fetching cursor description")
+    logging.info('Fetching cursor description')
     cursor_description = cursor.description

     if conn is not None:

@@ -248,7 +248,7 @@ def execute_sql(
         })
     if store_results:
         key = '{}'.format(uuid.uuid4())
-        logging.info("Storing results in results backend, key: {}".format(key))
+        logging.info('Storing results in results backend, key: {}'.format(key))
         json_payload = json.dumps(payload, default=utils.json_iso_dttm_ser)
         results_backend.set(key, utils.zlib_compress(json_payload))
         query.results_key = key
@@ -20,7 +20,7 @@ class SupersetQuery(object):
         self._table_names = set()
         self._alias_names = set()
         # TODO: multistatement support
-        logging.info("Parsing with sqlparse statement {}".format(self.sql))
+        logging.info('Parsing with sqlparse statement {}'.format(self.sql))
         self._parsed = sqlparse.parse(self.sql)
         for statement in self._parsed:
             self.__extract_from_token(statement)

@@ -50,7 +50,7 @@ class SupersetQuery(object):
     @staticmethod
     def __get_full_name(identifier):
         if len(identifier.tokens) > 1 and identifier.tokens[1].value == '.':
-            return "{}.{}".format(identifier.tokens[0].value,
+            return '{}.{}'.format(identifier.tokens[0].value,
                                   identifier.tokens[2].value)
         return identifier.get_real_name()

@@ -101,7 +101,7 @@ class SupersetQuery(object):
         sql = self.stripped()
         if overwrite:
             exec_sql = 'DROP TABLE IF EXISTS {table_name};\n'
-        exec_sql += "CREATE TABLE {table_name} AS \n{sql}"
+        exec_sql += 'CREATE TABLE {table_name} AS \n{sql}'
         return exec_sql.format(**locals())

     def __extract_from_token(self, token):


@@ -30,17 +30,17 @@ class BaseStatsLogger(object):
 class DummyStatsLogger(BaseStatsLogger):
     def incr(self, key):
         logging.debug(
-            Fore.CYAN + "[stats_logger] (incr) " + key + Style.RESET_ALL)
+            Fore.CYAN + '[stats_logger] (incr) ' + key + Style.RESET_ALL)

     def decr(self, key):
         logging.debug((
-            Fore.CYAN + "[stats_logger] (decr) " + key +
+            Fore.CYAN + '[stats_logger] (decr) ' + key +
             Style.RESET_ALL))

     def gauge(self, key, value):
         logging.debug((
-            Fore.CYAN + "[stats_logger] (gauge) "
-            "{key} | {value}" + Style.RESET_ALL).format(**locals()))
+            Fore.CYAN + '[stats_logger] (gauge) '
+            '{key} | {value}' + Style.RESET_ALL).format(**locals()))


 try:
@@ -186,9 +186,9 @@ def parse_human_datetime(s):
     datetime.datetime(2015, 4, 3, 0, 0)
     >>> parse_human_datetime('2/3/1969')
     datetime.datetime(1969, 2, 3, 0, 0)
-    >>> parse_human_datetime("now") <= datetime.now()
+    >>> parse_human_datetime('now') <= datetime.now()
     True
-    >>> parse_human_datetime("yesterday") <= datetime.now()
+    >>> parse_human_datetime('yesterday') <= datetime.now()
     True
     >>> date.today() - timedelta(1) == parse_human_datetime('yesterday').date()
     True

@@ -205,7 +205,7 @@ def parse_human_datetime(s):
     try:
         cal = parsedatetime.Calendar()
         parsed_dttm, parsed_flags = cal.parseDT(s)
-        # when time is not extracted, we "reset to midnight"
+        # when time is not extracted, we 'reset to midnight'
         if parsed_flags & 2 == 0:
             parsed_dttm = parsed_dttm.replace(hour=0, minute=0, second=0)
         dttm = dttm_from_timtuple(parsed_dttm.utctimetuple())

@@ -224,7 +224,7 @@ def parse_human_timedelta(s):
     """
     Returns ``datetime.datetime`` from natural language time deltas

-    >>> parse_human_datetime("now") <= datetime.now()
+    >>> parse_human_datetime('now') <= datetime.now()
     True
     """
     cal = parsedatetime.Calendar()

@@ -260,7 +260,7 @@ def datetime_f(dttm):
         dttm = dttm[11:]
     elif now_iso[:4] == dttm[:4]:
         dttm = dttm[5:]
-    return "<nobr>{}</nobr>".format(dttm)
+    return '<nobr>{}</nobr>'.format(dttm)


 def base_json_conv(obj):

@@ -298,7 +298,7 @@ def json_iso_dttm_ser(obj):
         obj = obj.isoformat()
     else:
         raise TypeError(
-            "Unserializable object {} of type {}".format(obj, type(obj)))
+            'Unserializable object {} of type {}'.format(obj, type(obj)))
     return obj


@@ -324,7 +324,7 @@ def json_int_dttm_ser(obj):
         obj = (obj - EPOCH.date()).total_seconds() * 1000
     else:
         raise TypeError(
-            "Unserializable object {} of type {}".format(obj, type(obj)))
+            'Unserializable object {} of type {}'.format(obj, type(obj)))
     return obj


@@ -343,7 +343,7 @@ def error_msg_from_exception(e):
     created via create_engine.
     engine = create_engine('presto://localhost:3506/silver') -
       gives an e.message as the str(dict)
-    presto.connect("localhost", port=3506, catalog='silver') - as a dict.
+    presto.connect('localhost', port=3506, catalog='silver') - as a dict.
     The latter version is parsed correctly by this function.
     """
     msg = ''

@@ -351,7 +351,7 @@ def error_msg_from_exception(e):
         if isinstance(e.message, dict):
             msg = e.message.get('message')
         elif e.message:
-            msg = "{}".format(e.message)
+            msg = '{}'.format(e.message)
     return msg or '{}'.format(e)


@@ -384,13 +384,13 @@ def generic_find_constraint_name(table, columns, referenced, db):

 def get_datasource_full_name(database_name, datasource_name, schema=None):
     if not schema:
-        return "[{}].[{}]".format(database_name, datasource_name)
-    return "[{}].[{}].[{}]".format(database_name, schema, datasource_name)
+        return '[{}].[{}]'.format(database_name, datasource_name)
+    return '[{}].[{}].[{}]'.format(database_name, schema, datasource_name)


 def get_schema_perm(database, schema):
     if schema:
-        return "[{}].[{}]".format(database, schema)
+        return '[{}].[{}]'.format(database, schema)


 def validate_json(obj):
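These bracketed formats are the permission strings Superset stores and checks against; for example (hypothetical names):

    get_datasource_full_name('examples', 'birth_names')            # '[examples].[birth_names]'
    get_datasource_full_name('examples', 'birth_names', 'public')  # '[examples].[public].[birth_names]'
    get_schema_perm('examples', 'public')                          # '[examples].[public]'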
@@ -398,7 +398,7 @@ def validate_json(obj):
     try:
         json.loads(obj)
     except Exception:
-        raise SupersetException("JSON is not valid")
+        raise SupersetException('JSON is not valid')


 def table_has_constraint(table, name, db):

@@ -421,7 +421,7 @@ class timeout(object):
         self.error_message = error_message

     def handle_timeout(self, signum, frame):
-        logging.error("Process timed out")
+        logging.error('Process timed out')
         raise SupersetTimeoutException(self.error_message)

     def __enter__(self):

@@ -441,15 +441,15 @@ class timeout(object):


 def pessimistic_connection_handling(some_engine):
-    @event.listens_for(some_engine, "engine_connect")
+    @event.listens_for(some_engine, 'engine_connect')
     def ping_connection(connection, branch):
         if branch:
-            # "branch" refers to a sub-connection of a connection,
+            # 'branch' refers to a sub-connection of a connection,
             # we don't want to bother pinging on these.
             return

-        # turn off "close with result". This flag is only used with
-        # "connectionless" execution, otherwise will be False in any case
+        # turn off 'close with result'. This flag is only used with
+        # 'connectionless' execution, otherwise will be False in any case
         save_should_close_with_result = connection.should_close_with_result
         connection.should_close_with_result = False

@@ -461,7 +461,7 @@ def pessimistic_connection_handling(some_engine):
         except exc.DBAPIError as err:
             # catch SQLAlchemy's DBAPIError, which is a wrapper
             # for the DBAPI's exception. It includes a .connection_invalidated
-            # attribute which specifies if this connection is a "disconnect"
+            # attribute which specifies if this connection is a 'disconnect'
             # condition, which is based on inspection of the original exception
             # by the dialect in use.
             if err.connection_invalidated:

@@ -473,7 +473,7 @@ def pessimistic_connection_handling(some_engine):
             else:
                 raise
         finally:
-            # restore "close with result"
+            # restore 'close with result'
             connection.should_close_with_result = save_should_close_with_result


@@ -514,11 +514,11 @@ def send_email_smtp(to, subject, html_content, config, files=None,
     msg = MIMEMultipart(mime_subtype)
     msg['Subject'] = subject
     msg['From'] = smtp_mail_from
-    msg['To'] = ", ".join(to)
+    msg['To'] = ', '.join(to)
     recipients = to
     if cc:
         cc = get_email_address_list(cc)
-        msg['CC'] = ", ".join(cc)
+        msg['CC'] = ', '.join(cc)
         recipients = recipients + cc

     if bcc:

@@ -532,11 +532,11 @@ def send_email_smtp(to, subject, html_content, config, files=None,
     for fname in files or []:
         basename = os.path.basename(fname)
-        with open(fname, "rb") as f:
+        with open(fname, 'rb') as f:
             msg.attach(
                 MIMEApplication(
                     f.read(),
-                    Content_Disposition='attachment; filename="%s"' % basename,
+                    Content_Disposition="attachment; filename='%s'" % basename,
                     Name=basename))

     send_MIME_email(smtp_mail_from, recipients, msg, config, dryrun=dryrun)

@@ -557,7 +557,7 @@ def send_MIME_email(e_from, e_to, mime_msg, config, dryrun=False):
             s.starttls()
         if SMTP_USER and SMTP_PASSWORD:
             s.login(SMTP_USER, SMTP_PASSWORD)
-        logging.info("Sent an alert email to " + str(e_to))
+        logging.info('Sent an alert email to ' + str(e_to))
         s.sendmail(e_from, e_to, mime_msg.as_string())
         s.quit()
     else:

@@ -601,11 +601,11 @@ def has_access(f):
             logging.warning(
                 LOGMSG_ERR_SEC_ACCESS_DENIED.format(permission_str,
                                                     self.__class__.__name__))
-            flash(as_unicode(FLAMSG_ERR_SEC_ACCESS_DENIED), "danger")
+            flash(as_unicode(FLAMSG_ERR_SEC_ACCESS_DENIED), 'danger')
         # adds next arg to forward to the original path once user is logged in.
         return redirect(
             url_for(
-                self.appbuilder.sm.auth_view.__class__.__name__ + ".login",
+                self.appbuilder.sm.auth_view.__class__.__name__ + '.login',
                 next=request.path))

     f._permission_name = permission_str

@@ -631,7 +631,7 @@ def zlib_compress(data):
     """
     if PY3K:
         if isinstance(data, str):
-            return zlib.compress(bytes(data, "utf-8"))
+            return zlib.compress(bytes(data, 'utf-8'))
         return zlib.compress(data)
     return zlib.compress(data)
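Together with zlib_decompress_to_string below, this gives a str-in/str-out round trip on Python 3; a small sketch:

    payload = '{"status": "success"}'
    blob = zlib_compress(payload)                  # bytes
    assert zlib_decompress_to_string(blob) == payload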
@@ -649,8 +649,8 @@ def zlib_decompress_to_string(blob):
         if isinstance(blob, bytes):
             decompressed = zlib.decompress(blob)
         else:
-            decompressed = zlib.decompress(bytes(blob, "utf-8"))
-        return decompressed.decode("utf-8")
+            decompressed = zlib.decompress(bytes(blob, 'utf-8'))
+        return decompressed.decode('utf-8')
     return zlib.decompress(blob)


@@ -668,7 +668,7 @@ def get_celery_app(config):

 def merge_extra_filters(form_data):
     # extra_filters are temporary/contextual filters that are external
     # to the slice definition. We use those for dynamic interactive
-    # filters like the ones emitted by the "Filter Box" visualization
+    # filters like the ones emitted by the 'Filter Box' visualization
     if form_data.get('extra_filters'):
         # __form and __to are special extra_filters that target time
         # boundaries. The rest of extra_filters are simple
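For context, extra_filters arrive in form_data roughly as below; the field names follow the filter-box payloads of this era and are illustrative only:

    form_data = {
        'filters': [{'col': 'country', 'op': 'in', 'val': ['US']}],
        'extra_filters': [
            {'col': '__from', 'op': 'in', 'val': '7 days ago'},
            {'col': 'gender', 'op': 'in', 'val': ['girl']},
        ],
    }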
@@ -20,15 +20,15 @@ class AnnotationModelView(SupersetModelView, DeleteMixin):  # noqa
     def pre_add(self, obj):
         if not obj.layer:
-            raise Exception("Annotation layer is required.")
+            raise Exception('Annotation layer is required.')
         if not obj.start_dttm and not obj.end_dttm:
-            raise Exception("Annotation start time or end time is required.")
+            raise Exception('Annotation start time or end time is required.')
         elif not obj.start_dttm:
             obj.start_dttm = obj.end_dttm
         elif not obj.end_dttm:
             obj.end_dttm = obj.start_dttm
         elif obj.end_dttm < obj.start_dttm:
-            raise Exception("Annotation end time must be no earlier than start time.")
+            raise Exception('Annotation end time must be no earlier than start time.')

     def pre_update(self, obj):
         self.pre_add(obj)

@@ -43,17 +43,17 @@ class AnnotationLayerModelView(SupersetModelView, DeleteMixin):

 appbuilder.add_view(
     AnnotationLayerModelView,
-    "Annotation Layers",
-    label=__("Annotation Layers"),
-    icon="fa-comment",
-    category="Manage",
-    category_label=__("Manage"),
+    'Annotation Layers',
+    label=__('Annotation Layers'),
+    icon='fa-comment',
+    category='Manage',
+    category_label=__('Manage'),
     category_icon='')
 appbuilder.add_view(
     AnnotationModelView,
-    "Annotations",
-    label=__("Annotations"),
-    icon="fa-comments",
-    category="Manage",
-    category_label=__("Manage"),
+    'Annotations',
+    label=__('Annotations'),
+    icon='fa-comments',
+    category='Manage',
+    category_label=__('Manage'),
     category_icon='')
@@ -21,13 +21,13 @@ FRONTEND_CONF_KEYS = ('SUPERSET_WEBSERVER_TIMEOUT',)


 def get_error_msg():
-    if conf.get("SHOW_STACKTRACE"):
+    if conf.get('SHOW_STACKTRACE'):
         error_msg = traceback.format_exc()
     else:
-        error_msg = "FATAL ERROR \n"
+        error_msg = 'FATAL ERROR \n'
         error_msg += (
-            "Stacktrace is hidden. Change the SHOW_STACKTRACE "
-            "configuration setting to enable it")
+            'Stacktrace is hidden. Change the SHOW_STACKTRACE '
+            'configuration setting to enable it')
     return error_msg


@@ -38,7 +38,7 @@ def json_error_response(msg=None, status=500, stacktrace=None, payload=None):
         payload['stacktrace'] = stacktrace
     return Response(
         json.dumps(payload, default=utils.json_iso_dttm_ser),
-        status=status, mimetype="application/json")
+        status=status, mimetype='application/json')


 def api(f):

@@ -57,7 +57,7 @@ def api(f):


 def get_datasource_exist_error_mgs(full_name):
-    return __("Datasource %(name)s already exists", name=full_name)
+    return __('Datasource %(name)s already exists', name=full_name)


 def get_user_roles():

@@ -76,26 +76,26 @@ class BaseSupersetView(BaseView):
     def all_datasource_access(self, user=None):
         return self.can_access(
-            "all_datasource_access", "all_datasource_access", user=user)
+            'all_datasource_access', 'all_datasource_access', user=user)

     def database_access(self, database, user=None):
         return (
             self.can_access(
-                "all_database_access", "all_database_access", user=user) or
-            self.can_access("database_access", database.perm, user=user)
+                'all_database_access', 'all_database_access', user=user) or
+            self.can_access('database_access', database.perm, user=user)
         )

     def schema_access(self, datasource, user=None):
         return (
             self.database_access(datasource.database, user=user) or
             self.all_datasource_access(user=user) or
-            self.can_access("schema_access", datasource.schema_perm, user=user)
+            self.can_access('schema_access', datasource.schema_perm, user=user)
         )

     def datasource_access(self, datasource, user=None):
         return (
             self.schema_access(datasource, user=user) or
-            self.can_access("datasource_access", datasource.perm, user=user)
+            self.can_access('datasource_access', datasource.perm, user=user)
         )

     def datasource_access_by_name(

@@ -110,13 +110,13 @@ class BaseSupersetView(BaseView):
         datasources = ConnectorRegistry.query_datasources_by_name(
             db.session, database, datasource_name, schema=schema)
         for datasource in datasources:
-            if self.can_access("datasource_access", datasource.perm):
+            if self.can_access('datasource_access', datasource.perm):
                 return True
         return False

     def datasource_access_by_fullname(
             self, database, full_table_name, schema):
-        table_name_pieces = full_table_name.split(".")
+        table_name_pieces = full_table_name.split('.')
         if len(table_name_pieces) == 2:
             table_schema = table_name_pieces[0]
             table_name = table_name_pieces[1]

@@ -234,7 +234,7 @@ class DeleteMixin(object):
         try:
             self.pre_delete(item)
         except Exception as e:
-            flash(str(e), "danger")
+            flash(str(e), 'danger')
         else:
             view_menu = sm.find_view_menu(item.get_perm())
             pvs = sm.get_session.query(sm.permissionview_model).filter_by(

@@ -266,10 +266,10 @@ class DeleteMixin(object):
         self.update_redirect()

     @action(
-        "muldelete",
-        __("Delete"),
-        __("Delete all Really?"),
-        "fa-trash",
+        'muldelete',
+        __('Delete'),
+        __('Delete all Really?'),
+        'fa-trash',
         single=False,
     )
     def muldelete(self, items):

@@ -279,7 +279,7 @@ class DeleteMixin(object):
             try:
                 self.pre_delete(item)
             except Exception as e:
-                flash(str(e), "danger")
+                flash(str(e), 'danger')
             else:
                 self._delete(item.id)
         self.update_redirect()
|
|||
File diff suppressed because it is too large
Load Diff
|
|
@@ -23,11 +23,11 @@ class QueryView(SupersetModelView):

 appbuilder.add_view(
     QueryView,
-    "Queries",
-    label=__("Queries"),
-    category="Manage",
-    category_label=__("Manage"),
-    icon="fa-search")
+    'Queries',
+    label=__('Queries'),
+    category='Manage',
+    category_label=__('Manage'),
+    icon='fa-search')


 class SavedQueryView(SupersetModelView, DeleteMixin):
@@ -78,13 +78,13 @@ appbuilder.add_view_no_menu(SavedQueryView)
 appbuilder.add_link(
     __('Saved Queries'),
     href='/sqllab/my_queries/',
-    icon="fa-save",
+    icon='fa-save',
     category='SQL Lab')


 class SqlLab(BaseSupersetView):
     """The base views for Superset!"""
-    @expose("/my_queries/")
+    @expose('/my_queries/')
     def my_queries(self):
         """Assigns a list of found users to the given role."""
         return redirect(

superset/viz.py (467 changes): file diff suppressed because it is too large.
@@ -446,7 +446,7 @@ class RequestAccessTests(SupersetTestCase):
         # request access to the table
         resp = self.get_resp(
             ACCESS_REQUEST.format('table', table_1_id, 'go'))
-        assert "Access was requested" in resp
+        assert 'Access was requested' in resp
         access_request1 = self.get_access_requests('gamma', 'table', table_1_id)
         assert access_request1 is not None

@@ -463,7 +463,7 @@ class RequestAccessTests(SupersetTestCase):
             alpha_role,
             sm.find_permission_view_menu('datasource_access', table3_perm))
         sm.add_permission_role(
-            sm.find_role("energy_usage_role"),
+            sm.find_role('energy_usage_role'),
             sm.find_permission_view_menu('datasource_access', table3_perm))
         session.commit()

@@ -19,7 +19,7 @@ from superset.security import sync_role_definitions

 os.environ['SUPERSET_CONFIG'] = 'tests.superset_test_config'

-BASE_DIR = app.config.get("BASE_DIR")
+BASE_DIR = app.config.get('BASE_DIR')


 class SupersetTestCase(unittest.TestCase):
@@ -32,9 +32,9 @@ class SupersetTestCase(unittest.TestCase):
             not os.environ.get('SOLO_TEST') and
             not os.environ.get('examples_loaded')
         ):
-            logging.info("Loading examples")
+            logging.info('Loading examples')
             cli.load_examples(load_test_data=True)
-            logging.info("Done loading examples")
+            logging.info('Done loading examples')
             sync_role_definitions()
             os.environ['examples_loaded'] = '1'
         else:
@@ -43,7 +43,7 @@ class SupersetTestCase(unittest.TestCase):
         self.client = app.test_client()
         self.maxDiff = None

-        gamma_sqllab_role = sm.add_role("gamma_sqllab")
+        gamma_sqllab_role = sm.add_role('gamma_sqllab')
         for perm in sm.find_role('Gamma').permissions:
             sm.add_permission_role(gamma_sqllab_role, perm)
         db_perm = self.get_main_database(sm.get_session).perm
@@ -92,11 +92,11 @@ class SupersetTestCase(unittest.TestCase):
         session = db.session
         cluster = (
             session.query(DruidCluster)
-            .filter_by(cluster_name="druid_test")
+            .filter_by(cluster_name='druid_test')
             .first()
         )
         if not cluster:
-            cluster = DruidCluster(cluster_name="druid_test")
+            cluster = DruidCluster(cluster_name='druid_test')
             session.add(cluster)
             session.commit()

@@ -155,7 +155,7 @@ class SupersetTestCase(unittest.TestCase):
         resp = self.client.get(url, follow_redirects=follow_redirects)
         if raise_on_error and resp.status_code > 400:
             raise Exception(
-                "http request failed with code {}".format(resp.status_code))
+                'http request failed with code {}'.format(resp.status_code))
         return resp.data.decode('utf-8')

     def get_json_resp(
@@ -214,7 +214,7 @@ class SupersetTestCase(unittest.TestCase):
             client_id=client_id),
         )
         if raise_on_error and 'error' in resp:
-            raise Exception("run_sql failed")
+            raise Exception('run_sql failed')
         return resp

     def test_gamma_permissions(self):

@@ -40,32 +40,32 @@ class UtilityFunctionTests(SupersetTestCase):

     # TODO(bkyryliuk): support more cases in CTA function.
     def test_create_table_as(self):
-        q = SupersetQuery("SELECT * FROM outer_space;")
+        q = SupersetQuery('SELECT * FROM outer_space;')

         self.assertEqual(
-            "CREATE TABLE tmp AS \nSELECT * FROM outer_space",
-            q.as_create_table("tmp"))
+            'CREATE TABLE tmp AS \nSELECT * FROM outer_space',
+            q.as_create_table('tmp'))

         self.assertEqual(
-            "DROP TABLE IF EXISTS tmp;\n"
-            "CREATE TABLE tmp AS \nSELECT * FROM outer_space",
-            q.as_create_table("tmp", overwrite=True))
+            'DROP TABLE IF EXISTS tmp;\n'
+            'CREATE TABLE tmp AS \nSELECT * FROM outer_space',
+            q.as_create_table('tmp', overwrite=True))

         # now without a semicolon
-        q = SupersetQuery("SELECT * FROM outer_space")
+        q = SupersetQuery('SELECT * FROM outer_space')
         self.assertEqual(
-            "CREATE TABLE tmp AS \nSELECT * FROM outer_space",
-            q.as_create_table("tmp"))
+            'CREATE TABLE tmp AS \nSELECT * FROM outer_space',
+            q.as_create_table('tmp'))

         # now a multi-line query
         multi_line_query = (
-            "SELECT * FROM planets WHERE\n"
+            'SELECT * FROM planets WHERE\n'
             "Luke_Father = 'Darth Vader'")
         q = SupersetQuery(multi_line_query)
         self.assertEqual(
-            "CREATE TABLE tmp AS \nSELECT * FROM planets WHERE\n"
+            'CREATE TABLE tmp AS \nSELECT * FROM planets WHERE\n'
             "Luke_Father = 'Darth Vader'",
-            q.as_create_table("tmp"),
+            q.as_create_table('tmp'),
         )

@@ -118,8 +118,7 @@ class CeleryTestCase(SupersetTestCase):
             shell=True,
         )
         subprocess.call(
-            "ps auxww | grep 'superset worker' | awk '{print $2}' | "
-            "xargs kill -9",
+            "ps auxww | grep 'superset worker' | awk '{print $2}' | xargs kill -9",
             shell=True,
         )

@@ -143,22 +142,22 @@ class CeleryTestCase(SupersetTestCase):
     def test_add_limit_to_the_query(self):
         main_db = self.get_main_database(db.session)

-        select_query = "SELECT * FROM outer_space;"
+        select_query = 'SELECT * FROM outer_space;'
         updated_select_query = main_db.wrap_sql_limit(select_query, 100)
         # Different DB engines have their own spacing while compiling
         # the queries, that's why ' '.join(query.split()) is used.
         # In addition some of the engines do not include OFFSET 0.
         self.assertTrue(
-            "SELECT * FROM (SELECT * FROM outer_space;) AS inner_qry "
-            "LIMIT 100" in ' '.join(updated_select_query.split()),
+            'SELECT * FROM (SELECT * FROM outer_space;) AS inner_qry '
+            'LIMIT 100' in ' '.join(updated_select_query.split()),
         )

-        select_query_no_semicolon = "SELECT * FROM outer_space"
+        select_query_no_semicolon = 'SELECT * FROM outer_space'
         updated_select_query_no_semicolon = main_db.wrap_sql_limit(
             select_query_no_semicolon, 100)
         self.assertTrue(
-            "SELECT * FROM (SELECT * FROM outer_space) AS inner_qry "
-            "LIMIT 100" in
+            'SELECT * FROM (SELECT * FROM outer_space) AS inner_qry '
+            'LIMIT 100' in
             ' '.join(updated_select_query_no_semicolon.split()),
         )

@@ -167,7 +166,7 @@ class CeleryTestCase(SupersetTestCase):
         )
         updated_multi_line_query = main_db.wrap_sql_limit(multi_line_query, 100)
         self.assertTrue(
-            "SELECT * FROM (SELECT * FROM planets WHERE "
+            'SELECT * FROM (SELECT * FROM planets WHERE '
             "Luke_Father = 'Darth Vader';) AS inner_qry LIMIT 100" in
             ' '.join(updated_multi_line_query.split()),
         )
@@ -176,7 +175,7 @@ class CeleryTestCase(SupersetTestCase):
         main_db = self.get_main_database(db.session)
         db_id = main_db.id
         sql_dont_exist = 'SELECT name FROM table_dont_exist'
-        result1 = self.run_sql(db_id, sql_dont_exist, "1", cta='true')
+        result1 = self.run_sql(db_id, sql_dont_exist, '1', cta='true')
         self.assertTrue('error' in result1)

     def test_run_sync_query_cta(self):
@@ -187,7 +186,7 @@ class CeleryTestCase(SupersetTestCase):
         sql_where = (
             "SELECT name FROM ab_permission WHERE name='{}'".format(perm_name))
         result2 = self.run_sql(
-            db_id, sql_where, "2", tmp_table='tmp_table_2', cta='true')
+            db_id, sql_where, '2', tmp_table='tmp_table_2', cta='true')
         self.assertEqual(QueryStatus.SUCCESS, result2['query']['state'])
         self.assertEqual([], result2['data'])
         self.assertEqual([], result2['columns'])
@@ -203,7 +202,7 @@ class CeleryTestCase(SupersetTestCase):
         db_id = main_db.id
         sql_empty_result = 'SELECT * FROM ab_user WHERE id=666'
         result3 = self.run_sql(
-            db_id, sql_empty_result, "3", tmp_table='tmp_table_3', cta='true')
+            db_id, sql_empty_result, '3', tmp_table='tmp_table_3', cta='true')
         self.assertEqual(QueryStatus.SUCCESS, result3['query']['state'])
         self.assertEqual([], result3['data'])
         self.assertEqual([], result3['columns'])
@@ -216,7 +215,7 @@ class CeleryTestCase(SupersetTestCase):
         eng = main_db.get_sqla_engine()
         sql_where = "SELECT name FROM ab_role WHERE name='Admin'"
         result = self.run_sql(
-            main_db.id, sql_where, "4", async='true', tmp_table='tmp_async_1',
+            main_db.id, sql_where, '4', async='true', tmp_table='tmp_async_1',
             cta='true')
         assert result['query']['state'] in (
             QueryStatus.PENDING, QueryStatus.RUNNING, QueryStatus.SUCCESS)
@@ -228,10 +227,10 @@ class CeleryTestCase(SupersetTestCase):
         self.assertEqual(QueryStatus.SUCCESS, query.status)
         self.assertEqual([{'name': 'Admin'}], df.to_dict(orient='records'))
         self.assertEqual(QueryStatus.SUCCESS, query.status)
-        self.assertTrue("FROM tmp_async_1" in query.select_sql)
-        self.assertTrue("LIMIT 666" in query.select_sql)
+        self.assertTrue('FROM tmp_async_1' in query.select_sql)
+        self.assertTrue('LIMIT 666' in query.select_sql)
         self.assertEqual(
-            "CREATE TABLE tmp_async_1 AS \nSELECT name FROM ab_role "
+            'CREATE TABLE tmp_async_1 AS \nSELECT name FROM ab_role '
             "WHERE name='Admin'", query.executed_sql)
         self.assertEqual(sql_where, query.sql)
         self.assertEqual(0, query.rows)
@@ -254,7 +253,7 @@ class CeleryTestCase(SupersetTestCase):

     def test_get_columns(self):
         main_db = self.get_main_database(db.session)
-        df = main_db.get_df("SELECT * FROM multiformat_time_series", None)
+        df = main_db.get_df('SELECT * FROM multiformat_time_series', None)
         cdf = dataframe.SupersetDataFrame(df)

         # Making ordering non-deterministic

@@ -68,7 +68,7 @@ class CoreTests(SupersetTestCase):

     def test_slice_endpoint(self):
         self.login(username='admin')
-        slc = self.get_slice("Girls", db.session)
+        slc = self.get_slice('Girls', db.session)
         resp = self.get_resp('/superset/slice/{}/'.format(slc.id))
         assert 'Time Column' in resp
         assert 'List Roles' in resp
@@ -80,7 +80,7 @@ class CoreTests(SupersetTestCase):

     def test_slice_json_endpoint(self):
         self.login(username='admin')
-        slc = self.get_slice("Girls", db.session)
+        slc = self.get_slice('Girls', db.session)

         json_endpoint = (
             '/superset/explore_json/{}/{}?form_data={}'
@@ -91,7 +91,7 @@ class CoreTests(SupersetTestCase):

     def test_slice_csv_endpoint(self):
         self.login(username='admin')
-        slc = self.get_slice("Girls", db.session)
+        slc = self.get_slice('Girls', db.session)

         csv_endpoint = (
             '/superset/explore_json/{}/{}?csv=true&form_data={}'
@@ -129,16 +129,16 @@ class CoreTests(SupersetTestCase):

     def test_save_slice(self):
         self.login(username='admin')
-        slice_name = "Energy Sankey"
+        slice_name = 'Energy Sankey'
         slice_id = self.get_slice(slice_name, db.session).id
         db.session.commit()
-        copy_name = "Test Sankey Save"
+        copy_name = 'Test Sankey Save'
         tbl_id = self.table_ids.get('energy_usage')
-        new_slice_name = "Test Sankey Overwirte"
+        new_slice_name = 'Test Sankey Overwirte'

         url = (
-            "/superset/explore/table/{}/?slice_name={}&"
-            "action={}&datasource_name=energy_usage&form_data={}")
+            '/superset/explore/table/{}/?slice_name={}&'
+            'action={}&datasource_name=energy_usage&form_data={}')

         form_data = {
             'viz_type': 'sankey',
@@ -183,17 +183,17 @@ class CoreTests(SupersetTestCase):

     def test_filter_endpoint(self):
         self.login(username='admin')
-        slice_name = "Energy Sankey"
+        slice_name = 'Energy Sankey'
         slice_id = self.get_slice(slice_name, db.session).id
         db.session.commit()
         tbl_id = self.table_ids.get('energy_usage')
         table = db.session.query(SqlaTable).filter(SqlaTable.id == tbl_id)
         table.filter_select_enabled = True
         url = (
-            "/superset/filter/table/{}/target/?viz_type=sankey&groupby=source"
-            "&metric=sum__value&flt_col_0=source&flt_op_0=in&flt_eq_0=&"
-            "slice_id={}&datasource_name=energy_usage&"
-            "datasource_id=1&datasource_type=table")
+            '/superset/filter/table/{}/target/?viz_type=sankey&groupby=source'
+            '&metric=sum__value&flt_col_0=source&flt_op_0=in&flt_eq_0=&'
+            'slice_id={}&datasource_name=energy_usage&'
+            'datasource_id=1&datasource_type=table')

         # Changing name
         resp = self.get_resp(url.format(tbl_id, slice_id))
@@ -211,7 +211,7 @@ class CoreTests(SupersetTestCase):
             (slc.slice_name, 'slice_id_url', slc.slice_id_url),
         ]
         for name, method, url in urls:
-            logging.info("[{name}]/[{method}]: {url}".format(**locals()))
+            logging.info('[{name}]/[{method}]: {url}'.format(**locals()))
             self.client.get(url)

     def test_tablemodelview_list(self):
@@ -250,7 +250,7 @@ class CoreTests(SupersetTestCase):
             (slc.slice_name, 'slice_url', slc.slice_url),
         ]
         for name, method, url in urls:
-            print("[{name}]/[{method}]: {url}".format(**locals()))
+            print('[{name}]/[{method}]: {url}'.format(**locals()))
             response = self.client.get(url)

     def test_dashboard(self):
@@ -266,12 +266,12 @@ class CoreTests(SupersetTestCase):
         for mod in modules:
             failed, tests = doctest.testmod(mod)
             if failed:
-                raise Exception("Failed a doctest")
+                raise Exception('Failed a doctest')

     def test_misc(self):
-        assert self.get_resp('/health') == "OK"
-        assert self.get_resp('/healthcheck') == "OK"
-        assert self.get_resp('/ping') == "OK"
+        assert self.get_resp('/health') == 'OK'
+        assert self.get_resp('/healthcheck') == 'OK'
+        assert self.get_resp('/ping') == 'OK'

     def test_testconn(self, username='admin'):
         self.login(username=username)
@@ -308,12 +308,12 @@ class CoreTests(SupersetTestCase):
         conn_pre = sqla.engine.url.make_url(database.sqlalchemy_uri_decrypted)

         def custom_password_store(uri):
-            return "password_store_test"
+            return 'password_store_test'

         database.custom_password_store = custom_password_store
         conn = sqla.engine.url.make_url(database.sqlalchemy_uri_decrypted)
         if conn_pre.password:
-            assert conn.password == "password_store_test"
+            assert conn.password == 'password_store_test'
             assert conn.password != conn_pre.password

     def test_databaseview_edit(self, username='admin'):
@@ -330,7 +330,7 @@ class CoreTests(SupersetTestCase):
         self.assertEqual(sqlalchemy_uri_decrypted, database.sqlalchemy_uri_decrypted)

     def test_warm_up_cache(self):
-        slc = self.get_slice("Girls", db.session)
+        slc = self.get_slice('Girls', db.session)
         data = self.get_json_resp(
             '/superset/warm_up_cache?slice_id={}'.format(slc.id))

@@ -343,12 +343,12 @@ class CoreTests(SupersetTestCase):
     def test_shortner(self):
         self.login(username='admin')
         data = (
-            "//superset/explore/table/1/?viz_type=sankey&groupby=source&"
-            "groupby=target&metric=sum__value&row_limit=5000&where=&having=&"
-            "flt_col_0=source&flt_op_0=in&flt_eq_0=&slice_id=78&slice_name="
-            "Energy+Sankey&collapsed_fieldsets=&action=&datasource_name="
-            "energy_usage&datasource_id=1&datasource_type=table&"
-            "previous_viz_type=sankey"
+            '//superset/explore/table/1/?viz_type=sankey&groupby=source&'
+            'groupby=target&metric=sum__value&row_limit=5000&where=&having=&'
+            'flt_col_0=source&flt_op_0=in&flt_eq_0=&slice_id=78&slice_name='
+            'Energy+Sankey&collapsed_fieldsets=&action=&datasource_name='
+            'energy_usage&datasource_id=1&datasource_type=table&'
+            'previous_viz_type=sankey'
         )
         resp = self.client.post('/r/shortner/', data=data)
         assert '/r/' in resp.data.decode('utf-8')
@@ -383,7 +383,7 @@ class CoreTests(SupersetTestCase):
     def test_save_dash(self, username='admin'):
         self.login(username=username)
         dash = db.session.query(models.Dashboard).filter_by(
-            slug="births").first()
+            slug='births').first()
         positions = []
         for i, slc in enumerate(dash.slices):
             d = {
@@ -401,12 +401,12 @@ class CoreTests(SupersetTestCase):
         }
         url = '/superset/save_dash/{}/'.format(dash.id)
         resp = self.get_resp(url, data=dict(data=json.dumps(data)))
-        self.assertIn("SUCCESS", resp)
+        self.assertIn('SUCCESS', resp)

     def test_save_dash_with_filter(self, username='admin'):
         self.login(username=username)
         dash = db.session.query(models.Dashboard).filter_by(
-            slug="world_health").first()
+            slug='world_health').first()
         positions = []
         for i, slc in enumerate(dash.slices):
             d = {
@@ -429,21 +429,21 @@ class CoreTests(SupersetTestCase):

         url = '/superset/save_dash/{}/'.format(dash.id)
         resp = self.get_resp(url, data=dict(data=json.dumps(data)))
-        self.assertIn("SUCCESS", resp)
+        self.assertIn('SUCCESS', resp)

         updatedDash = db.session.query(models.Dashboard).filter_by(
-            slug="world_health").first()
+            slug='world_health').first()
         new_url = updatedDash.url
-        self.assertIn("region", new_url)
+        self.assertIn('region', new_url)

         resp = self.get_resp(new_url)
-        self.assertIn("North America", resp)
+        self.assertIn('North America', resp)

     def test_save_dash_with_dashboard_title(self, username='admin'):
         self.login(username=username)
         dash = (
             db.session.query(models.Dashboard)
-            .filter_by(slug="births")
+            .filter_by(slug='births')
             .first()
         )
         origin_title = dash.dashboard_title
@@ -466,7 +466,7 @@ class CoreTests(SupersetTestCase):
         self.get_resp(url, data=dict(data=json.dumps(data)))
         updatedDash = (
             db.session.query(models.Dashboard)
-            .filter_by(slug="births")
+            .filter_by(slug='births')
             .first()
         )
         self.assertEqual(updatedDash.dashboard_title, 'new title')
@@ -477,7 +477,7 @@ class CoreTests(SupersetTestCase):
    def test_copy_dash(self, username='admin'):
         self.login(username=username)
         dash = db.session.query(models.Dashboard).filter_by(
-            slug="births").first()
+            slug='births').first()
         positions = []
         for i, slc in enumerate(dash.slices):
             d = {
@@ -514,37 +514,37 @@ class CoreTests(SupersetTestCase):
     def test_add_slices(self, username='admin'):
         self.login(username=username)
         dash = db.session.query(models.Dashboard).filter_by(
-            slug="births").first()
+            slug='births').first()
         new_slice = db.session.query(models.Slice).filter_by(
-            slice_name="Mapbox Long/Lat").first()
+            slice_name='Mapbox Long/Lat').first()
         existing_slice = db.session.query(models.Slice).filter_by(
-            slice_name="Name Cloud").first()
+            slice_name='Name Cloud').first()
         data = {
-            "slice_ids": [new_slice.data["slice_id"],
-                          existing_slice.data["slice_id"]],
+            'slice_ids': [new_slice.data['slice_id'],
+                          existing_slice.data['slice_id']],
         }
         url = '/superset/add_slices/{}/'.format(dash.id)
         resp = self.client.post(url, data=dict(data=json.dumps(data)))
-        assert "SLICES ADDED" in resp.data.decode('utf-8')
+        assert 'SLICES ADDED' in resp.data.decode('utf-8')

         dash = db.session.query(models.Dashboard).filter_by(
-            slug="births").first()
+            slug='births').first()
         new_slice = db.session.query(models.Slice).filter_by(
-            slice_name="Mapbox Long/Lat").first()
+            slice_name='Mapbox Long/Lat').first()
         assert new_slice in dash.slices
         assert len(set(dash.slices)) == len(dash.slices)

         # cleaning up
         dash = db.session.query(models.Dashboard).filter_by(
-            slug="births").first()
+            slug='births').first()
         dash.slices = [
-            o for o in dash.slices if o.slice_name != "Mapbox Long/Lat"]
+            o for o in dash.slices if o.slice_name != 'Mapbox Long/Lat']
         db.session.commit()

     def test_gamma(self):
         self.login(username='gamma')
-        assert "List Slice" in self.get_resp('/slicemodelview/list/')
-        assert "List Dashboard" in self.get_resp('/dashboardmodelview/list/')
+        assert 'List Slice' in self.get_resp('/slicemodelview/list/')
+        assert 'List Dashboard' in self.get_resp('/dashboardmodelview/list/')

     def test_csv_endpoint(self):
         self.login('admin')
@@ -553,13 +553,13 @@ class CoreTests(SupersetTestCase):
             FROM ab_user
             WHERE first_name='admin'
         """
-        client_id = "{}".format(random.getrandbits(64))[:10]
+        client_id = '{}'.format(random.getrandbits(64))[:10]
         self.run_sql(sql, client_id, raise_on_error=True)

         resp = self.get_resp('/superset/csv/{}'.format(client_id))
         data = csv.reader(io.StringIO(resp))
         expected_data = csv.reader(
-            io.StringIO("first_name,last_name\nadmin, user\n"))
+            io.StringIO('first_name,last_name\nadmin, user\n'))

         self.assertEqual(list(expected_data), list(data))
         self.logout()
@@ -587,7 +587,7 @@ class CoreTests(SupersetTestCase):
         self.assertIn('birth_names', self.get_resp('/slicemodelview/list/'))

         resp = self.get_resp('/dashboardmodelview/list/')
-        self.assertIn("/superset/dashboard/births/", resp)
+        self.assertIn('/superset/dashboard/births/', resp)

         self.assertIn('Births', self.get_resp('/superset/dashboard/births/'))

@@ -596,7 +596,7 @@ class CoreTests(SupersetTestCase):
         self.assertNotIn('wb_health_population</a>', resp)

         resp = self.get_resp('/dashboardmodelview/list/')
-        self.assertNotIn("/superset/dashboard/world_health/", resp)
+        self.assertNotIn('/superset/dashboard/world_health/', resp)

     def test_dashboard_with_created_by_can_be_accessed_by_public_users(self):
         self.logout()
@@ -609,7 +609,7 @@ class CoreTests(SupersetTestCase):
         self.grant_public_access_to_table(table)

         dash = db.session.query(models.Dashboard).filter_by(
-            slug="births").first()
+            slug='births').first()
         dash.owners = [appbuilder.sm.find_user('admin')]
         dash.created_by = appbuilder.sm.find_user('admin')
         db.session.merge(dash)
@@ -621,7 +621,7 @@ class CoreTests(SupersetTestCase):
         dash = (
             db.session
             .query(models.Dashboard)
-            .filter_by(slug="births")
+            .filter_by(slug='births')
             .first()
         )
         dash.owners = []
@@ -638,7 +638,7 @@ class CoreTests(SupersetTestCase):
         dash = (
             db.session
             .query(models.Dashboard)
-            .filter_by(slug="births")
+            .filter_by(slug='births')
             .first()
         )
         dash.owners = [alpha]
@@ -662,29 +662,29 @@ class CoreTests(SupersetTestCase):

     def test_get_template_kwarg(self):
         maindb = self.get_main_database(db.session)
-        s = "{{ foo }}"
+        s = '{{ foo }}'
         tp = jinja_context.get_template_processor(database=maindb, foo='bar')
         rendered = tp.process_template(s)
-        self.assertEqual("bar", rendered)
+        self.assertEqual('bar', rendered)

     def test_template_kwarg(self):
         maindb = self.get_main_database(db.session)
-        s = "{{ foo }}"
+        s = '{{ foo }}'
         tp = jinja_context.get_template_processor(database=maindb)
         rendered = tp.process_template(s, foo='bar')
-        self.assertEqual("bar", rendered)
+        self.assertEqual('bar', rendered)

     def test_templated_sql_json(self):
         self.login('admin')
         sql = "SELECT '{{ datetime(2017, 1, 1).isoformat() }}' as test"
-        data = self.run_sql(sql, "fdaklj3ws")
-        self.assertEqual(data['data'][0]['test'], "2017-01-01T00:00:00")
+        data = self.run_sql(sql, 'fdaklj3ws')
+        self.assertEqual(data['data'][0]['test'], '2017-01-01T00:00:00')

     def test_table_metadata(self):
         maindb = self.get_main_database(db.session)
         backend = maindb.backend
         data = self.get_json_resp(
-            "/superset/table/{}/ab_user/null/".format(maindb.id))
+            '/superset/table/{}/ab_user/null/'.format(maindb.id))
         self.assertEqual(data['name'], 'ab_user')
         assert len(data['columns']) > 5
         assert data.get('selectStar').startswith('SELECT')
@@ -717,7 +717,7 @@ class CoreTests(SupersetTestCase):

     def test_user_profile(self, username='admin'):
         self.login(username=username)
-        slc = self.get_slice("Girls", db.session)
+        slc = self.get_slice('Girls', db.session)

         # Setting some faves
         url = '/superset/favstar/Slice/{}/select/'.format(slc.id)
@@ -727,7 +727,7 @@ class CoreTests(SupersetTestCase):
         dash = (
             db.session
             .query(models.Dashboard)
-            .filter_by(slug="births")
+            .filter_by(slug='births')
             .first()
         )
         url = '/superset/favstar/Dashboard/{}/select/'.format(dash.id)
@@ -760,24 +760,24 @@ class CoreTests(SupersetTestCase):

     def test_slice_id_is_always_logged_correctly_on_ajax_request(self):
         # superset/explore_json case
-        self.login(username="admin")
+        self.login(username='admin')
         slc = db.session.query(models.Slice).filter_by(slice_name='Girls').one()
         qry = db.session.query(models.Log).filter_by(slice_id=slc.id)
-        slc_url = slc.slice_url.replace("explore", "explore_json")
+        slc_url = slc.slice_url.replace('explore', 'explore_json')
         self.get_json_resp(slc_url)
         self.assertEqual(1, qry.count())

     def test_slice_query_endpoint(self):
         # API endpoint for query string
-        self.login(username="admin")
-        slc = self.get_slice("Girls", db.session)
+        self.login(username='admin')
+        slc = self.get_slice('Girls', db.session)
         resp = self.get_resp('/superset/slice_query/{}/'.format(slc.id))
         assert 'query' in resp
         assert 'language' in resp
         self.logout()

     def test_viz_get_fillna_for_columns(self):
-        slc = self.get_slice("Girls", db.session)
+        slc = self.get_slice('Girls', db.session)
         q = slc.viz.query_obj()
         results = slc.viz.datasource.query(q)
         fillna_columns = slc.viz.get_fillna_for_columns(results.df.columns)

@@ -23,47 +23,47 @@ class PickableMock(Mock):


 SEGMENT_METADATA = [{
-    "id": "some_id",
-    "intervals": ["2013-05-13T00:00:00.000Z/2013-05-14T00:00:00.000Z"],
-    "columns": {
-        "__time": {
-            "type": "LONG", "hasMultipleValues": False,
-            "size": 407240380, "cardinality": None, "errorMessage": None},
-        "dim1": {
-            "type": "STRING", "hasMultipleValues": False,
-            "size": 100000, "cardinality": 1944, "errorMessage": None},
-        "dim2": {
-            "type": "STRING", "hasMultipleValues": True,
-            "size": 100000, "cardinality": 1504, "errorMessage": None},
-        "metric1": {
-            "type": "FLOAT", "hasMultipleValues": False,
-            "size": 100000, "cardinality": None, "errorMessage": None},
+    'id': 'some_id',
+    'intervals': ['2013-05-13T00:00:00.000Z/2013-05-14T00:00:00.000Z'],
+    'columns': {
+        '__time': {
+            'type': 'LONG', 'hasMultipleValues': False,
+            'size': 407240380, 'cardinality': None, 'errorMessage': None},
+        'dim1': {
+            'type': 'STRING', 'hasMultipleValues': False,
+            'size': 100000, 'cardinality': 1944, 'errorMessage': None},
+        'dim2': {
+            'type': 'STRING', 'hasMultipleValues': True,
+            'size': 100000, 'cardinality': 1504, 'errorMessage': None},
+        'metric1': {
+            'type': 'FLOAT', 'hasMultipleValues': False,
+            'size': 100000, 'cardinality': None, 'errorMessage': None},
     },
-    "aggregators": {
-        "metric1": {
-            "type": "longSum",
-            "name": "metric1",
-            "fieldName": "metric1"},
+    'aggregators': {
+        'metric1': {
+            'type': 'longSum',
+            'name': 'metric1',
+            'fieldName': 'metric1'},
     },
-    "size": 300000,
-    "numRows": 5000000,
+    'size': 300000,
+    'numRows': 5000000,
 }]

 GB_RESULT_SET = [
     {
-        "version": "v1",
-        "timestamp": "2012-01-01T00:00:00.000Z",
-        "event": {
-            "dim1": 'Canada',
-            "metric1": 12345678,
+        'version': 'v1',
+        'timestamp': '2012-01-01T00:00:00.000Z',
+        'event': {
+            'dim1': 'Canada',
+            'metric1': 12345678,
         },
     },
     {
-        "version": "v1",
-        "timestamp": "2012-01-01T00:00:00.000Z",
-        "event": {
-            "dim1": 'USA',
-            "metric1": 12345678 / 2,
+        'version': 'v1',
+        'timestamp': '2012-01-01T00:00:00.000Z',
+        'event': {
+            'dim1': 'USA',
+            'metric1': 12345678 / 2,
         },
     },
 ]
@@ -122,7 +122,7 @@ class DruidTests(SupersetTestCase):

         resp = self.get_resp('/superset/explore/druid/{}/'.format(
             datasource_id))
-        self.assertIn("test_datasource", resp)
+        self.assertIn('test_datasource', resp)
         form_data = {
             'viz_type': 'table',
             'granularity': 'one+day',
@@ -141,7 +141,7 @@ class DruidTests(SupersetTestCase):
                 datasource_id, json.dumps(form_data))
         )
         resp = self.get_json_resp(url)
-        self.assertEqual("Canada", resp['data']['records'][0]['dim1'])
+        self.assertEqual('Canada', resp['data']['records'][0]['dim1'])

         form_data = {
             'viz_type': 'table',
@@ -161,7 +161,7 @@ class DruidTests(SupersetTestCase):
                 datasource_id, json.dumps(form_data))
         )
         resp = self.get_json_resp(url)
-        self.assertEqual("Canada", resp['data']['records'][0]['dim1'])
+        self.assertEqual('Canada', resp['data']['records'][0]['dim1'])

     def test_druid_sync_from_config(self):
         CLUSTER_NAME = 'new_druid'
@@ -184,19 +184,19 @@ class DruidTests(SupersetTestCase):
         db.session.commit()

         cfg = {
-            "user": "admin",
-            "cluster": CLUSTER_NAME,
-            "config": {
-                "name": "test_click",
-                "dimensions": ["affiliate_id", "campaign", "first_seen"],
-                "metrics_spec": [{"type": "count", "name": "count"},
-                                 {"type": "sum", "name": "sum"}],
-                "batch_ingestion": {
-                    "sql": "SELECT * FROM clicks WHERE d='{{ ds }}'",
-                    "ts_column": "d",
-                    "sources": [{
-                        "table": "clicks",
-                        "partition": "d='{{ ds }}'",
+            'user': 'admin',
+            'cluster': CLUSTER_NAME,
+            'config': {
+                'name': 'test_click',
+                'dimensions': ['affiliate_id', 'campaign', 'first_seen'],
+                'metrics_spec': [{'type': 'count', 'name': 'count'},
+                                 {'type': 'sum', 'name': 'sum'}],
+                'batch_ingestion': {
+                    'sql': "SELECT * FROM clicks WHERE d='{{ ds }}'",
+                    'ts_column': 'd',
+                    'sources': [{
+                        'table': 'clicks',
+                        'partition': "d='{{ ds }}'",
                     }],
                 },
             },
@@ -207,13 +207,13 @@ class DruidTests(SupersetTestCase):
         druid_ds = (
             db.session
             .query(DruidDatasource)
-            .filter_by(datasource_name="test_click")
+            .filter_by(datasource_name='test_click')
             .one()
         )
         col_names = set([c.column_name for c in druid_ds.columns])
-        assert {"affiliate_id", "campaign", "first_seen"} == col_names
+        assert {'affiliate_id', 'campaign', 'first_seen'} == col_names
         metric_names = {m.metric_name for m in druid_ds.metrics}
-        assert {"count", "sum"} == metric_names
+        assert {'count', 'sum'} == metric_names
         assert resp.status_code == 201

         check()
@@ -222,29 +222,29 @@ class DruidTests(SupersetTestCase):

         # datasource exists, add new metrics and dimensions
         cfg = {
-            "user": "admin",
-            "cluster": CLUSTER_NAME,
-            "config": {
-                "name": "test_click",
-                "dimensions": ["affiliate_id", "second_seen"],
-                "metrics_spec": [
-                    {"type": "bla", "name": "sum"},
-                    {"type": "unique", "name": "unique"},
+            'user': 'admin',
+            'cluster': CLUSTER_NAME,
+            'config': {
+                'name': 'test_click',
+                'dimensions': ['affiliate_id', 'second_seen'],
+                'metrics_spec': [
+                    {'type': 'bla', 'name': 'sum'},
+                    {'type': 'unique', 'name': 'unique'},
                 ],
             },
         }
         resp = self.client.post('/superset/sync_druid/', data=json.dumps(cfg))
         druid_ds = db.session.query(DruidDatasource).filter_by(
-            datasource_name="test_click").one()
+            datasource_name='test_click').one()
         # columns and metrics are not deleted if config is changed as
         # user could define his own dimensions / metrics and want to keep them
         assert set([c.column_name for c in druid_ds.columns]) == set(
-            ["affiliate_id", "campaign", "first_seen", "second_seen"])
+            ['affiliate_id', 'campaign', 'first_seen', 'second_seen'])
         assert set([m.metric_name for m in druid_ds.metrics]) == set(
-            ["count", "sum", "unique"])
+            ['count', 'sum', 'unique'])
         # metric type will not be overridden, sum stays instead of bla
         assert set([m.metric_type for m in druid_ds.metrics]) == set(
-            ["longSum", "sum", "unique"])
+            ['longSum', 'sum', 'unique'])
         assert resp.status_code == 201

     def test_filter_druid_datasource(self):
@@ -322,7 +322,7 @@ class DruidTests(SupersetTestCase):

         view_menu_name = cluster.datasources[0].get_perm()
         view_menu = sm.find_view_menu(view_menu_name)
-        permission = sm.find_permission("datasource_access")
+        permission = sm.find_permission('datasource_access')

         pv = sm.get_session.query(sm.permissionview_model).filter_by(
             permission=permission, view_menu=view_menu).first()
@@ -511,7 +511,7 @@ class DruidTests(SupersetTestCase):
         self.assertEqual('', res.filter['filter']['value'])

     def test_get_filters_extracts_values_in_quotes(self):
-        filtr = {'col': 'A', 'op': 'in', 'val': [" 'a' "]}
+        filtr = {'col': 'A', 'op': 'in', 'val': [' "a" ']}
         res = DruidDatasource.get_filters([filtr], [])
         self.assertEqual('a', res.filter['filter']['value'])

@@ -60,9 +60,9 @@ class ImportExportTests(SupersetTestCase):
             'database_name': db_name,
             'schema': '',
             # Test for trailing commas
-            "metrics": [
-                "sum__signup_attempt_email",
-                "sum__signup_attempt_facebook",
+            'metrics': [
+                'sum__signup_attempt_email',
+                'sum__signup_attempt_facebook',
             ],
         }

@@ -319,7 +319,7 @@ class ImportExportTests(SupersetTestCase):
         make_transient(expected_dash)
         self.assert_dash_equals(
             expected_dash, imported_dash, check_position=False)
-        self.assertEquals({"remote_id": 10002, "import_time": 1990},
+        self.assertEquals({'remote_id': 10002, 'import_time': 1990},
                           json.loads(imported_dash.json_metadata))

         expected_position = dash_with_1_slice.position_array
@@ -333,11 +333,11 @@ class ImportExportTests(SupersetTestCase):
         dash_with_2_slices = self.create_dashboard(
             'dash_with_2_slices', slcs=[e_slc, b_slc], id=10003)
         dash_with_2_slices.json_metadata = json.dumps({
-            "remote_id": 10003,
-            "filter_immune_slices": ["{}".format(e_slc.id)],
-            "expanded_slices": {
-                "{}".format(e_slc.id): True,
-                "{}".format(b_slc.id): False,
+            'remote_id': 10003,
+            'filter_immune_slices': ['{}'.format(e_slc.id)],
+            'expanded_slices': {
+                '{}'.format(e_slc.id): True,
+                '{}'.format(b_slc.id): False,
             },
         })

@@ -353,10 +353,10 @@ class ImportExportTests(SupersetTestCase):
         i_e_slc = self.get_slice_by_name('e_slc')
         i_b_slc = self.get_slice_by_name('b_slc')
         expected_json_metadata = {
-            "remote_id": 10003,
-            "import_time": 1991,
-            "filter_immune_slices": ["{}".format(i_e_slc.id)],
-            "expanded_slices": {
+            'remote_id': 10003,
+            'import_time': 1991,
+            'filter_immune_slices': ['{}'.format(i_e_slc.id)],
+            'expanded_slices': {
                 '{}'.format(i_e_slc.id): True,
                 '{}'.format(i_b_slc.id): False,
             },
@@ -391,7 +391,7 @@ class ImportExportTests(SupersetTestCase):
         imported_dash = self.get_dash(imported_dash_id_2)
         self.assert_dash_equals(
             expected_dash, imported_dash, check_position=False)
-        self.assertEquals({"remote_id": 10004, "import_time": 1992},
+        self.assertEquals({'remote_id': 10004, 'import_time': 1992},
                           json.loads(imported_dash.json_metadata))

     def test_import_table_no_metadata(self):
@@ -403,7 +403,7 @@ class ImportExportTests(SupersetTestCase):
     def test_import_table_1_col_1_met(self):
         table = self.create_table(
             'table_1_col_1_met', id=10002,
-            cols_names=["col1"], metric_names=["metric1"])
+            cols_names=['col1'], metric_names=['metric1'])
         imported_id = SqlaTable.import_obj(table, import_time=1990)
         imported = self.get_table(imported_id)
         self.assert_table_equals(table, imported)
@@ -464,7 +464,7 @@ class ImportExportTests(SupersetTestCase):
     def test_import_druid_1_col_1_met(self):
         datasource = self.create_druid_datasource(
             'druid_1_col_1_met', id=10002,
-            cols_names=["col1"], metric_names=["metric1"])
+            cols_names=['col1'], metric_names=['metric1'])
         imported_id = DruidDatasource.import_obj(
             datasource, import_time=1990)
         imported = self.get_datasource(imported_id)

@@ -16,42 +16,42 @@ class SupersetTestCase(unittest.TestCase):
         return sq.tables

     def test_simple_select(self):
-        query = "SELECT * FROM tbname"
-        self.assertEquals({"tbname"}, self.extract_tables(query))
+        query = 'SELECT * FROM tbname'
+        self.assertEquals({'tbname'}, self.extract_tables(query))

         # underscores
-        query = "SELECT * FROM tb_name"
-        self.assertEquals({"tb_name"},
+        query = 'SELECT * FROM tb_name'
+        self.assertEquals({'tb_name'},
                           self.extract_tables(query))

         # quotes
         query = 'SELECT * FROM "tbname"'
-        self.assertEquals({"tbname"}, self.extract_tables(query))
+        self.assertEquals({'tbname'}, self.extract_tables(query))

         # unicode encoding
         query = 'SELECT * FROM "tb_name" WHERE city = "Lübeck"'
-        self.assertEquals({"tb_name"}, self.extract_tables(query))
+        self.assertEquals({'tb_name'}, self.extract_tables(query))

         # schema
         self.assertEquals(
-            {"schemaname.tbname"},
-            self.extract_tables("SELECT * FROM schemaname.tbname"))
+            {'schemaname.tbname'},
+            self.extract_tables('SELECT * FROM schemaname.tbname'))

         # quotes
-        query = "SELECT field1, field2 FROM tb_name"
-        self.assertEquals({"tb_name"}, self.extract_tables(query))
+        query = 'SELECT field1, field2 FROM tb_name'
+        self.assertEquals({'tb_name'}, self.extract_tables(query))

-        query = "SELECT t1.f1, t2.f2 FROM t1, t2"
-        self.assertEquals({"t1", "t2"}, self.extract_tables(query))
+        query = 'SELECT t1.f1, t2.f2 FROM t1, t2'
+        self.assertEquals({'t1', 't2'}, self.extract_tables(query))

     def test_select_named_table(self):
-        query = "SELECT a.date, a.field FROM left_table a LIMIT 10"
+        query = 'SELECT a.date, a.field FROM left_table a LIMIT 10'
         self.assertEquals(
-            {"left_table"}, self.extract_tables(query))
+            {'left_table'}, self.extract_tables(query))

     def test_reverse_select(self):
-        query = "FROM t1 SELECT field"
-        self.assertEquals({"t1"}, self.extract_tables(query))
+        query = 'FROM t1 SELECT field'
+        self.assertEquals({'t1'}, self.extract_tables(query))

     def test_subselect(self):
         query = """
@@ -63,7 +63,7 @@ class SupersetTestCase(unittest.TestCase):
             ) sub, s2.t2
             WHERE sub.resolution = 'NONE'
         """
-        self.assertEquals({"s1.t1", "s2.t2"},
+        self.assertEquals({'s1.t1', 's2.t2'},
                           self.extract_tables(query))

         query = """
@@ -75,7 +75,7 @@ class SupersetTestCase(unittest.TestCase):
             ) sub
             WHERE sub.resolution = 'NONE'
         """
-        self.assertEquals({"s1.t1"}, self.extract_tables(query))
+        self.assertEquals({'s1.t1'}, self.extract_tables(query))

         query = """
             SELECT * FROM t1
@@ -86,25 +86,25 @@ class SupersetTestCase(unittest.TestCase):
                 WHERE ROW(5*t2.s1,77)=
                     (SELECT 50,11*s1 FROM t4)));
         """
-        self.assertEquals({"t1", "t2", "t3", "t4"},
+        self.assertEquals({'t1', 't2', 't3', 't4'},
                           self.extract_tables(query))

     def test_select_in_expression(self):
-        query = "SELECT f1, (SELECT count(1) FROM t2) FROM t1"
-        self.assertEquals({"t1", "t2"}, self.extract_tables(query))
+        query = 'SELECT f1, (SELECT count(1) FROM t2) FROM t1'
+        self.assertEquals({'t1', 't2'}, self.extract_tables(query))

     def test_union(self):
-        query = "SELECT * FROM t1 UNION SELECT * FROM t2"
-        self.assertEquals({"t1", "t2"}, self.extract_tables(query))
+        query = 'SELECT * FROM t1 UNION SELECT * FROM t2'
+        self.assertEquals({'t1', 't2'}, self.extract_tables(query))

-        query = "SELECT * FROM t1 UNION ALL SELECT * FROM t2"
-        self.assertEquals({"t1", "t2"}, self.extract_tables(query))
+        query = 'SELECT * FROM t1 UNION ALL SELECT * FROM t2'
+        self.assertEquals({'t1', 't2'}, self.extract_tables(query))

-        query = "SELECT * FROM t1 INTERSECT ALL SELECT * FROM t2"
-        self.assertEquals({"t1", "t2"}, self.extract_tables(query))
+        query = 'SELECT * FROM t1 INTERSECT ALL SELECT * FROM t2'
+        self.assertEquals({'t1', 't2'}, self.extract_tables(query))

     def test_select_from_values(self):
-        query = "SELECT * FROM VALUES (13, 42)"
+        query = 'SELECT * FROM VALUES (13, 42)'
         self.assertFalse(self.extract_tables(query))

     def test_select_array(self):
@@ -112,25 +112,25 @@ class SupersetTestCase(unittest.TestCase):
            SELECT ARRAY[1, 2, 3] AS my_array
            FROM t1 LIMIT 10
        """
-        self.assertEquals({"t1"}, self.extract_tables(query))
+        self.assertEquals({'t1'}, self.extract_tables(query))

     def test_select_if(self):
         query = """
            SELECT IF(CARDINALITY(my_array) >= 3, my_array[3], NULL)
            FROM t1 LIMIT 10
        """
-        self.assertEquals({"t1"}, self.extract_tables(query))
+        self.assertEquals({'t1'}, self.extract_tables(query))

     # SHOW TABLES ((FROM | IN) qualifiedName)? (LIKE pattern=STRING)?
     def test_show_tables(self):
-        query = 'SHOW TABLES FROM s1 like "%order%"'
+        query = "SHOW TABLES FROM s1 like '%order%'"
         # TODO: figure out what should code do here
-        self.assertEquals({"s1"}, self.extract_tables(query))
+        self.assertEquals({'s1'}, self.extract_tables(query))

     # SHOW COLUMNS (FROM | IN) qualifiedName
     def test_show_columns(self):
-        query = "SHOW COLUMNS FROM t1"
-        self.assertEquals({"t1"}, self.extract_tables(query))
+        query = 'SHOW COLUMNS FROM t1'
+        self.assertEquals({'t1'}, self.extract_tables(query))

     def test_where_subquery(self):
         query = """
@@ -138,26 +138,26 @@ class SupersetTestCase(unittest.TestCase):
            FROM t1
            WHERE regionkey = (SELECT max(regionkey) FROM t2)
        """
-        self.assertEquals({"t1", "t2"}, self.extract_tables(query))
+        self.assertEquals({'t1', 't2'}, self.extract_tables(query))

         query = """
            SELECT name
            FROM t1
            WHERE regionkey IN (SELECT regionkey FROM t2)
        """
-        self.assertEquals({"t1", "t2"}, self.extract_tables(query))
+        self.assertEquals({'t1', 't2'}, self.extract_tables(query))

         query = """
            SELECT name
            FROM t1
            WHERE regionkey EXISTS (SELECT regionkey FROM t2)
        """
-        self.assertEquals({"t1", "t2"}, self.extract_tables(query))
+        self.assertEquals({'t1', 't2'}, self.extract_tables(query))

     # DESCRIBE | DESC qualifiedName
     def test_describe(self):
-        self.assertEquals({"t1"}, self.extract_tables("DESCRIBE t1"))
-        self.assertEquals({"t1"}, self.extract_tables("DESC t1"))
+        self.assertEquals({'t1'}, self.extract_tables('DESCRIBE t1'))
+        self.assertEquals({'t1'}, self.extract_tables('DESC t1'))

     # SHOW PARTITIONS FROM qualifiedName (WHERE booleanExpression)?
     # (ORDER BY sortItem (',' sortItem)*)? (LIMIT limit=(INTEGER_VALUE | ALL))?
@@ -166,11 +166,11 @@ class SupersetTestCase(unittest.TestCase):
            SHOW PARTITIONS FROM orders
            WHERE ds >= '2013-01-01' ORDER BY ds DESC;
        """
-        self.assertEquals({"orders"}, self.extract_tables(query))
+        self.assertEquals({'orders'}, self.extract_tables(query))

     def test_join(self):
-        query = "SELECT t1.*, t2.* FROM t1 JOIN t2 ON t1.a = t2.a;"
-        self.assertEquals({"t1", "t2"}, self.extract_tables(query))
+        query = 'SELECT t1.*, t2.* FROM t1 JOIN t2 ON t1.a = t2.a;'
+        self.assertEquals({'t1', 't2'}, self.extract_tables(query))

         # subquery + join
         query = """
@@ -184,7 +184,7 @@ class SupersetTestCase(unittest.TestCase):
             ) b
             ON a.date = b.date
         """
-        self.assertEquals({"left_table", "right_table"},
+        self.assertEquals({'left_table', 'right_table'},
                           self.extract_tables(query))

         query = """
@@ -198,7 +198,7 @@ class SupersetTestCase(unittest.TestCase):
             ) b
             ON a.date = b.date
         """
-        self.assertEquals({"left_table", "right_table"},
+        self.assertEquals({'left_table', 'right_table'},
                           self.extract_tables(query))

         query = """
@@ -212,7 +212,7 @@ class SupersetTestCase(unittest.TestCase):
             ) b
             ON a.date = b.date
         """
-        self.assertEquals({"left_table", "right_table"},
+        self.assertEquals({'left_table', 'right_table'},
                           self.extract_tables(query))

         query = """
@@ -226,7 +226,7 @@ class SupersetTestCase(unittest.TestCase):
             ) b
             ON a.date = b.date
         """
-        self.assertEquals({"left_table", "right_table"},
+        self.assertEquals({'left_table', 'right_table'},
                           self.extract_tables(query))

         # TODO: add SEMI join support, SQL Parse does not handle it.
@@ -241,7 +241,7 @@ class SupersetTestCase(unittest.TestCase):
         # ) b
         # ON a.date = b.date
         # """
-        # self.assertEquals({"left_table", "right_table"},
+        # self.assertEquals({'left_table', 'right_table'},
         #                   sql_parse.extract_tables(query))

     def test_combinations(self):
@@ -255,14 +255,14 @@ class SupersetTestCase(unittest.TestCase):
                 WHERE ROW(5*t3.s1,77)=
                     (SELECT 50,11*s1 FROM t4)));
         """
-        self.assertEquals({"t1", "t3", "t4", "t6"},
+        self.assertEquals({'t1', 't3', 't4', 't6'},
                           self.extract_tables(query))

         query = """
         SELECT * FROM (SELECT * FROM (SELECT * FROM (SELECT * FROM EmployeeS)
             AS S1) AS S2) AS S3;
         """
-        self.assertEquals({"EmployeeS"}, self.extract_tables(query))
+        self.assertEquals({'EmployeeS'}, self.extract_tables(query))

     def test_with(self):
         query = """
@@ -272,7 +272,7 @@ class SupersetTestCase(unittest.TestCase):
             z AS (SELECT b AS c FROM t3)
             SELECT c FROM z;
         """
-        self.assertEquals({"t1", "t2", "t3"},
+        self.assertEquals({'t1', 't2', 't3'},
                           self.extract_tables(query))

         query = """
@@ -282,7 +282,7 @@ class SupersetTestCase(unittest.TestCase):
             z AS (SELECT b AS c FROM y)
             SELECT c FROM z;
         """
-        self.assertEquals({"t1"}, self.extract_tables(query))
+        self.assertEquals({'t1'}, self.extract_tables(query))

     def test_reusing_aliases(self):
         query = """
@@ -290,11 +290,11 @@ class SupersetTestCase(unittest.TestCase):
             q2 as ( select key from src where key = '5')
             select * from (select key from q1) a;
         """
-        self.assertEquals({"src"}, self.extract_tables(query))
+        self.assertEquals({'src'}, self.extract_tables(query))

     def multistatement(self):
-        query = "SELECT * FROM t1; SELECT * FROM t2"
-        self.assertEquals({"t1", "t2"}, self.extract_tables(query))
+        query = 'SELECT * FROM t1; SELECT * FROM t2'
+        self.assertEquals({'t1', 't2'}, self.extract_tables(query))

-        query = "SELECT * FROM t1; SELECT * FROM t2;"
-        self.assertEquals({"t1", "t2"}, self.extract_tables(query))
+        query = 'SELECT * FROM t1; SELECT * FROM t2;'
+        self.assertEquals({'t1', 't2'}, self.extract_tables(query))

@@ -25,15 +25,15 @@ class SqlLabTests(SupersetTestCase):
         db.session.query(Query).delete()
         db.session.commit()
         self.run_sql(
-            "SELECT * FROM ab_user",
+            'SELECT * FROM ab_user',
             client_id='client_id_1',
             user_name='admin')
         self.run_sql(
-            "SELECT * FROM NO_TABLE",
+            'SELECT * FROM NO_TABLE',
             client_id='client_id_3',
             user_name='admin')
         self.run_sql(
-            "SELECT * FROM ab_permission",
+            'SELECT * FROM ab_permission',
             client_id='client_id_2',
             user_name='gamma_sqllab')
         self.logout()
@@ -46,10 +46,10 @@ class SqlLabTests(SupersetTestCase):
     def test_sql_json(self):
         self.login('admin')

-        data = self.run_sql('SELECT * FROM ab_user', "1")
+        data = self.run_sql('SELECT * FROM ab_user', '1')
         self.assertLess(0, len(data['data']))

-        data = self.run_sql('SELECT * FROM unexistant_table', "2")
+        data = self.run_sql('SELECT * FROM unexistant_table', '2')
         self.assertLess(0, len(data['error']))

     def test_sql_json_has_access(self):
@@ -64,7 +64,7 @@ class SqlLabTests(SupersetTestCase):
             .filter(ab_models.Permission.name == 'database_access')
             .first()
         )
-        astronaut = sm.add_role("Astronaut")
+        astronaut = sm.add_role('Astronaut')
         sm.add_permission_role(astronaut, main_db_permission_view)
         # Astronaut role is Gamma + sqllab + main db permissions
         for perm in sm.find_role('Gamma').permissions:
@@ -78,7 +78,7 @@ class SqlLabTests(SupersetTestCase):
             'gagarin', 'Iurii', 'Gagarin', 'gagarin@cosmos.ussr',
             astronaut,
             password='general')
-        data = self.run_sql('SELECT * FROM ab_user', "3", user_name='gagarin')
+        data = self.run_sql('SELECT * FROM ab_user', '3', user_name='gagarin')
         db.session.query(Query).delete()
         db.session.commit()
         self.assertLess(0, len(data['data']))
@@ -97,8 +97,8 @@ class SqlLabTests(SupersetTestCase):
         self.assertEquals(2, len(data))

         # Run 2 more queries
-        self.run_sql("SELECT * FROM ab_user LIMIT 1", client_id='client_id_4')
-        self.run_sql("SELECT * FROM ab_user LIMIT 2", client_id='client_id_5')
+        self.run_sql('SELECT * FROM ab_user LIMIT 1', client_id='client_id_4')
+        self.run_sql('SELECT * FROM ab_user LIMIT 2', client_id='client_id_5')
         self.login('admin')
         data = self.get_json_resp('/superset/queries/0')
         self.assertEquals(4, len(data))
@@ -195,7 +195,7 @@ class SqlLabTests(SupersetTestCase):

     def test_alias_duplicate(self):
         self.run_sql(
-            "SELECT username as col, id as col, username FROM ab_user",
+            'SELECT username as col, id as col, username FROM ab_user',
             client_id='2e2df3',
             user_name='admin',
             raise_on_error=True)

@@ -24,7 +24,7 @@ class UtilsTestCase(unittest.TestCase):
         assert json_int_dttm_ser(dttm + timedelta(milliseconds=1)) == (ts + 1)

         with self.assertRaises(TypeError):
-            json_int_dttm_ser("this is not a date")
+            json_int_dttm_ser('this is not a date')

     def test_json_iso_dttm_ser(self):
         dttm = datetime(2020, 1, 1)
@@ -35,7 +35,7 @@ class UtilsTestCase(unittest.TestCase):
         assert json_iso_dttm_ser(t) == t.isoformat()

         with self.assertRaises(TypeError):
-            json_iso_dttm_ser("this is not a date")
+            json_iso_dttm_ser('this is not a date')

     def test_base_json_conv(self):
         assert isinstance(base_json_conv(numpy.bool_(1)), bool) is True
@@ -50,7 +50,7 @@ class UtilsTestCase(unittest.TestCase):
         self.assertEquals(parse_human_timedelta('now'), timedelta(0))

     def test_zlib_compression(self):
-        json_str = """{"test": 1}"""
+        json_str = '{"test": 1}'
         blob = zlib_compress(json_str)
         got_str = zlib_decompress_to_string(blob)
         self.assertEquals(json_str, got_str)
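For context: the Q??? codes in the commit title are emitted by the flake8-quotes plugin, which flags string literals whose quote style differs from the configured one. A minimal sketch of the kind of flake8 configuration that enforces the single-quote convention applied throughout this commit (inline-quotes and avoid-escape are real flake8-quotes options; the exact config file location in the repo is an assumption here):

    # flake8 configuration sketch (assumed to live in tox.ini or setup.cfg)
    [flake8]
    # Q000: flag inline strings that are not single-quoted
    inline-quotes = single
    # Q003 behavior (on by default): a string containing a single quote,
    # e.g. "d='{{ ds }}'", may stay double-quoted to avoid escaping
    avoid-escape = True

This matches what the diff shows: plain literals flip from double to single quotes, while strings that embed single quotes (SQL WHERE clauses and the like) keep their outer double quotes.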