[flake8] Resolving C??? errors (#3787)

John Bodley 2017-11-07 21:32:45 -08:00 committed by Maxime Beauchemin
parent 7453131858
commit 17623f71d4
26 changed files with 172 additions and 172 deletions
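This is one of a series of mechanical lint cleanups: the flake8-commas plugin's C81x checks (e.g. C812, "missing trailing comma") flag any multi-line tuple, list, dict, or call whose closing bracket sits on its own line but whose last element lacks a trailing comma. Every hunk below applies the same one-character fix. As a minimal before/after sketch (assuming flake8 with flake8-commas installed), using the num_types tuple from the first hunk:

    # Before: C812 is reported on the 'DECIMAL' line.
    num_types = (
        'DOUBLE', 'FLOAT', 'INT', 'BIGINT',
        'LONG', 'REAL', 'NUMERIC', 'DECIMAL'
    )

    # After: trailing comma added. Besides keeping future additions to
    # one-line diffs, the comma guards against implicit string
    # concatenation: appending a new line after 'DECIMAL' while
    # forgetting the comma would silently merge the two strings.
    num_types = (
        'DOUBLE', 'FLOAT', 'INT', 'BIGINT',
        'LONG', 'REAL', 'NUMERIC', 'DECIMAL',
    )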

View File

@@ -219,7 +219,7 @@ class BaseColumn(AuditMixinNullable, ImportMixin):
num_types = (
'DOUBLE', 'FLOAT', 'INT', 'BIGINT',
'LONG', 'REAL', 'NUMERIC', 'DECIMAL'
'LONG', 'REAL', 'NUMERIC', 'DECIMAL',
)
date_types = ('DATE', 'TIME', 'DATETIME')
str_types = ('VARCHAR', 'STRING', 'CHAR')

View File

@@ -61,7 +61,7 @@ class ConnectorRegistry(object):
session.query(datasource_class)
.options(
subqueryload(datasource_class.columns),
subqueryload(datasource_class.metrics)
subqueryload(datasource_class.metrics),
)
.filter_by(id=datasource_id)
.one()

View File

@@ -232,7 +232,7 @@ class DruidColumn(Model, BaseColumn):
export_fields = (
'datasource_name', 'column_name', 'is_active', 'type', 'groupby',
'count_distinct', 'sum', 'avg', 'max', 'min', 'filterable',
'description', 'dimension_spec_json'
'description', 'dimension_spec_json',
)
def __repr__(self):
@@ -253,7 +253,7 @@ class DruidColumn(Model, BaseColumn):
metric_name='count',
verbose_name='COUNT(*)',
metric_type='count',
json=json.dumps({'type': 'count', 'name': 'count'})
json=json.dumps({'type': 'count', 'name': 'count'}),
)
# Somehow we need to reassign this for UDAFs
if self.type in ('DOUBLE', 'FLOAT'):
@@ -269,7 +269,7 @@ class DruidColumn(Model, BaseColumn):
metric_type='sum',
verbose_name='SUM({})'.format(self.column_name),
json=json.dumps({
'type': mt, 'name': name, 'fieldName': self.column_name})
'type': mt, 'name': name, 'fieldName': self.column_name}),
)
if self.avg and self.is_num:
@@ -280,7 +280,7 @@ class DruidColumn(Model, BaseColumn):
metric_type='avg',
verbose_name='AVG({})'.format(self.column_name),
json=json.dumps({
'type': mt, 'name': name, 'fieldName': self.column_name})
'type': mt, 'name': name, 'fieldName': self.column_name}),
)
if self.min and self.is_num:
@@ -291,7 +291,7 @@ class DruidColumn(Model, BaseColumn):
metric_type='min',
verbose_name='MIN({})'.format(self.column_name),
json=json.dumps({
'type': mt, 'name': name, 'fieldName': self.column_name})
'type': mt, 'name': name, 'fieldName': self.column_name}),
)
if self.max and self.is_num:
mt = corrected_type.lower() + 'Max'
@@ -301,7 +301,7 @@ class DruidColumn(Model, BaseColumn):
metric_type='max',
verbose_name='MAX({})'.format(self.column_name),
json=json.dumps({
'type': mt, 'name': name, 'fieldName': self.column_name})
'type': mt, 'name': name, 'fieldName': self.column_name}),
)
if self.count_distinct:
name = 'count_distinct__' + self.column_name
@@ -313,8 +313,8 @@ class DruidColumn(Model, BaseColumn):
json=json.dumps({
'type': self.type,
'name': name,
'fieldName': self.column_name
})
'fieldName': self.column_name,
}),
)
else:
metrics[name] = DruidMetric(
@@ -324,7 +324,7 @@ class DruidColumn(Model, BaseColumn):
json=json.dumps({
'type': 'cardinality',
'name': name,
'fieldNames': [self.column_name]})
'fieldNames': [self.column_name]}),
)
return metrics
@@ -372,7 +372,7 @@ class DruidMetric(Model, BaseMetric):
export_fields = (
'metric_name', 'verbose_name', 'metric_type', 'datasource_name',
'json', 'description', 'is_restricted', 'd3format'
'json', 'description', 'is_restricted', 'd3format',
)
@property
@@ -392,7 +392,7 @@ class DruidMetric(Model, BaseMetric):
return (
"{parent_name}.[{obj.metric_name}](id:{obj.id})"
).format(obj=self,
parent_name=self.datasource.full_name
parent_name=self.datasource.full_name,
) if self.datasource else None
@classmethod
@@ -434,7 +434,7 @@ class DruidDatasource(Model, BaseDatasource):
export_fields = (
'datasource_name', 'is_hidden', 'description', 'default_endpoint',
'cluster_name', 'offset', 'cache_timeout', 'params'
'cluster_name', 'offset', 'cache_timeout', 'params',
)
@property
@@ -491,7 +491,7 @@ class DruidDatasource(Model, BaseDatasource):
'week', 'week_starting_sunday', 'week_ending_saturday',
'month',
],
"time_grains": ['now']
"time_grains": ['now'],
}
def __repr__(self):
@@ -815,11 +815,11 @@ class DruidDatasource(Model, BaseDatasource):
elif mconf.get('type') == 'constant':
post_aggs[metric_name] = Const(
mconf.get('value'),
output_name=mconf.get('name', '')
output_name=mconf.get('name', ''),
)
elif mconf.get('type') == 'hyperUniqueCardinality':
post_aggs[metric_name] = HyperUniqueCardinality(
mconf.get('name')
mconf.get('name'),
)
elif mconf.get('type') == 'arithmetic':
post_aggs[metric_name] = Postaggregator(
@@ -936,7 +936,7 @@ class DruidDatasource(Model, BaseDatasource):
if rejected_metrics:
raise MetricPermException(
"Access to the metrics denied: " + ', '.join(rejected_metrics)
"Access to the metrics denied: " + ', '.join(rejected_metrics),
)
# the dimensions list with dimensionSpecs expanded
@@ -1155,18 +1155,18 @@ class DruidDatasource(Model, BaseDatasource):
elif op == '>':
cond = Bound(
col, eq, None,
lowerStrict=True, alphaNumeric=is_numeric_col
lowerStrict=True, alphaNumeric=is_numeric_col,
)
elif op == '<':
cond = Bound(
col, None, eq,
upperStrict=True, alphaNumeric=is_numeric_col
upperStrict=True, alphaNumeric=is_numeric_col,
)
if filters:
filters = Filter(type="and", fields=[
cond,
filters
filters,
])
else:
filters = cond
@@ -1192,7 +1192,7 @@ class DruidDatasource(Model, BaseDatasource):
reversed_op_map = {
'!=': '==',
'>=': '<',
'<=': '>'
'<=': '>',
}
for flt in raw_filters:

View File

@@ -14,7 +14,7 @@ from superset.utils import has_access
from superset.views.base import (
BaseSupersetView, DatasourceFilter, DeleteMixin,
get_datasource_exist_error_mgs, ListWidgetWithCheckboxes, SupersetModelView,
validate_json
validate_json,
)
from . import models
@@ -184,7 +184,7 @@ class DruidDatasourceModelView(DatasourceModelView, DeleteMixin): # noqa
'filter_select_enabled', 'fetch_values_from',
'default_endpoint', 'offset', 'cache_timeout']
search_columns = (
'datasource_name', 'cluster', 'description', 'owner'
'datasource_name', 'cluster', 'description', 'owner',
)
add_columns = edit_columns
show_columns = add_columns + ['perm']

View File

@@ -44,7 +44,7 @@ class TableColumn(Model, BaseColumn):
'table_id', 'column_name', 'verbose_name', 'is_dttm', 'is_active',
'type', 'groupby', 'count_distinct', 'sum', 'avg', 'max', 'min',
'filterable', 'expression', 'description', 'python_date_format',
'database_expression'
'database_expression',
)
@property
@@ -262,7 +262,7 @@ class SqlaTable(Model, BaseDatasource):
def time_column_grains(self):
return {
"time_columns": self.dttm_cols,
"time_grains": [grain.name for grain in self.database.grains()]
"time_grains": [grain.name for grain in self.database.grains()],
}
def get_col(self, col_name):
@@ -322,8 +322,8 @@ class SqlaTable(Model, BaseDatasource):
sql = str(
qry.compile(
engine,
compile_kwargs={"literal_binds": True}
)
compile_kwargs={"literal_binds": True},
),
)
logging.info(sql)
sql = sqlparse.format(sql, reindent=True)
@@ -622,35 +622,35 @@ class SqlaTable(Model, BaseDatasource):
metric_name='sum__' + dbcol.column_name,
verbose_name='sum__' + dbcol.column_name,
metric_type='sum',
expression="SUM({})".format(quoted)
expression="SUM({})".format(quoted),
))
if dbcol.avg:
metrics.append(M(
metric_name='avg__' + dbcol.column_name,
verbose_name='avg__' + dbcol.column_name,
metric_type='avg',
expression="AVG({})".format(quoted)
expression="AVG({})".format(quoted),
))
if dbcol.max:
metrics.append(M(
metric_name='max__' + dbcol.column_name,
verbose_name='max__' + dbcol.column_name,
metric_type='max',
expression="MAX({})".format(quoted)
expression="MAX({})".format(quoted),
))
if dbcol.min:
metrics.append(M(
metric_name='min__' + dbcol.column_name,
verbose_name='min__' + dbcol.column_name,
metric_type='min',
expression="MIN({})".format(quoted)
expression="MIN({})".format(quoted),
))
if dbcol.count_distinct:
metrics.append(M(
metric_name='count_distinct__' + dbcol.column_name,
verbose_name='count_distinct__' + dbcol.column_name,
metric_type='count_distinct',
expression="COUNT(DISTINCT {})".format(quoted)
expression="COUNT(DISTINCT {})".format(quoted),
))
dbcol.type = datatype
@@ -658,7 +658,7 @@ class SqlaTable(Model, BaseDatasource):
metric_name='count',
verbose_name='COUNT(*)',
metric_type='count',
expression="COUNT(*)"
expression="COUNT(*)",
))
dbmetrics = db.session.query(M).filter(M.table_id == self.id).filter(

View File

@@ -117,7 +117,7 @@ class SqlMetricInlineView(CompactCRUDMixin, SupersetModelView): # noqa
"(https://github.com/d3/d3-format/blob/master/README.md#format). "
"For instance, this default formatting applies in the Table "
"visualization and allow for different metric to use different "
"formats", True
"formats", True,
),
}
add_columns = edit_columns
@@ -189,13 +189,13 @@ class TableModelView(DatasourceModelView, DeleteMixin): # noqa
"markdown</a>"),
'sql': _(
"This fields acts a Superset view, meaning that Superset will "
"run a query against this string as a subquery."
"run a query against this string as a subquery.",
),
'fetch_values_predicate': _(
"Predicate applied when fetching distinct value to "
"populate the filter control component. Supports "
"jinja template syntax. Applies only when "
"`Enable Filter Select` is on."
"`Enable Filter Select` is on.",
),
'default_endpoint': _(
"Redirects to this endpoint when clicking on the table "

View File

@@ -93,7 +93,7 @@ def load_energy():
"viz_type": "sankey",
"where": ""
}
""")
"""),
)
misc_dash_slices.append(slc.slice_name)
merge_slice(slc)
@@ -119,7 +119,7 @@ def load_energy():
"viz_type": "directed_force",
"where": ""
}
""")
"""),
)
misc_dash_slices.append(slc.slice_name)
merge_slice(slc)
@@ -145,7 +145,7 @@ def load_energy():
"xscale_interval": "1",
"yscale_interval": "1"
}
""")
"""),
)
misc_dash_slices.append(slc.slice_name)
merge_slice(slc)
@@ -971,7 +971,7 @@ def load_country_map_data():
'2012': BigInteger,
'2013': BigInteger,
'2014': BigInteger,
'date': Date()
'date': Date(),
},
index=False)
print("Done loading table!")

View File

@@ -139,7 +139,7 @@ class SupersetDataFrame(object):
column.update({
'is_date': True,
'is_dim': False,
'agg': None
'agg': None,
})
# 'agg' is optional attribute
if not column['agg']:

View File

@@ -486,7 +486,7 @@ class PrestoEngineSpec(BaseEngineSpec):
'cols': cols,
'latest': {col_name: latest_part},
'partitionQuery': pql,
}
},
}
@classmethod

View File

@@ -30,7 +30,7 @@ def fetch_logs(self, max_rows=1024,
operationHandle=self._operationHandle,
orientation=ttypes.TFetchOrientation.FETCH_NEXT,
maxRows=self.arraysize,
fetchType=1 # 0: results, 1: logs
fetchType=1, # 0: results, 1: logs
)
response = self._connection.client.FetchResults(req)
hive._check_status(response)

View File

@@ -199,7 +199,7 @@ class Slice(Model, AuditMixinNullable, ImportMixin):
form_data.update({
'slice_id': self.id,
'viz_type': self.viz_type,
'datasource': str(self.datasource_id) + '__' + self.datasource_type
'datasource': str(self.datasource_id) + '__' + self.datasource_type,
})
if self.cache_timeout:
form_data['cache_timeout'] = self.cache_timeout
@@ -301,7 +301,7 @@ dashboard_user = Table(
'dashboard_user', metadata,
Column('id', Integer, primary_key=True),
Column('user_id', Integer, ForeignKey('ab_user.id')),
Column('dashboard_id', Integer, ForeignKey('dashboards.id'))
Column('dashboard_id', Integer, ForeignKey('dashboards.id')),
)
@@ -687,7 +687,7 @@ class Database(Model, AuditMixinNullable):
select('*')
.select_from(
TextAsFrom(text(sql), ['*'])
.alias('inner_qry')
.alias('inner_qry'),
).limit(limit)
)
return self.compile_sqla_query(qry)

View File

@@ -134,13 +134,13 @@ def merge_perm(sm, permission_name, view_menu_name, connection):
permission_table = sm.permission_model.__table__
connection.execute(
permission_table.insert()
.values(name=permission_name)
.values(name=permission_name),
)
if not view_menu:
view_menu_table = sm.viewmenu_model.__table__
connection.execute(
view_menu_table.insert()
.values(name=view_menu_name)
.values(name=view_menu_name),
)
permission = sm.find_permission(permission_name)
@@ -155,8 +155,8 @@ def merge_perm(sm, permission_name, view_menu_name, connection):
permission_view_table.insert()
.values(
permission_id=permission.id,
view_menu_id=view_menu.id
)
view_menu_id=view_menu.id,
),
)
@@ -167,7 +167,7 @@ def set_perm(mapper, connection, target): # noqa
connection.execute(
link_table.update()
.where(link_table.c.id == target.id)
.values(perm=target.get_perm())
.values(perm=target.get_perm()),
)
# add to view menu if not already exists

View File

@@ -133,13 +133,13 @@ def is_gamma_pvm(pvm):
def is_sql_lab_pvm(pvm):
return pvm.view_menu.name in {'SQL Lab'} or pvm.permission.name in {
'can_sql_json', 'can_csv', 'can_search_queries'
'can_sql_json', 'can_csv', 'can_search_queries',
}
def is_granter_pvm(pvm):
return pvm.permission.name in {
'can_override_role_permissions', 'can_approve'
'can_override_role_permissions', 'can_approve',
}

View File

@@ -163,7 +163,7 @@ class DimSelector(Having):
'type': 'dimSelector',
'dimension': args['dimension'],
'value': args['value'],
}
},
}

View File

@@ -270,7 +270,7 @@ class DeleteMixin(object):
__("Delete"),
__("Delete all Really?"),
"fa-trash",
single=False
single=False,
)
def muldelete(self, items):
if not items:

View File

@@ -151,8 +151,8 @@ class DashboardFilter(SupersetFilter):
db.session.query(Dash.id)
.distinct()
.join(Dash.slices)
.filter(Slice.id.in_(slice_ids_qry))
)
.filter(Slice.id.in_(slice_ids_qry)),
),
)
return query
@@ -179,7 +179,7 @@ class DatabaseView(SupersetModelView, DeleteMixin): # noqa
'allow_dml', 'creator', 'modified']
order_columns = [
'database_name', 'allow_run_sync', 'allow_run_async', 'allow_dml',
'modified'
'modified',
]
add_columns = [
'database_name', 'sqlalchemy_uri', 'cache_timeout', 'extra',
@@ -256,7 +256,7 @@ class DatabaseView(SupersetModelView, DeleteMixin): # noqa
'extra': _("Extra"),
'allow_run_sync': _("Allow Run Sync"),
'allow_run_async': _("Allow Run Async"),
'impersonate_user': _("Impersonate the logged on user")
'impersonate_user': _("Impersonate the logged on user"),
}
def pre_add(self, db):
@@ -365,10 +365,10 @@ class SliceModelView(SupersetModelView, DeleteMixin): # noqa
"These parameters are generated dynamically when clicking "
"the save or overwrite button in the explore view. This JSON "
"object is exposed here for reference and for power users who may "
"want to alter specific parameters."),
'cache_timeout': _(
"Duration (in seconds) of the caching timeout for this slice."
"want to alter specific parameters.",
),
'cache_timeout': _(
"Duration (in seconds) of the caching timeout for this slice."),
}
base_filters = [['id', SliceFilter, lambda: []]]
label_columns = {
@@ -532,7 +532,7 @@ class DashboardModelView(SupersetModelView, DeleteMixin): # noqa
mimetype="application/text")
return self.render_template(
'superset/export_dashboards.html',
dashboards_url='/dashboardmodelview/list'
dashboards_url='/dashboardmodelview/list',
)
@@ -770,7 +770,7 @@ class Superset(BaseSupersetView):
db.session.commit()
return self.json_response({
'granted': granted_perms,
'requested': list(db_ds_names)
'requested': list(db_ds_names),
}, status=201)
@log_this
@@ -1455,7 +1455,7 @@ class Superset(BaseSupersetView):
configuration.update(
db_engine.get_configuration_for_impersonation(uri,
impersonate_user,
username)
username),
)
connect_args = (
@@ -1486,17 +1486,17 @@ class Superset(BaseSupersetView):
db.session.query(M.Log, M.Dashboard, M.Slice)
.outerjoin(
M.Dashboard,
M.Dashboard.id == M.Log.dashboard_id
M.Dashboard.id == M.Log.dashboard_id,
)
.outerjoin(
M.Slice,
M.Slice.id == M.Log.slice_id
M.Slice.id == M.Log.slice_id,
)
.filter(
sqla.and_(
~M.Log.action.in_(('queries', 'shortner', 'sql_json')),
M.Log.user_id == user_id,
)
),
)
.order_by(M.Log.dttm.desc())
.limit(1000)
@@ -1553,10 +1553,10 @@ class Superset(BaseSupersetView):
models.FavStar.user_id == int(user_id),
models.FavStar.class_name == 'Dashboard',
models.Dashboard.id == models.FavStar.obj_id,
)
),
)
.order_by(
models.FavStar.dttm.desc()
models.FavStar.dttm.desc(),
)
)
payload = []
@@ -1590,10 +1590,10 @@ class Superset(BaseSupersetView):
sqla.or_(
Dash.created_by_fk == user_id,
Dash.changed_by_fk == user_id,
)
),
)
.order_by(
Dash.changed_on.desc()
Dash.changed_on.desc(),
)
)
payload = [{
@@ -1618,7 +1618,7 @@ class Superset(BaseSupersetView):
sqla.or_(
Slice.created_by_fk == user_id,
Slice.changed_by_fk == user_id,
)
),
)
.order_by(Slice.changed_on.desc())
)
@@ -1647,10 +1647,10 @@ class Superset(BaseSupersetView):
models.FavStar.user_id == int(user_id),
models.FavStar.class_name == 'slice',
models.Slice.id == models.FavStar.obj_id,
)
),
)
.order_by(
models.FavStar.dttm.desc()
models.FavStar.dttm.desc(),
)
)
payload = []
@@ -1733,8 +1733,8 @@ class Superset(BaseSupersetView):
class_name=class_name,
obj_id=obj_id,
user_id=g.user.get_id(),
dttm=datetime.now()
)
dttm=datetime.now(),
),
)
count = 1
elif action == 'unselect':
@@ -1993,7 +1993,7 @@ class Superset(BaseSupersetView):
models.Database).filter_by(id=database_id).first()
return self.render_template(
"superset/ajah.html",
content=mydb.select_star(table_name, show_cols=True)
content=mydb.select_star(table_name, show_cols=True),
)
@expose("/theme/")
@@ -2023,7 +2023,7 @@ class Superset(BaseSupersetView):
return json_error_response(
"Data could not be retrieved. "
"You may want to re-run the query.",
status=410
status=410,
)
query = db.session.query(Query).filter_by(results_key=key).one()
@@ -2085,7 +2085,7 @@ class Superset(BaseSupersetView):
if select_as_cta and mydb.force_ctas_schema:
tmp_table_name = '{}.{}'.format(
mydb.force_ctas_schema,
tmp_table_name
tmp_table_name,
)
query = Query(
@@ -2326,7 +2326,7 @@ class Superset(BaseSupersetView):
for perm in role.permissions:
if perm.permission and perm.view_menu:
perms.add(
(perm.permission.name, perm.view_menu.name)
(perm.permission.name, perm.view_menu.name),
)
if perm.permission.name in ('datasource_access', 'database_access'):
permissions[perm.permission.name].add(perm.view_menu.name)
@@ -2354,7 +2354,7 @@ class Superset(BaseSupersetView):
title=user.username + "'s profile",
navbar_container=True,
entry='profile',
bootstrap_data=json.dumps(payload, default=utils.json_iso_dttm_ser)
bootstrap_data=json.dumps(payload, default=utils.json_iso_dttm_ser),
)
@has_access
@@ -2368,7 +2368,7 @@ class Superset(BaseSupersetView):
return self.render_template(
'superset/basic.html',
entry='sqllab',
bootstrap_data=json.dumps(d, default=utils.json_iso_dttm_ser)
bootstrap_data=json.dumps(d, default=utils.json_iso_dttm_ser),
)
@api

View File

@@ -197,7 +197,7 @@ class BaseViz(object):
'extras': extras,
'timeseries_limit_metric': timeseries_limit_metric,
'form_data': form_data,
'order_desc': order_desc
'order_desc': order_desc,
}
return d
@@ -387,7 +387,7 @@ class TableViz(BaseViz):
if 'percent_metrics' in fd:
d['metrics'] = d['metrics'] + list(filter(
lambda m: m not in d['metrics'],
fd['percent_metrics']
fd['percent_metrics'],
))
d['is_timeseries'] = self.should_be_timeseries()
@@ -416,7 +416,7 @@ class TableViz(BaseViz):
# Remove metrics that are not in the main metrics list
for m in filter(
lambda m: m not in fd['metrics'] and m in df.columns,
percent_metrics
percent_metrics,
):
del df[m]
@@ -766,7 +766,7 @@ class BubbleViz(NVD3Viz):
form_data = self.form_data
d = super(BubbleViz, self).query_obj()
d['groupby'] = [
form_data.get('entity')
form_data.get('entity'),
]
if form_data.get('series'):
d['groupby'].append(form_data.get('series'))
@@ -1090,7 +1090,7 @@ class NVD3DualLineViz(NVD3Viz):
chart_data = []
metrics = [
self.form_data.get('metric'),
self.form_data.get('metric_2')
self.form_data.get('metric_2'),
]
for i, m in enumerate(metrics):
ys = series[m]
@@ -1105,7 +1105,7 @@ class NVD3DualLineViz(NVD3Viz):
for ds in df.index
],
"yAxis": i+1,
"type": "line"
"type": "line",
}
chart_data.append(d)
return chart_data
@@ -1702,14 +1702,14 @@ class MapboxViz(BaseViz):
"geometry": {
"type": "Point",
"coordinates": [lon, lat],
}
},
}
for lon, lat, metric, point_radius
in zip(
df[fd.get('all_columns_x')],
df[fd.get('all_columns_y')],
metric_col, point_radius_col)
]
],
}
return {
@@ -1912,7 +1912,7 @@ class PartitionViz(NVD3TimeSeriesViz):
'name': i,
'val': levels[level][metric][dims][i],
'children': self.nest_values(
levels, level + 1, metric, dims + (i,)
levels, level + 1, metric, dims + (i,),
),
} for i in levels[level][metric][dims].index]
@@ -1933,7 +1933,7 @@ class PartitionViz(NVD3TimeSeriesViz):
return [{
'name': i,
'val': procs[level][dims][i][time],
'children': self.nest_procs(procs, level + 1, dims + (i,), time)
'children': self.nest_procs(procs, level + 1, dims + (i,), time),
} for i in procs[level][dims].columns]
def get_data(self, df):

View File

@@ -23,9 +23,9 @@ ROLE_TABLES_PERM_DATA = {
'name': 'main',
'schema': [{
'name': '',
'datasources': ['birth_names']
}]
}]
'datasources': ['birth_names'],
}],
}],
}
ROLE_ALL_PERM_DATA = {
@@ -35,17 +35,17 @@ ROLE_ALL_PERM_DATA = {
'name': 'main',
'schema': [{
'name': '',
'datasources': ['birth_names']
}]
'datasources': ['birth_names'],
}],
}, {
'datasource_type': 'druid',
'name': 'druid_test',
'schema': [{
'name': '',
'datasources': ['druid_ds_1', 'druid_ds_2']
}]
}
]
'datasources': ['druid_ds_1', 'druid_ds_2'],
}],
},
],
}
EXTEND_ROLE_REQUEST = (
@@ -172,7 +172,7 @@ class RequestAccessTests(SupersetTestCase):
override_me.permissions.append(
sm.find_permission_view_menu(
view_menu_name=self.get_table_by_name('long_lat').perm,
permission_name='datasource_access')
permission_name='datasource_access'),
)
db.session.flush()
@@ -550,11 +550,11 @@ class RequestAccessTests(SupersetTestCase):
'username': 'gamma',
'first_name': 'Gamma',
'last_name': 'Gamma',
'email': 'gamma@superset.com'
'email': 'gamma@superset.com',
}],
'role_name': update_role_str
'role_name': update_role_str,
}),
follow_redirects=True
follow_redirects=True,
)
update_role = sm.find_role(update_role_str)
self.assertEquals(
@@ -568,16 +568,16 @@ class RequestAccessTests(SupersetTestCase):
'username': 'alpha',
'first_name': 'Alpha',
'last_name': 'Alpha',
'email': 'alpha@superset.com'
'email': 'alpha@superset.com',
}, {
'username': 'unknown',
'first_name': 'Unknown1',
'last_name': 'Unknown2',
'email': 'unknown@superset.com'
'email': 'unknown@superset.com',
}],
'role_name': update_role_str
'role_name': update_role_str,
}),
follow_redirects=True
follow_redirects=True,
)
self.assertEquals(resp.status_code, 201)
update_role = sm.find_role(update_role_str)

View File

@@ -102,12 +102,12 @@ class SupersetTestCase(unittest.TestCase):
druid_datasource1 = DruidDatasource(
datasource_name='druid_ds_1',
cluster_name='druid_test'
cluster_name='druid_test',
)
session.add(druid_datasource1)
druid_datasource2 = DruidDatasource(
datasource_name='druid_ds_2',
cluster_name='druid_test'
cluster_name='druid_test',
)
session.add(druid_datasource2)
session.commit()

View File

@@ -63,7 +63,7 @@ class UtilityFunctionTests(SupersetTestCase):
self.assertEqual(
"CREATE TABLE tmp AS \nSELECT * FROM planets WHERE\n"
"Luke_Father = 'Darth Vader'",
q.as_create_table("tmp")
q.as_create_table("tmp"),
)
@@ -113,12 +113,12 @@ class CeleryTestCase(SupersetTestCase):
def tearDownClass(cls):
subprocess.call(
"ps auxww | grep 'celeryd' | awk '{print $2}' | xargs kill -9",
shell=True
shell=True,
)
subprocess.call(
"ps auxww | grep 'superset worker' | awk '{print $2}' | "
"xargs kill -9",
shell=True
shell=True,
)
def run_sql(self, db_id, sql, client_id, cta='false', tmp_table='tmp',
@@ -148,7 +148,7 @@ class CeleryTestCase(SupersetTestCase):
# In addition some of the engines do not include OFFSET 0.
self.assertTrue(
"SELECT * FROM (SELECT * FROM outer_space;) AS inner_qry "
"LIMIT 100" in ' '.join(updated_select_query.split())
"LIMIT 100" in ' '.join(updated_select_query.split()),
)
select_query_no_semicolon = "SELECT * FROM outer_space"
@@ -157,7 +157,7 @@ class CeleryTestCase(SupersetTestCase):
self.assertTrue(
"SELECT * FROM (SELECT * FROM outer_space) AS inner_qry "
"LIMIT 100" in
' '.join(updated_select_query_no_semicolon.split())
' '.join(updated_select_query_no_semicolon.split()),
)
multi_line_query = (
@@ -167,7 +167,7 @@ class CeleryTestCase(SupersetTestCase):
self.assertTrue(
"SELECT * FROM (SELECT * FROM planets WHERE "
"Luke_Father = 'Darth Vader';) AS inner_qry LIMIT 100" in
' '.join(updated_multi_line_query.split())
' '.join(updated_multi_line_query.split()),
)
def test_run_sync_query_dont_exist(self):
@@ -276,7 +276,7 @@ class CeleryTestCase(SupersetTestCase):
'is_dim': False},
{'is_date': False, 'type': 'STRING',
'name': 'string3', 'is_dim': True}], 'name')
, cols
, cols,
)
else:
self.assertEqual(self.dictify_list_of_dicts([
@@ -296,7 +296,7 @@ class CeleryTestCase(SupersetTestCase):
'is_dim': False},
{'is_date': False, 'type': 'STRING',
'name': 'string3', 'is_dim': True}], 'name')
, cols
, cols,
)

View File

@@ -153,8 +153,8 @@ class CoreTests(SupersetTestCase):
tbl_id,
copy_name,
'saveas',
json.dumps(form_data)
)
json.dumps(form_data),
),
)
slices = db.session.query(models.Slice) \
.filter_by(slice_name=copy_name).all()
@@ -174,8 +174,8 @@ class CoreTests(SupersetTestCase):
tbl_id,
new_slice_name,
'overwrite',
json.dumps(form_data)
)
json.dumps(form_data),
),
)
slc = db.session.query(models.Slice).filter_by(id=new_slice_id).first()
assert slc.slice_name == new_slice_name
@@ -281,7 +281,7 @@ class CoreTests(SupersetTestCase):
data = json.dumps({
'uri': database.safe_sqlalchemy_uri(),
'name': 'main',
'impersonate_user': False
'impersonate_user': False,
})
response = self.client.post('/superset/testconn', data=data, content_type='application/json')
assert response.status_code == 200
@@ -291,7 +291,7 @@ class CoreTests(SupersetTestCase):
data = json.dumps({
'uri': database.sqlalchemy_uri_decrypted,
'name': 'main',
'impersonate_user': False
'impersonate_user': False,
})
response = self.client.post('/superset/testconn', data=data, content_type='application/json')
assert response.status_code == 200
@@ -389,7 +389,7 @@ class CoreTests(SupersetTestCase):
'css': '',
'expanded_slices': {},
'positions': positions,
'dashboard_title': dash.dashboard_title
'dashboard_title': dash.dashboard_title,
}
url = '/superset/save_dash/{}/'.format(dash.id)
resp = self.get_resp(url, data=dict(data=json.dumps(data)))
@@ -416,7 +416,7 @@ class CoreTests(SupersetTestCase):
'expanded_slices': {},
'positions': positions,
'dashboard_title': dash.dashboard_title,
'default_filters': default_filters
'default_filters': default_filters,
}
url = '/superset/save_dash/{}/'.format(dash.id)
@@ -452,7 +452,7 @@ class CoreTests(SupersetTestCase):
'css': '',
'expanded_slices': {},
'positions': positions,
'dashboard_title': 'new title'
'dashboard_title': 'new title',
}
url = '/superset/save_dash/{}/'.format(dash.id)
self.get_resp(url, data=dict(data=json.dumps(data)))
@@ -513,7 +513,7 @@ class CoreTests(SupersetTestCase):
slice_name="Name Cloud").first()
data = {
"slice_ids": [new_slice.data["slice_id"],
existing_slice.data["slice_id"]]
existing_slice.data["slice_id"]],
}
url = '/superset/add_slices/{}/'.format(dash.id)
resp = self.client.post(url, data=dict(data=json.dumps(data)))
@@ -774,7 +774,7 @@ class CoreTests(SupersetTestCase):
fillna_columns = slc.viz.get_fillna_for_columns(results.df.columns)
self.assertDictEqual(
fillna_columns,
{'name': ' NULL', 'sum__num': 0}
{'name': ' NULL', 'sum__num': 0},
)

View File

@@ -36,7 +36,7 @@ SEGMENT_METADATA = [{
"size": 100000, "cardinality": 1504, "errorMessage": None},
"metric1": {
"type": "FLOAT", "hasMultipleValues": False,
"size": 100000, "cardinality": None, "errorMessage": None}
"size": 100000, "cardinality": None, "errorMessage": None},
},
"aggregators": {
"metric1": {
@@ -45,7 +45,7 @@ SEGMENT_METADATA = [{
"fieldName": "metric1"},
},
"size": 300000,
"numRows": 5000000
"numRows": 5000000,
}]
GB_RESULT_SET = [
@@ -55,7 +55,7 @@ GB_RESULT_SET = [
"event": {
"dim1": 'Canada',
"metric1": 12345678,
}
},
},
{
"version": "v1",
@@ -63,7 +63,7 @@ GB_RESULT_SET = [
"event": {
"dim1": 'USA',
"metric1": 12345678 / 2,
}
},
},
]
@@ -195,10 +195,10 @@ class DruidTests(SupersetTestCase):
"ts_column": "d",
"sources": [{
"table": "clicks",
"partition": "d='{{ ds }}'"
}]
}
}
"partition": "d='{{ ds }}'",
}],
},
},
}
def check():
resp = self.client.post('/superset/sync_druid/', data=json.dumps(cfg))
@@ -227,9 +227,9 @@ class DruidTests(SupersetTestCase):
"dimensions": ["affiliate_id", "second_seen"],
"metrics_spec": [
{"type": "bla", "name": "sum"},
{"type": "unique", "name": "unique"}
{"type": "unique", "name": "unique"},
],
}
},
}
resp = self.client.post('/superset/sync_druid/', data=json.dumps(cfg))
druid_ds = db.session.query(DruidDatasource).filter_by(
@@ -308,7 +308,7 @@ class DruidTests(SupersetTestCase):
db.session.add(cluster)
cluster.get_datasources = PickableMock(
return_value=['test_datasource']
return_value=['test_datasource'],
)
cluster.get_druid_version = PickableMock(return_value='0.9.1')
@@ -349,14 +349,14 @@ class DruidTests(SupersetTestCase):
verbose_name='APPROXIMATE_HISTOGRAM(*)',
metric_type='approxHistogramFold',
json=json.dumps(
{'type': 'approxHistogramFold', 'name': 'a_histogram'})
{'type': 'approxHistogramFold', 'name': 'a_histogram'}),
),
'aCustomMetric': DruidMetric(
metric_name='aCustomMetric',
verbose_name='MY_AWESOME_METRIC(*)',
metric_type='aCustomType',
json=json.dumps(
{'type': 'customMetric', 'name': 'aCustomMetric'})
{'type': 'customMetric', 'name': 'aCustomMetric'}),
),
'quantile_p95': DruidMetric(
metric_name='quantile_p95',
@@ -424,7 +424,7 @@ class DruidTests(SupersetTestCase):
self.assertIn('field', res.filter['filter'])
self.assertEqual(
3,
len(res.filter['filter']['field'].filter['filter']['fields'])
len(res.filter['filter']['field'].filter['filter']['fields']),
)
def test_get_filters_constructs_filter_equals(self):
@@ -440,7 +440,7 @@ class DruidTests(SupersetTestCase):
self.assertEqual('not', res.filter['filter']['type'])
self.assertEqual(
'h',
res.filter['filter']['field'].filter['filter']['value']
res.filter['filter']['field'].filter['filter']['value'],
)
def test_get_filters_constructs_bounds_filter(self):

View File

@@ -77,7 +77,7 @@ class ImportExportTests(SupersetTestCase):
viz_type='bubble',
params=json.dumps(params),
datasource_id=ds_id,
id=id
id=id,
)
def create_dashboard(self, title, id=0, slcs=[]):
@@ -88,7 +88,7 @@ class ImportExportTests(SupersetTestCase):
slices=slcs,
position_json='{"size_y": 2, "size_x": 2}',
slug='{}_imported'.format(title.lower()),
json_metadata=json.dumps(json_metadata)
json_metadata=json.dumps(json_metadata),
)
def create_table(
@@ -98,7 +98,7 @@ class ImportExportTests(SupersetTestCase):
id=id,
schema=schema,
table_name=name,
params=json.dumps(params)
params=json.dumps(params),
)
for col_name in cols_names:
table.columns.append(
@@ -114,7 +114,7 @@ class ImportExportTests(SupersetTestCase):
id=id,
datasource_name=name,
cluster_name='druid_test',
params=json.dumps(params)
params=json.dumps(params),
)
for col_name in cols_names:
datasource.columns.append(
@@ -229,13 +229,13 @@ class ImportExportTests(SupersetTestCase):
self.assert_dash_equals(birth_dash, exported_dashboards[0])
self.assertEquals(
birth_dash.id,
json.loads(exported_dashboards[0].json_metadata)['remote_id']
json.loads(exported_dashboards[0].json_metadata)['remote_id'],
)
self.assert_dash_equals(world_health_dash, exported_dashboards[1])
self.assertEquals(
world_health_dash.id,
json.loads(exported_dashboards[1].json_metadata)['remote_id']
json.loads(exported_dashboards[1].json_metadata)['remote_id'],
)
exported_tables = sorted(
@@ -337,8 +337,8 @@ class ImportExportTests(SupersetTestCase):
"filter_immune_slices": ["{}".format(e_slc.id)],
"expanded_slices": {
"{}".format(e_slc.id): True,
"{}".format(b_slc.id): False
}
"{}".format(b_slc.id): False,
},
})
imported_dash_id = models.Dashboard.import_obj(
@@ -358,8 +358,8 @@ class ImportExportTests(SupersetTestCase):
"filter_immune_slices": ["{}".format(i_e_slc.id)],
"expanded_slices": {
'{}'.format(i_e_slc.id): True,
'{}'.format(i_b_slc.id): False
}
'{}'.format(i_b_slc.id): False,
},
}
self.assertEquals(expected_json_metadata,
json.loads(imported_dash.json_metadata))

View File

@@ -68,18 +68,18 @@ class UtilsTestCase(unittest.TestCase):
# copy over extra filters into empty filters
form_data = {'extra_filters': [
{'col': 'a', 'op': 'in', 'val': 'someval'},
{'col': 'B', 'op': '==', 'val': ['c1', 'c2']}
{'col': 'B', 'op': '==', 'val': ['c1', 'c2']},
]}
expected = {'filters': [
{'col': 'a', 'op': 'in', 'val': 'someval'},
{'col': 'B', 'op': '==', 'val': ['c1', 'c2']}
{'col': 'B', 'op': '==', 'val': ['c1', 'c2']},
]}
merge_extra_filters(form_data)
self.assertEquals(form_data, expected)
# adds extra filters to existing filters
form_data = {'extra_filters': [
{'col': 'a', 'op': 'in', 'val': 'someval'},
{'col': 'B', 'op': '==', 'val': ['c1', 'c2']}
{'col': 'B', 'op': '==', 'val': ['c1', 'c2']},
], 'filters': [{'col': 'D', 'op': '!=', 'val': ['G1', 'g2']}]}
expected = {'filters': [
{'col': 'D', 'op': '!=', 'val': ['G1', 'g2']},

View File

@@ -24,7 +24,7 @@ class BaseVizTestCase(unittest.TestCase):
test_viz = viz.BaseViz(datasource, form_data);
self.assertEqual(
test_viz.default_fillna,
test_viz.get_fillna_for_columns()
test_viz.get_fillna_for_columns(),
)
def test_get_df_returns_empty_df(self):
@@ -164,13 +164,13 @@ class TableVizTestCase(unittest.TestCase):
}
test_viz = viz.TableViz(datasource, form_data)
f_query_obj = {
'metrics': form_data['metrics']
'metrics': form_data['metrics'],
}
super_query_obj.return_value = f_query_obj
query_obj = test_viz.query_obj()
self.assertEqual([
'sum__A', 'count', 'avg__C',
'avg__B', 'max__Y'
'avg__B', 'max__Y',
], query_obj['metrics'])
@patch('superset.viz.BaseViz.query_obj')
@@ -195,7 +195,7 @@ class TableVizTestCase(unittest.TestCase):
datasource = Mock()
form_data = {
'all_columns': ['colA', 'colB', 'colC'],
'order_by_cols': ['["colA", "colB"]', '["colC"]']
'order_by_cols': ['["colA", "colB"]', '["colC"]'],
}
super_query_obj.return_value = {
'columns': ['colD', 'colC'],
@@ -212,18 +212,18 @@ class TableVizTestCase(unittest.TestCase):
datasource = Mock()
form_data = {
'timeseries_limit_metric': '__time__',
'order_desc': False
'order_desc': False,
}
super_query_obj.return_value = {
'metrics': ['colA', 'colB']
'metrics': ['colA', 'colB'],
}
test_viz = viz.TableViz(datasource, form_data)
query_obj = test_viz.query_obj()
self.assertEqual([
'colA', 'colB', '__time__'
'colA', 'colB', '__time__',
], query_obj['metrics'])
self.assertEqual([(
'__time__', True
'__time__', True,
)], query_obj['orderby'])
def test_should_be_timeseries_raises_when_no_granularity(self):
@@ -238,7 +238,7 @@ class PairedTTestTestCase(unittest.TestCase):
def test_get_data_transforms_dataframe(self):
form_data = {
'groupby': ['groupA', 'groupB', 'groupC'],
'metrics': ['metric1', 'metric2', 'metric3']
'metrics': ['metric1', 'metric2', 'metric3'],
}
datasource = {'type': 'table'}
# Test data
@@ -330,7 +330,7 @@ class PairedTTestTestCase(unittest.TestCase):
def test_get_data_empty_null_keys(self):
form_data = {
'groupby': [],
'metrics': ['', None]
'metrics': ['', None],
}
datasource = {'type': 'table'}
# Test data
@@ -548,7 +548,7 @@ class PartitionVizTestCase(unittest.TestCase):
len(nest[0]['children']
[0]['children']
[0]['children']
[0]['children'])
[0]['children']),
)
def test_get_data_calls_correct_method(self):

View File

@@ -13,10 +13,10 @@ exclude =
.tox
docs
superset/assets
superset/data
superset/migrations
superset/templates
ignore =
C812
E111
E114
E116
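
The setup.cfg hunk above is what makes the new style stick: flake8 skips every code listed under ignore, and since the hunk swaps exactly one line for one other, the consistent reading is that superset/data joins the exclude list while C812 leaves the ignore list, so missing trailing commas are reported on all future runs. Under that reading, a sketch of the resulting [flake8] section (abridged to the options visible in the hunk):

    [flake8]
    exclude =
        .tox
        docs
        superset/assets
        superset/data
        superset/migrations
        superset/templates
    ignore =
        E111
        E114
        E116

With this in place, running flake8 from the repository root fails on any newly introduced C812 violation instead of silently ignoring it.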