Fix USA's state geojson for 'Country Map' visualization (#4121)

* Fix USA's state geojson for 'Country Map' visualization

Turns out the ISO codes were missing from the geojson file; this adds them
and uses human-readable indents.

* using proper ISO codes

* Linting

New linting rules started applying; I'm guessing a new version of
pylint?
This commit is contained in:
Maxime Beauchemin 2018-01-02 20:21:33 -08:00 committed by GitHub
parent e498f2fcb6
commit 37205099db
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
7 changed files with 206436 additions and 74 deletions

File diff suppressed because one or more lines are too long

View File

@ -185,7 +185,8 @@ appbuilder.add_view(
icon='fa-cubes',
category='Sources',
category_label=__('Sources'),
category_icon='fa-database',)
category_icon='fa-database',
)
class DruidDatasourceModelView(DatasourceModelView, DeleteMixin, YamlExportMixin): # noqa
@ -348,4 +349,4 @@ appbuilder.add_link(
icon='fa-cog')
appbuilder.add_separator('Sources', )
appbuilder.add_separator('Sources')

View File

@ -349,8 +349,7 @@ class SqlaTable(Model, BaseDatasource):
engine = self.database.get_sqla_engine()
sql = '{}'.format(
qry.compile(
engine, compile_kwargs={'literal_binds': True}, ),
qry.compile(engine, compile_kwargs={'literal_binds': True}),
)
df = pd.read_sql_query(sql=sql, con=engine)

View File

@ -294,6 +294,7 @@ appbuilder.add_view(
label=__('Tables'),
category='Sources',
category_label=__('Sources'),
icon='fa-table',)
icon='fa-table',
)
appbuilder.add_separator('Sources')

View File

@ -240,7 +240,7 @@ def execute_sql(
limit=query.limit,
schema=database.force_ctas_schema,
show_cols=False,
latest_partition=False, ))
latest_partition=False))
query.end_time = utils.now_as_float()
session.merge(query)
session.flush()

View File

@ -187,7 +187,7 @@ class DatabaseView(SupersetModelView, DeleteMixin, YamlExportMixin): # noqa
'allow_ctas', 'allow_dml', 'force_ctas_schema', 'impersonate_user']
search_exclude_columns = (
'password', 'tables', 'created_by', 'changed_by', 'queries',
'saved_queries', )
'saved_queries')
edit_columns = add_columns
show_columns = [
'tables',
@ -281,7 +281,7 @@ appbuilder.add_link(
icon='fa-cloud-upload',
category='Manage',
category_label=__('Manage'),
category_icon='fa-wrench',)
category_icon='fa-wrench')
appbuilder.add_view(
@ -291,7 +291,7 @@ appbuilder.add_view(
icon='fa-database',
category='Sources',
category_label=__('Sources'),
category_icon='fa-database',)
category_icon='fa-database')
class DatabaseAsync(DatabaseView):
@ -400,7 +400,7 @@ appbuilder.add_view(
label=__('Access requests'),
category='Security',
category_label=__('Security'),
icon='fa-table',)
icon='fa-table')
class SliceModelView(SupersetModelView, DeleteMixin): # noqa
@ -488,7 +488,7 @@ appbuilder.add_view(
label=__('Charts'),
icon='fa-bar-chart',
category='',
category_icon='',)
category_icon='')
class SliceAsync(SliceModelView): # noqa
@ -615,7 +615,7 @@ appbuilder.add_view(
label=__('Dashboards'),
icon='fa-dashboard',
category='',
category_icon='',)
category_icon='')
class DashboardModelViewAsync(DashboardModelView): # noqa
@ -2594,7 +2594,7 @@ appbuilder.add_link(
icon='fa-upload',
category='Sources',
category_label=__('Sources'),
category_icon='fa-wrench',)
category_icon='fa-wrench')
appbuilder.add_separator('Sources')

View File

@ -833,18 +833,20 @@ class CoreTests(SupersetTestCase):
def test_dataframe_timezone(self):
tz = psycopg2.tz.FixedOffsetTimezone(offset=60, name=None)
data = [(datetime.datetime(2017, 11, 18, 21, 53, 0, 219225, tzinfo=tz),),
(datetime.datetime(2017, 11, 18, 22, 6, 30, 61810, tzinfo=tz,),)]
data = [
(datetime.datetime(2017, 11, 18, 21, 53, 0, 219225, tzinfo=tz),),
(datetime.datetime(2017, 11, 18, 22, 6, 30, 61810, tzinfo=tz),),
]
df = dataframe.SupersetDataFrame(pd.DataFrame(data=list(data),
columns=['data', ]))
columns=['data']))
data = df.data
self.assertDictEqual(
data[0],
{'data': pd.Timestamp('2017-11-18 21:53:00.219225+0100', tz=tz), },
{'data': pd.Timestamp('2017-11-18 21:53:00.219225+0100', tz=tz)},
)
self.assertDictEqual(
data[1],
{'data': pd.Timestamp('2017-11-18 22:06:30.061810+0100', tz=tz), },
{'data': pd.Timestamp('2017-11-18 22:06:30.061810+0100', tz=tz)},
)