[tests] Fix: remove deprecated function aliases for future Python release compatibility. (#8353)

* replaced the deprecated assertEquals alias with assertEqual for future Python release compatibility.

* also removed assertNotEquals per @dpgaspar
This commit is contained in:
Marcus 2019-10-21 07:49:12 -07:00 committed by Daniel Vaz Gaspar
parent 1e533db3a2
commit 4cfd6b3ce1
18 changed files with 315 additions and 315 deletions

View File

@ -130,7 +130,7 @@ class RequestAccessTests(SupersetTestCase):
content_type="application/json",
follow_redirects=True,
)
self.assertNotEquals(405, response.status_code)
self.assertNotEqual(405, response.status_code)
def test_override_role_permissions_1_table(self):
response = self.client.post(
@ -138,15 +138,15 @@ class RequestAccessTests(SupersetTestCase):
data=json.dumps(ROLE_TABLES_PERM_DATA),
content_type="application/json",
)
self.assertEquals(201, response.status_code)
self.assertEqual(201, response.status_code)
updated_override_me = security_manager.find_role("override_me")
self.assertEquals(1, len(updated_override_me.permissions))
self.assertEqual(1, len(updated_override_me.permissions))
birth_names = self.get_table_by_name("birth_names")
self.assertEquals(
self.assertEqual(
birth_names.perm, updated_override_me.permissions[0].view_menu.name
)
self.assertEquals(
self.assertEqual(
"datasource_access", updated_override_me.permissions[0].permission.name
)
@ -156,26 +156,26 @@ class RequestAccessTests(SupersetTestCase):
data=json.dumps(ROLE_ALL_PERM_DATA),
content_type="application/json",
)
self.assertEquals(201, response.status_code)
self.assertEqual(201, response.status_code)
updated_role = security_manager.find_role("override_me")
perms = sorted(updated_role.permissions, key=lambda p: p.view_menu.name)
druid_ds_1 = self.get_druid_ds_by_name("druid_ds_1")
self.assertEquals(druid_ds_1.perm, perms[0].view_menu.name)
self.assertEquals("datasource_access", perms[0].permission.name)
self.assertEqual(druid_ds_1.perm, perms[0].view_menu.name)
self.assertEqual("datasource_access", perms[0].permission.name)
druid_ds_2 = self.get_druid_ds_by_name("druid_ds_2")
self.assertEquals(druid_ds_2.perm, perms[1].view_menu.name)
self.assertEquals(
self.assertEqual(druid_ds_2.perm, perms[1].view_menu.name)
self.assertEqual(
"datasource_access", updated_role.permissions[1].permission.name
)
birth_names = self.get_table_by_name("birth_names")
self.assertEquals(birth_names.perm, perms[2].view_menu.name)
self.assertEquals(
self.assertEqual(birth_names.perm, perms[2].view_menu.name)
self.assertEqual(
"datasource_access", updated_role.permissions[2].permission.name
)
self.assertEquals(3, len(perms))
self.assertEqual(3, len(perms))
def test_override_role_permissions_drops_absent_perms(self):
override_me = security_manager.find_role("override_me")
@ -192,14 +192,14 @@ class RequestAccessTests(SupersetTestCase):
data=json.dumps(ROLE_TABLES_PERM_DATA),
content_type="application/json",
)
self.assertEquals(201, response.status_code)
self.assertEqual(201, response.status_code)
updated_override_me = security_manager.find_role("override_me")
self.assertEquals(1, len(updated_override_me.permissions))
self.assertEqual(1, len(updated_override_me.permissions))
birth_names = self.get_table_by_name("birth_names")
self.assertEquals(
self.assertEqual(
birth_names.perm, updated_override_me.permissions[0].view_menu.name
)
self.assertEquals(
self.assertEqual(
"datasource_access", updated_override_me.permissions[0].permission.name
)

View File

@ -255,8 +255,8 @@ class SupersetTestCase(unittest.TestCase):
self.assertFalse(is_feature_enabled("FOO"))
def test_feature_flags(self):
self.assertEquals(is_feature_enabled("foo"), "bar")
self.assertEquals(is_feature_enabled("super"), "set")
self.assertEqual(is_feature_enabled("foo"), "bar")
self.assertEqual(is_feature_enabled("super"), "set")
def get_dash_by_slug(self, dash_slug):
sesh = db.session()

View File

@ -156,7 +156,7 @@ class CeleryTestCase(SupersetTestCase):
if backend != "postgresql":
# TODO This test won't work in Postgres
results = self.run_sql(db_id, query2.select_sql, "sdf2134")
self.assertEquals(results["status"], "success")
self.assertEqual(results["status"], "success")
self.assertGreater(len(results["data"]), 0)
def test_run_sync_query_cta_no_data(self):

View File

@ -70,7 +70,7 @@ class DashboardTests(SupersetTestCase):
resp = self.get_resp(url)
self.assertIn("[ untitled dashboard ]", resp)
dash_count_after = db.session.query(func.count(models.Dashboard.id)).first()[0]
self.assertEquals(dash_count_before + 1, dash_count_after)
self.assertEqual(dash_count_before + 1, dash_count_after)
def test_dashboard_modes(self):
self.login(username="admin")

View File

@ -26,16 +26,16 @@ from .base_tests import SupersetTestCase
class SupersetDataFrameTestCase(SupersetTestCase):
def test_dedup(self):
self.assertEquals(dedup(["foo", "bar"]), ["foo", "bar"])
self.assertEquals(
self.assertEqual(dedup(["foo", "bar"]), ["foo", "bar"])
self.assertEqual(
dedup(["foo", "bar", "foo", "bar", "Foo"]),
["foo", "bar", "foo__1", "bar__1", "Foo"],
)
self.assertEquals(
self.assertEqual(
dedup(["foo", "bar", "bar", "bar", "Bar"]),
["foo", "bar", "bar__1", "bar__2", "Bar"],
)
self.assertEquals(
self.assertEqual(
dedup(["foo", "bar", "bar", "bar", "Bar"], case_sensitive=False),
["foo", "bar", "bar__1", "bar__2", "Bar__3"],
)
@ -97,14 +97,14 @@ class SupersetDataFrameTestCase(SupersetTestCase):
def test_is_date(self):
f = SupersetDataFrame.is_date
self.assertEquals(f(np.dtype("M"), ""), True)
self.assertEquals(f(np.dtype("f"), "DATETIME"), True)
self.assertEquals(f(np.dtype("i"), "TIMESTAMP"), True)
self.assertEquals(f(None, "DATETIME"), True)
self.assertEquals(f(None, "TIMESTAMP"), True)
self.assertEqual(f(np.dtype("M"), ""), True)
self.assertEqual(f(np.dtype("f"), "DATETIME"), True)
self.assertEqual(f(np.dtype("i"), "TIMESTAMP"), True)
self.assertEqual(f(None, "DATETIME"), True)
self.assertEqual(f(None, "TIMESTAMP"), True)
self.assertEquals(f(None, ""), False)
self.assertEquals(f(np.dtype(np.int32), ""), False)
self.assertEqual(f(None, ""), False)
self.assertEqual(f(np.dtype(np.int32), ""), False)
def test_dedup_with_data(self):
data = [("a", 1), ("a", 2)]

View File

@ -37,7 +37,7 @@ class DatasourceTests(SupersetTestCase):
)
resp = self.get_json_resp(url)
col_names = {o.get("name") for o in resp}
self.assertEquals(
self.assertEqual(
col_names, {"sum_boys", "num", "gender", "name", "ds", "state", "sum_girls"}
)
@ -47,7 +47,7 @@ class DatasourceTests(SupersetTestCase):
obj2 = l2_lookup.get(obj1.get(key))
for k in obj1:
if k not in "id" and obj1.get(k):
self.assertEquals(obj1.get(k), obj2.get(k))
self.assertEqual(obj1.get(k), obj2.get(k))
def test_save(self):
self.login(username="admin")
@ -61,16 +61,16 @@ class DatasourceTests(SupersetTestCase):
elif k == "metrics":
self.compare_lists(datasource_post[k], resp[k], "metric_name")
else:
self.assertEquals(resp[k], datasource_post[k])
self.assertEqual(resp[k], datasource_post[k])
def test_get_datasource(self):
self.login(username="admin")
tbl = self.get_table_by_name("birth_names")
url = f"/datasource/get/{tbl.type}/{tbl.id}/"
resp = self.get_json_resp(url)
self.assertEquals(resp.get("type"), "table")
self.assertEqual(resp.get("type"), "table")
col_names = {o.get("column_name") for o in resp["columns"]}
self.assertEquals(
self.assertEqual(
col_names,
{
"sum_boys",
@ -88,4 +88,4 @@ class DatasourceTests(SupersetTestCase):
self.login(username="admin")
url = f"/datasource/get/druid/500000/"
resp = self.get_json_resp(url)
self.assertEquals(resp.get("error"), "This datasource does not exist")
self.assertEqual(resp.get("error"), "This datasource does not exist")

View File

@ -50,7 +50,7 @@ class DbEngineSpecsTestCase(SupersetTestCase):
""".split(
"\n"
)
self.assertEquals(0, HiveEngineSpec.progress(log))
self.assertEqual(0, HiveEngineSpec.progress(log))
def test_number_of_jobs_progress(self):
log = """
@ -58,7 +58,7 @@ class DbEngineSpecsTestCase(SupersetTestCase):
""".split(
"\n"
)
self.assertEquals(0, HiveEngineSpec.progress(log))
self.assertEqual(0, HiveEngineSpec.progress(log))
def test_job_1_launched_progress(self):
log = """
@ -67,7 +67,7 @@ class DbEngineSpecsTestCase(SupersetTestCase):
""".split(
"\n"
)
self.assertEquals(0, HiveEngineSpec.progress(log))
self.assertEqual(0, HiveEngineSpec.progress(log))
def test_job_1_launched_stage_1_0_progress(self):
log = """
@ -77,7 +77,7 @@ class DbEngineSpecsTestCase(SupersetTestCase):
""".split(
"\n"
)
self.assertEquals(0, HiveEngineSpec.progress(log))
self.assertEqual(0, HiveEngineSpec.progress(log))
def test_job_1_launched_stage_1_map_40_progress(self):
log = """
@ -88,7 +88,7 @@ class DbEngineSpecsTestCase(SupersetTestCase):
""".split(
"\n"
)
self.assertEquals(10, HiveEngineSpec.progress(log))
self.assertEqual(10, HiveEngineSpec.progress(log))
def test_job_1_launched_stage_1_map_80_reduce_40_progress(self):
log = """
@ -100,7 +100,7 @@ class DbEngineSpecsTestCase(SupersetTestCase):
""".split(
"\n"
)
self.assertEquals(30, HiveEngineSpec.progress(log))
self.assertEqual(30, HiveEngineSpec.progress(log))
def test_job_1_launched_stage_2_stages_progress(self):
log = """
@ -114,7 +114,7 @@ class DbEngineSpecsTestCase(SupersetTestCase):
""".split(
"\n"
)
self.assertEquals(12, HiveEngineSpec.progress(log))
self.assertEqual(12, HiveEngineSpec.progress(log))
def test_job_2_launched_stage_2_stages_progress(self):
log = """
@ -127,7 +127,7 @@ class DbEngineSpecsTestCase(SupersetTestCase):
""".split(
"\n"
)
self.assertEquals(60, HiveEngineSpec.progress(log))
self.assertEqual(60, HiveEngineSpec.progress(log))
def test_hive_error_msg(self):
msg = (
@ -136,7 +136,7 @@ class DbEngineSpecsTestCase(SupersetTestCase):
":5 Table not found 'fact_ridesfdslakj'\", statusCode=3, "
"sqlState='42S02', errorCode=10001)){...}"
)
self.assertEquals(
self.assertEqual(
(
"hive error: Error while compiling statement: FAILED: "
"SemanticException [Error 10001]: Line 4:5 "
@ -146,14 +146,14 @@ class DbEngineSpecsTestCase(SupersetTestCase):
)
e = Exception("Some string that doesn't match the regex")
self.assertEquals(f"hive error: {e}", HiveEngineSpec.extract_error_message(e))
self.assertEqual(f"hive error: {e}", HiveEngineSpec.extract_error_message(e))
msg = (
"errorCode=10001, "
'errorMessage="Error while compiling statement"), operationHandle'
'=None)"'
)
self.assertEquals(
self.assertEqual(
("hive error: Error while compiling statement"),
HiveEngineSpec.extract_error_message(Exception(msg)),
)
@ -166,7 +166,7 @@ class DbEngineSpecsTestCase(SupersetTestCase):
):
main = self.get_generic_database()
limited = engine_spec_class.apply_limit_to_sql(sql, limit, main)
self.assertEquals(expected_sql, limited)
self.assertEqual(expected_sql, limited)
def test_extract_limit_from_query(self, engine_spec_class=MySQLEngineSpec):
q0 = "select * from table"
@ -260,17 +260,17 @@ class DbEngineSpecsTestCase(SupersetTestCase):
SupersetTestCase.is_module_installed("MySQLdb"), "mysqlclient not installed"
)
def test_get_datatype_mysql(self):
self.assertEquals("TINY", MySQLEngineSpec.get_datatype(1))
self.assertEquals("VARCHAR", MySQLEngineSpec.get_datatype(15))
self.assertEqual("TINY", MySQLEngineSpec.get_datatype(1))
self.assertEqual("VARCHAR", MySQLEngineSpec.get_datatype(15))
@unittest.skipUnless(
SupersetTestCase.is_module_installed("pyhive"), "pyhive not installed"
)
def test_get_datatype_presto(self):
self.assertEquals("STRING", PrestoEngineSpec.get_datatype("string"))
self.assertEqual("STRING", PrestoEngineSpec.get_datatype("string"))
def test_get_datatype(self):
self.assertEquals("VARCHAR", BaseEngineSpec.get_datatype("VARCHAR"))
self.assertEqual("VARCHAR", BaseEngineSpec.get_datatype("VARCHAR"))
def test_limit_with_implicit_offset(self):
self.sql_limit_regex(
@ -343,7 +343,7 @@ class DbEngineSpecsTestCase(SupersetTestCase):
self.assertSetEqual(defined_grains, intersection, engine)
def test_presto_get_view_names_return_empty_list(self):
self.assertEquals(
self.assertEqual(
[], PrestoEngineSpec.get_view_names(mock.ANY, mock.ANY, mock.ANY)
)
@ -652,7 +652,7 @@ class DbEngineSpecsTestCase(SupersetTestCase):
self.assertEqual("SELECT \nWHERE ds = '01-01-19' AND hour = 1", query_result)
def test_hive_get_view_names_return_empty_list(self):
self.assertEquals(
self.assertEqual(
[], HiveEngineSpec.get_view_names(mock.ANY, mock.ANY, mock.ANY)
)
@ -807,4 +807,4 @@ class DbEngineSpecsTestCase(SupersetTestCase):
expected = ["VARCHAR(255)", "VARCHAR(255)", "DOUBLE PRECISION"]
else:
expected = ["VARCHAR(255)", "VARCHAR(255)", "FLOAT"]
self.assertEquals(col_names, expected)
self.assertEqual(col_names, expected)

View File

@ -116,33 +116,33 @@ class DictImportExportTests(SupersetTestCase):
def yaml_compare(self, obj_1, obj_2):
obj_1_str = yaml.safe_dump(obj_1, default_flow_style=False)
obj_2_str = yaml.safe_dump(obj_2, default_flow_style=False)
self.assertEquals(obj_1_str, obj_2_str)
self.assertEqual(obj_1_str, obj_2_str)
def assert_table_equals(self, expected_ds, actual_ds):
self.assertEquals(expected_ds.table_name, actual_ds.table_name)
self.assertEquals(expected_ds.main_dttm_col, actual_ds.main_dttm_col)
self.assertEquals(expected_ds.schema, actual_ds.schema)
self.assertEquals(len(expected_ds.metrics), len(actual_ds.metrics))
self.assertEquals(len(expected_ds.columns), len(actual_ds.columns))
self.assertEquals(
self.assertEqual(expected_ds.table_name, actual_ds.table_name)
self.assertEqual(expected_ds.main_dttm_col, actual_ds.main_dttm_col)
self.assertEqual(expected_ds.schema, actual_ds.schema)
self.assertEqual(len(expected_ds.metrics), len(actual_ds.metrics))
self.assertEqual(len(expected_ds.columns), len(actual_ds.columns))
self.assertEqual(
set([c.column_name for c in expected_ds.columns]),
set([c.column_name for c in actual_ds.columns]),
)
self.assertEquals(
self.assertEqual(
set([m.metric_name for m in expected_ds.metrics]),
set([m.metric_name for m in actual_ds.metrics]),
)
def assert_datasource_equals(self, expected_ds, actual_ds):
self.assertEquals(expected_ds.datasource_name, actual_ds.datasource_name)
self.assertEquals(expected_ds.main_dttm_col, actual_ds.main_dttm_col)
self.assertEquals(len(expected_ds.metrics), len(actual_ds.metrics))
self.assertEquals(len(expected_ds.columns), len(actual_ds.columns))
self.assertEquals(
self.assertEqual(expected_ds.datasource_name, actual_ds.datasource_name)
self.assertEqual(expected_ds.main_dttm_col, actual_ds.main_dttm_col)
self.assertEqual(len(expected_ds.metrics), len(actual_ds.metrics))
self.assertEqual(len(expected_ds.columns), len(actual_ds.columns))
self.assertEqual(
set([c.column_name for c in expected_ds.columns]),
set([c.column_name for c in actual_ds.columns]),
)
self.assertEquals(
self.assertEqual(
set([m.metric_name for m in expected_ds.metrics]),
set([m.metric_name for m in actual_ds.metrics]),
)
@ -167,7 +167,7 @@ class DictImportExportTests(SupersetTestCase):
db.session.commit()
imported = self.get_table(imported_table.id)
self.assert_table_equals(table, imported)
self.assertEquals(
self.assertEqual(
{DBREF: ID_PREFIX + 2, "database_name": "main"}, json.loads(imported.params)
)
self.yaml_compare(table.export_to_dict(), imported.export_to_dict())
@ -201,7 +201,7 @@ class DictImportExportTests(SupersetTestCase):
db.session.commit()
imported_over = self.get_table(imported_over_table.id)
self.assertEquals(imported_table.id, imported_over.id)
self.assertEqual(imported_table.id, imported_over.id)
expected_table, _ = self.create_table(
"table_override",
id=ID_PREFIX + 3,
@ -231,7 +231,7 @@ class DictImportExportTests(SupersetTestCase):
db.session.commit()
imported_over = self.get_table(imported_over_table.id)
self.assertEquals(imported_table.id, imported_over.id)
self.assertEqual(imported_table.id, imported_over.id)
expected_table, _ = self.create_table(
"table_override",
id=ID_PREFIX + 3,
@ -260,7 +260,7 @@ class DictImportExportTests(SupersetTestCase):
)
imported_copy_table = SqlaTable.import_from_dict(db.session, dict_copy_table)
db.session.commit()
self.assertEquals(imported_table.id, imported_copy_table.id)
self.assertEqual(imported_table.id, imported_copy_table.id)
self.assert_table_equals(copy_table, self.get_table(imported_table.id))
self.yaml_compare(
imported_copy_table.export_to_dict(), imported_table.export_to_dict()
@ -286,7 +286,7 @@ class DictImportExportTests(SupersetTestCase):
db.session.commit()
imported = self.get_datasource(imported_cluster.id)
self.assert_datasource_equals(datasource, imported)
self.assertEquals(
self.assertEqual(
{DBREF: ID_PREFIX + 2, "database_name": "druid_test"},
json.loads(imported.params),
)
@ -320,7 +320,7 @@ class DictImportExportTests(SupersetTestCase):
)
db.session.commit()
imported_over = self.get_datasource(imported_over_cluster.id)
self.assertEquals(imported_cluster.id, imported_over.id)
self.assertEqual(imported_cluster.id, imported_over.id)
expected_datasource, _ = self.create_druid_datasource(
"druid_override",
id=ID_PREFIX + 3,
@ -346,7 +346,7 @@ class DictImportExportTests(SupersetTestCase):
) # syncing metrics and columns
db.session.commit()
imported_over = self.get_datasource(imported_over_cluster.id)
self.assertEquals(imported_cluster.id, imported_over.id)
self.assertEqual(imported_cluster.id, imported_over.id)
expected_datasource, _ = self.create_druid_datasource(
"druid_override",
id=ID_PREFIX + 3,
@ -375,7 +375,7 @@ class DictImportExportTests(SupersetTestCase):
imported_copy = DruidDatasource.import_from_dict(db.session, dict_cp_datasource)
db.session.commit()
self.assertEquals(imported.id, imported_copy.id)
self.assertEqual(imported.id, imported_copy.id)
self.assert_datasource_equals(copy_datasource, self.get_datasource(imported.id))

View File

@ -493,17 +493,17 @@ class DruidTests(SupersetTestCase):
)
def test_urls(self):
cluster = self.get_test_cluster_obj()
self.assertEquals(
self.assertEqual(
cluster.get_base_url("localhost", "9999"), "http://localhost:9999"
)
self.assertEquals(
self.assertEqual(
cluster.get_base_url("http://localhost", "9999"), "http://localhost:9999"
)
self.assertEquals(
self.assertEqual(
cluster.get_base_url("https://localhost", "9999"), "https://localhost:9999"
)
self.assertEquals(
self.assertEqual(
cluster.get_base_broker_url(), "http://localhost:7980/druid/v2"
)
@ -581,7 +581,7 @@ class DruidTests(SupersetTestCase):
url = "/datasource/external_metadata/druid/{}/".format(datasource.id)
resp = self.get_json_resp(url)
col_names = {o.get("name") for o in resp}
self.assertEquals(col_names, {"__time", "dim1", "dim2", "metric1"})
self.assertEqual(col_names, {"__time", "dim1", "dim2", "metric1"})
if __name__ == "__main__":

View File

@ -144,41 +144,41 @@ class ImportExportTests(SupersetTestCase):
return db.session.query(SqlaTable).filter_by(table_name=name).first()
def assert_dash_equals(self, expected_dash, actual_dash, check_position=True):
self.assertEquals(expected_dash.slug, actual_dash.slug)
self.assertEquals(expected_dash.dashboard_title, actual_dash.dashboard_title)
self.assertEquals(len(expected_dash.slices), len(actual_dash.slices))
self.assertEqual(expected_dash.slug, actual_dash.slug)
self.assertEqual(expected_dash.dashboard_title, actual_dash.dashboard_title)
self.assertEqual(len(expected_dash.slices), len(actual_dash.slices))
expected_slices = sorted(expected_dash.slices, key=lambda s: s.slice_name or "")
actual_slices = sorted(actual_dash.slices, key=lambda s: s.slice_name or "")
for e_slc, a_slc in zip(expected_slices, actual_slices):
self.assert_slice_equals(e_slc, a_slc)
if check_position:
self.assertEquals(expected_dash.position_json, actual_dash.position_json)
self.assertEqual(expected_dash.position_json, actual_dash.position_json)
def assert_table_equals(self, expected_ds, actual_ds):
self.assertEquals(expected_ds.table_name, actual_ds.table_name)
self.assertEquals(expected_ds.main_dttm_col, actual_ds.main_dttm_col)
self.assertEquals(expected_ds.schema, actual_ds.schema)
self.assertEquals(len(expected_ds.metrics), len(actual_ds.metrics))
self.assertEquals(len(expected_ds.columns), len(actual_ds.columns))
self.assertEquals(
self.assertEqual(expected_ds.table_name, actual_ds.table_name)
self.assertEqual(expected_ds.main_dttm_col, actual_ds.main_dttm_col)
self.assertEqual(expected_ds.schema, actual_ds.schema)
self.assertEqual(len(expected_ds.metrics), len(actual_ds.metrics))
self.assertEqual(len(expected_ds.columns), len(actual_ds.columns))
self.assertEqual(
set([c.column_name for c in expected_ds.columns]),
set([c.column_name for c in actual_ds.columns]),
)
self.assertEquals(
self.assertEqual(
set([m.metric_name for m in expected_ds.metrics]),
set([m.metric_name for m in actual_ds.metrics]),
)
def assert_datasource_equals(self, expected_ds, actual_ds):
self.assertEquals(expected_ds.datasource_name, actual_ds.datasource_name)
self.assertEquals(expected_ds.main_dttm_col, actual_ds.main_dttm_col)
self.assertEquals(len(expected_ds.metrics), len(actual_ds.metrics))
self.assertEquals(len(expected_ds.columns), len(actual_ds.columns))
self.assertEquals(
self.assertEqual(expected_ds.datasource_name, actual_ds.datasource_name)
self.assertEqual(expected_ds.main_dttm_col, actual_ds.main_dttm_col)
self.assertEqual(len(expected_ds.metrics), len(actual_ds.metrics))
self.assertEqual(len(expected_ds.columns), len(actual_ds.columns))
self.assertEqual(
set([c.column_name for c in expected_ds.columns]),
set([c.column_name for c in actual_ds.columns]),
)
self.assertEquals(
self.assertEqual(
set([m.metric_name for m in expected_ds.metrics]),
set([m.metric_name for m in actual_ds.metrics]),
)
@ -187,9 +187,9 @@ class ImportExportTests(SupersetTestCase):
# to avoid bad slice data (no slice_name)
expected_slc_name = expected_slc.slice_name or ""
actual_slc_name = actual_slc.slice_name or ""
self.assertEquals(expected_slc_name, actual_slc_name)
self.assertEquals(expected_slc.datasource_type, actual_slc.datasource_type)
self.assertEquals(expected_slc.viz_type, actual_slc.viz_type)
self.assertEqual(expected_slc_name, actual_slc_name)
self.assertEqual(expected_slc.datasource_type, actual_slc.datasource_type)
self.assertEqual(expected_slc.viz_type, actual_slc.viz_type)
exp_params = json.loads(expected_slc.params)
actual_params = json.loads(actual_slc.params)
diff_params_keys = (
@ -204,7 +204,7 @@ class ImportExportTests(SupersetTestCase):
actual_params.pop(k)
if k in exp_params:
exp_params.pop(k)
self.assertEquals(exp_params, actual_params)
self.assertEqual(exp_params, actual_params)
def assert_only_exported_slc_fields(self, expected_dash, actual_dash):
""" only exported json has this params
@ -232,7 +232,7 @@ class ImportExportTests(SupersetTestCase):
birth_dash = self.get_dash_by_slug("births")
self.assert_only_exported_slc_fields(birth_dash, exported_dashboards[0])
self.assert_dash_equals(birth_dash, exported_dashboards[0])
self.assertEquals(
self.assertEqual(
birth_dash.id,
json.loads(
exported_dashboards[0].json_metadata,
@ -243,7 +243,7 @@ class ImportExportTests(SupersetTestCase):
exported_tables = json.loads(
resp.data.decode("utf-8"), object_hook=utils.decode_dashboards
)["datasources"]
self.assertEquals(1, len(exported_tables))
self.assertEqual(1, len(exported_tables))
self.assert_table_equals(
self.get_table_by_name("birth_names"), exported_tables[0]
)
@ -262,19 +262,19 @@ class ImportExportTests(SupersetTestCase):
exported_dashboards = sorted(
resp_data.get("dashboards"), key=lambda d: d.dashboard_title
)
self.assertEquals(2, len(exported_dashboards))
self.assertEqual(2, len(exported_dashboards))
birth_dash = self.get_dash_by_slug("births")
self.assert_only_exported_slc_fields(birth_dash, exported_dashboards[0])
self.assert_dash_equals(birth_dash, exported_dashboards[0])
self.assertEquals(
self.assertEqual(
birth_dash.id, json.loads(exported_dashboards[0].json_metadata)["remote_id"]
)
world_health_dash = self.get_dash_by_slug("world_health")
self.assert_only_exported_slc_fields(world_health_dash, exported_dashboards[1])
self.assert_dash_equals(world_health_dash, exported_dashboards[1])
self.assertEquals(
self.assertEqual(
world_health_dash.id,
json.loads(exported_dashboards[1].json_metadata)["remote_id"],
)
@ -282,7 +282,7 @@ class ImportExportTests(SupersetTestCase):
exported_tables = sorted(
resp_data.get("datasources"), key=lambda t: t.table_name
)
self.assertEquals(2, len(exported_tables))
self.assertEqual(2, len(exported_tables))
self.assert_table_equals(
self.get_table_by_name("birth_names"), exported_tables[0]
)
@ -294,11 +294,11 @@ class ImportExportTests(SupersetTestCase):
expected_slice = self.create_slice("Import Me", id=10001)
slc_id = models.Slice.import_obj(expected_slice, None, import_time=1989)
slc = self.get_slice(slc_id)
self.assertEquals(slc.datasource.perm, slc.perm)
self.assertEqual(slc.datasource.perm, slc.perm)
self.assert_slice_equals(expected_slice, slc)
table_id = self.get_table_by_name("wb_health_population").id
self.assertEquals(table_id, self.get_slice(slc_id).datasource_id)
self.assertEqual(table_id, self.get_slice(slc_id).datasource_id)
def test_import_2_slices_for_same_table(self):
table_id = self.get_table_by_name("wb_health_population").id
@ -310,13 +310,13 @@ class ImportExportTests(SupersetTestCase):
imported_slc_1 = self.get_slice(slc_id_1)
imported_slc_2 = self.get_slice(slc_id_2)
self.assertEquals(table_id, imported_slc_1.datasource_id)
self.assertEqual(table_id, imported_slc_1.datasource_id)
self.assert_slice_equals(slc_1, imported_slc_1)
self.assertEquals(imported_slc_1.datasource.perm, imported_slc_1.perm)
self.assertEqual(imported_slc_1.datasource.perm, imported_slc_1.perm)
self.assertEquals(table_id, imported_slc_2.datasource_id)
self.assertEqual(table_id, imported_slc_2.datasource_id)
self.assert_slice_equals(slc_2, imported_slc_2)
self.assertEquals(imported_slc_2.datasource.perm, imported_slc_2.perm)
self.assertEqual(imported_slc_2.datasource.perm, imported_slc_2.perm)
def test_import_slices_for_non_existent_table(self):
with self.assertRaises(AttributeError):
@ -332,7 +332,7 @@ class ImportExportTests(SupersetTestCase):
imported_slc_1 = self.get_slice(slc_1_id)
slc_2 = self.create_slice("Import Me New", id=10005)
slc_2_id = models.Slice.import_obj(slc_2, imported_slc_1, import_time=1990)
self.assertEquals(slc_1_id, slc_2_id)
self.assertEqual(slc_1_id, slc_2_id)
imported_slc_2 = self.get_slice(slc_2_id)
self.assert_slice_equals(slc, imported_slc_2)
@ -371,7 +371,7 @@ class ImportExportTests(SupersetTestCase):
expected_dash = self.create_dashboard("dash_with_1_slice", slcs=[slc], id=10002)
make_transient(expected_dash)
self.assert_dash_equals(expected_dash, imported_dash, check_position=False)
self.assertEquals(
self.assertEqual(
{"remote_id": 10002, "import_time": 1990},
json.loads(imported_dash.json_metadata),
)
@ -381,7 +381,7 @@ class ImportExportTests(SupersetTestCase):
# id from json is used only for updating position with new id
meta = expected_position["DASHBOARD_CHART_TYPE-10006"]["meta"]
meta["chartId"] = imported_dash.slices[0].id
self.assertEquals(expected_position, imported_dash.position)
self.assertEqual(expected_position, imported_dash.position)
def test_import_dashboard_2_slices(self):
e_slc = self.create_slice("e_slc", id=10007, table_name="energy_usage")
@ -421,7 +421,7 @@ class ImportExportTests(SupersetTestCase):
"{}".format(i_b_slc.id): False,
},
}
self.assertEquals(
self.assertEqual(
expected_json_metadata, json.loads(imported_dash.json_metadata)
)
@ -447,14 +447,14 @@ class ImportExportTests(SupersetTestCase):
)
# override doesn't change the id
self.assertEquals(imported_dash_id_1, imported_dash_id_2)
self.assertEqual(imported_dash_id_1, imported_dash_id_2)
expected_dash = self.create_dashboard(
"override_dashboard_new", slcs=[e_slc, b_slc, c_slc], id=10004
)
make_transient(expected_dash)
imported_dash = self.get_dash(imported_dash_id_2)
self.assert_dash_equals(expected_dash, imported_dash, check_position=False)
self.assertEquals(
self.assertEqual(
{"remote_id": 10004, "import_time": 1992},
json.loads(imported_dash.json_metadata),
)
@ -559,7 +559,7 @@ class ImportExportTests(SupersetTestCase):
imported_id = SqlaTable.import_obj(table, import_time=1990)
imported = self.get_table(imported_id)
self.assert_table_equals(table, imported)
self.assertEquals(
self.assertEqual(
{"remote_id": 10002, "import_time": 1990, "database_name": "examples"},
json.loads(imported.params),
)
@ -591,7 +591,7 @@ class ImportExportTests(SupersetTestCase):
imported_over_id = SqlaTable.import_obj(table_over, import_time=1992)
imported_over = self.get_table(imported_over_id)
self.assertEquals(imported_id, imported_over.id)
self.assertEqual(imported_id, imported_over.id)
expected_table = self.create_table(
"table_override",
id=10003,
@ -617,7 +617,7 @@ class ImportExportTests(SupersetTestCase):
)
imported_id_copy = SqlaTable.import_obj(copy_table, import_time=1994)
self.assertEquals(imported_id, imported_id_copy)
self.assertEqual(imported_id, imported_id_copy)
self.assert_table_equals(copy_table, self.get_table(imported_id))
def test_import_druid_no_metadata(self):
@ -633,7 +633,7 @@ class ImportExportTests(SupersetTestCase):
imported_id = DruidDatasource.import_obj(datasource, import_time=1990)
imported = self.get_datasource(imported_id)
self.assert_datasource_equals(datasource, imported)
self.assertEquals(
self.assertEqual(
{"remote_id": 10002, "import_time": 1990, "database_name": "druid_test"},
json.loads(imported.params),
)
@ -663,7 +663,7 @@ class ImportExportTests(SupersetTestCase):
imported_over_id = DruidDatasource.import_obj(table_over, import_time=1992)
imported_over = self.get_datasource(imported_over_id)
self.assertEquals(imported_id, imported_over.id)
self.assertEqual(imported_id, imported_over.id)
expected_datasource = self.create_druid_datasource(
"druid_override",
id=10004,
@ -689,7 +689,7 @@ class ImportExportTests(SupersetTestCase):
)
imported_id_copy = DruidDatasource.import_obj(copy_datasource, import_time=1994)
self.assertEquals(imported_id, imported_id_copy)
self.assertEqual(imported_id, imported_id_copy)
self.assert_datasource_equals(copy_datasource, self.get_datasource(imported_id))

View File

@ -37,4 +37,4 @@ class MigrationTestCase(SupersetTestCase):
self.assertIn("filter_configs", params)
cfg = params["filter_configs"][0]
self.assertEquals(cfg.get("metric"), "foo")
self.assertEqual(cfg.get("metric"), "foo")

View File

@ -36,29 +36,29 @@ class DatabaseModelTestCase(SupersetTestCase):
model = Database(database_name="test_database", sqlalchemy_uri=sqlalchemy_uri)
db = make_url(model.get_sqla_engine().url).database
self.assertEquals("hive/default", db)
self.assertEqual("hive/default", db)
db = make_url(model.get_sqla_engine(schema="core_db").url).database
self.assertEquals("hive/core_db", db)
self.assertEqual("hive/core_db", db)
sqlalchemy_uri = "presto://presto.airbnb.io:8080/hive"
model = Database(database_name="test_database", sqlalchemy_uri=sqlalchemy_uri)
db = make_url(model.get_sqla_engine().url).database
self.assertEquals("hive", db)
self.assertEqual("hive", db)
db = make_url(model.get_sqla_engine(schema="core_db").url).database
self.assertEquals("hive/core_db", db)
self.assertEqual("hive/core_db", db)
def test_database_schema_postgres(self):
sqlalchemy_uri = "postgresql+psycopg2://postgres.airbnb.io:5439/prod"
model = Database(database_name="test_database", sqlalchemy_uri=sqlalchemy_uri)
db = make_url(model.get_sqla_engine().url).database
self.assertEquals("prod", db)
self.assertEqual("prod", db)
db = make_url(model.get_sqla_engine(schema="foo").url).database
self.assertEquals("prod", db)
self.assertEqual("prod", db)
@unittest.skipUnless(
SupersetTestCase.is_module_installed("thrift"), "thrift not installed"
@ -70,10 +70,10 @@ class DatabaseModelTestCase(SupersetTestCase):
sqlalchemy_uri = "hive://hive@hive.airbnb.io:10000/default?auth=NOSASL"
model = Database(database_name="test_database", sqlalchemy_uri=sqlalchemy_uri)
db = make_url(model.get_sqla_engine().url).database
self.assertEquals("default", db)
self.assertEqual("default", db)
db = make_url(model.get_sqla_engine(schema="core_db").url).database
self.assertEquals("core_db", db)
self.assertEqual("core_db", db)
@unittest.skipUnless(
SupersetTestCase.is_module_installed("MySQLdb"), "mysqlclient not installed"
@ -83,10 +83,10 @@ class DatabaseModelTestCase(SupersetTestCase):
model = Database(database_name="test_database", sqlalchemy_uri=sqlalchemy_uri)
db = make_url(model.get_sqla_engine().url).database
self.assertEquals("superset", db)
self.assertEqual("superset", db)
db = make_url(model.get_sqla_engine(schema="staging").url).database
self.assertEquals("staging", db)
self.assertEqual("staging", db)
@unittest.skipUnless(
SupersetTestCase.is_module_installed("MySQLdb"), "mysqlclient not installed"
@ -98,11 +98,11 @@ class DatabaseModelTestCase(SupersetTestCase):
model.impersonate_user = True
user_name = make_url(model.get_sqla_engine(user_name=example_user).url).username
self.assertEquals(example_user, user_name)
self.assertEqual(example_user, user_name)
model.impersonate_user = False
user_name = make_url(model.get_sqla_engine(user_name=example_user).url).username
self.assertNotEquals(example_user, user_name)
self.assertNotEqual(example_user, user_name)
def test_select_star(self):
db = get_example_database()
@ -154,20 +154,20 @@ class DatabaseModelTestCase(SupersetTestCase):
if main_db.backend == "mysql":
df = main_db.get_df("SELECT 1", None)
self.assertEquals(df.iat[0, 0], 1)
self.assertEqual(df.iat[0, 0], 1)
df = main_db.get_df("SELECT 1;", None)
self.assertEquals(df.iat[0, 0], 1)
self.assertEqual(df.iat[0, 0], 1)
def test_multi_statement(self):
main_db = get_example_database()
if main_db.backend == "mysql":
df = main_db.get_df("USE superset; SELECT 1", None)
self.assertEquals(df.iat[0, 0], 1)
self.assertEqual(df.iat[0, 0], 1)
df = main_db.get_df("USE superset; SELECT ';';", None)
self.assertEquals(df.iat[0, 0], ";")
self.assertEqual(df.iat[0, 0], ";")
class SqlaTableModelTestCase(SupersetTestCase):
@ -175,19 +175,19 @@ class SqlaTableModelTestCase(SupersetTestCase):
tbl = self.get_table_by_name("birth_names")
ds_col = tbl.get_column("ds")
sqla_literal = ds_col.get_timestamp_expression(None)
self.assertEquals(str(sqla_literal.compile()), "ds")
self.assertEqual(str(sqla_literal.compile()), "ds")
sqla_literal = ds_col.get_timestamp_expression("P1D")
compiled = "{}".format(sqla_literal.compile())
if tbl.database.backend == "mysql":
self.assertEquals(compiled, "DATE(ds)")
self.assertEqual(compiled, "DATE(ds)")
prev_ds_expr = ds_col.expression
ds_col.expression = "DATE_ADD(ds, 1)"
sqla_literal = ds_col.get_timestamp_expression("P1D")
compiled = "{}".format(sqla_literal.compile())
if tbl.database.backend == "mysql":
self.assertEquals(compiled, "DATE(DATE_ADD(ds, 1))")
self.assertEqual(compiled, "DATE(DATE_ADD(ds, 1))")
ds_col.expression = prev_ds_expr
def test_get_timestamp_expression_epoch(self):
@ -199,20 +199,20 @@ class SqlaTableModelTestCase(SupersetTestCase):
sqla_literal = ds_col.get_timestamp_expression(None)
compiled = "{}".format(sqla_literal.compile())
if tbl.database.backend == "mysql":
self.assertEquals(compiled, "from_unixtime(ds)")
self.assertEqual(compiled, "from_unixtime(ds)")
ds_col.python_date_format = "epoch_s"
sqla_literal = ds_col.get_timestamp_expression("P1D")
compiled = "{}".format(sqla_literal.compile())
if tbl.database.backend == "mysql":
self.assertEquals(compiled, "DATE(from_unixtime(ds))")
self.assertEqual(compiled, "DATE(from_unixtime(ds))")
prev_ds_expr = ds_col.expression
ds_col.expression = "DATE_ADD(ds, 1)"
sqla_literal = ds_col.get_timestamp_expression("P1D")
compiled = "{}".format(sqla_literal.compile())
if tbl.database.backend == "mysql":
self.assertEquals(compiled, "DATE(from_unixtime(DATE_ADD(ds, 1)))")
self.assertEqual(compiled, "DATE(from_unixtime(DATE_ADD(ds, 1)))")
ds_col.expression = prev_ds_expr
def query_with_expr_helper(self, is_timeseries, inner_join=True):

View File

@ -219,7 +219,7 @@ class SchedulesTestCase(unittest.TestCase):
driver.screenshot.assert_not_called()
send_email_smtp.assert_called_once()
self.assertIsNone(send_email_smtp.call_args[1]["images"])
self.assertEquals(
self.assertEqual(
send_email_smtp.call_args[1]["data"]["screenshot.png"],
element.screenshot_as_png,
)
@ -254,8 +254,8 @@ class SchedulesTestCase(unittest.TestCase):
driver.screenshot.assert_called_once()
send_email_smtp.assert_called_once()
self.assertEquals(send_email_smtp.call_args[0][0], self.RECIPIENTS)
self.assertEquals(
self.assertEqual(send_email_smtp.call_args[0][0], self.RECIPIENTS)
self.assertEqual(
list(send_email_smtp.call_args[1]["images"].values())[0],
driver.screenshot.return_value,
)
@ -291,8 +291,8 @@ class SchedulesTestCase(unittest.TestCase):
mtime.sleep.assert_called_once()
driver.screenshot.assert_not_called()
self.assertEquals(send_email_smtp.call_count, 2)
self.assertEquals(send_email_smtp.call_args[1]["bcc"], self.BCC)
self.assertEqual(send_email_smtp.call_count, 2)
self.assertEqual(send_email_smtp.call_args[1]["bcc"], self.BCC)
@patch("superset.tasks.schedules.firefox.webdriver.WebDriver")
@patch("superset.tasks.schedules.send_email_smtp")
@ -323,7 +323,7 @@ class SchedulesTestCase(unittest.TestCase):
driver.screenshot.assert_not_called()
send_email_smtp.assert_called_once()
self.assertEquals(
self.assertEqual(
list(send_email_smtp.call_args[1]["images"].values())[0],
element.screenshot_as_png,
)
@ -357,7 +357,7 @@ class SchedulesTestCase(unittest.TestCase):
driver.screenshot.assert_not_called()
send_email_smtp.assert_called_once()
self.assertEquals(
self.assertEqual(
send_email_smtp.call_args[1]["data"]["screenshot.png"],
element.screenshot_as_png,
)
@ -388,7 +388,7 @@ class SchedulesTestCase(unittest.TestCase):
file_name = __("%(name)s.csv", name=schedule.slice.slice_name)
self.assertEquals(send_email_smtp.call_args[1]["data"][file_name], self.CSV)
self.assertEqual(send_email_smtp.call_args[1]["data"][file_name], self.CSV)
@patch("superset.tasks.schedules.urllib.request.urlopen")
@patch("superset.tasks.schedules.urllib.request.OpenerDirector.open")

View File

@ -26,80 +26,80 @@ class SupersetTestCase(unittest.TestCase):
def test_simple_select(self):
query = "SELECT * FROM tbname"
self.assertEquals({"tbname"}, self.extract_tables(query))
self.assertEqual({"tbname"}, self.extract_tables(query))
query = "SELECT * FROM tbname foo"
self.assertEquals({"tbname"}, self.extract_tables(query))
self.assertEqual({"tbname"}, self.extract_tables(query))
query = "SELECT * FROM tbname AS foo"
self.assertEquals({"tbname"}, self.extract_tables(query))
self.assertEqual({"tbname"}, self.extract_tables(query))
# underscores
query = "SELECT * FROM tb_name"
self.assertEquals({"tb_name"}, self.extract_tables(query))
self.assertEqual({"tb_name"}, self.extract_tables(query))
# quotes
query = 'SELECT * FROM "tbname"'
self.assertEquals({"tbname"}, self.extract_tables(query))
self.assertEqual({"tbname"}, self.extract_tables(query))
# unicode encoding
query = 'SELECT * FROM "tb_name" WHERE city = "Lübeck"'
self.assertEquals({"tb_name"}, self.extract_tables(query))
self.assertEqual({"tb_name"}, self.extract_tables(query))
# schema
self.assertEquals(
self.assertEqual(
{"schemaname.tbname"},
self.extract_tables("SELECT * FROM schemaname.tbname"),
)
self.assertEquals(
self.assertEqual(
{"schemaname.tbname"},
self.extract_tables('SELECT * FROM "schemaname"."tbname"'),
)
self.assertEquals(
self.assertEqual(
{"schemaname.tbname"},
self.extract_tables("SELECT * FROM schemaname.tbname foo"),
)
self.assertEquals(
self.assertEqual(
{"schemaname.tbname"},
self.extract_tables("SELECT * FROM schemaname.tbname AS foo"),
)
# cluster
self.assertEquals(
self.assertEqual(
{"clustername.schemaname.tbname"},
self.extract_tables("SELECT * FROM clustername.schemaname.tbname"),
)
# Ill-defined cluster/schema/table.
self.assertEquals(set(), self.extract_tables("SELECT * FROM schemaname."))
self.assertEqual(set(), self.extract_tables("SELECT * FROM schemaname."))
self.assertEquals(
self.assertEqual(
set(), self.extract_tables("SELECT * FROM clustername.schemaname.")
)
self.assertEquals(set(), self.extract_tables("SELECT * FROM clustername.."))
self.assertEqual(set(), self.extract_tables("SELECT * FROM clustername.."))
self.assertEquals(
self.assertEqual(
set(), self.extract_tables("SELECT * FROM clustername..tbname")
)
# quotes
query = "SELECT field1, field2 FROM tb_name"
self.assertEquals({"tb_name"}, self.extract_tables(query))
self.assertEqual({"tb_name"}, self.extract_tables(query))
query = "SELECT t1.f1, t2.f2 FROM t1, t2"
self.assertEquals({"t1", "t2"}, self.extract_tables(query))
self.assertEqual({"t1", "t2"}, self.extract_tables(query))
def test_select_named_table(self):
query = "SELECT a.date, a.field FROM left_table a LIMIT 10"
self.assertEquals({"left_table"}, self.extract_tables(query))
self.assertEqual({"left_table"}, self.extract_tables(query))
def test_reverse_select(self):
query = "FROM t1 SELECT field"
self.assertEquals({"t1"}, self.extract_tables(query))
self.assertEqual({"t1"}, self.extract_tables(query))
def test_subselect(self):
query = """
@ -111,7 +111,7 @@ class SupersetTestCase(unittest.TestCase):
) sub, s2.t2
WHERE sub.resolution = 'NONE'
"""
self.assertEquals({"s1.t1", "s2.t2"}, self.extract_tables(query))
self.assertEqual({"s1.t1", "s2.t2"}, self.extract_tables(query))
query = """
SELECT sub.*
@ -122,7 +122,7 @@ class SupersetTestCase(unittest.TestCase):
) sub
WHERE sub.resolution = 'NONE'
"""
self.assertEquals({"s1.t1"}, self.extract_tables(query))
self.assertEqual({"s1.t1"}, self.extract_tables(query))
query = """
SELECT * FROM t1
@ -133,21 +133,21 @@ class SupersetTestCase(unittest.TestCase):
WHERE ROW(5*t2.s1,77)=
(SELECT 50,11*s1 FROM t4)));
"""
self.assertEquals({"t1", "t2", "t3", "t4"}, self.extract_tables(query))
self.assertEqual({"t1", "t2", "t3", "t4"}, self.extract_tables(query))
def test_select_in_expression(self):
query = "SELECT f1, (SELECT count(1) FROM t2) FROM t1"
self.assertEquals({"t1", "t2"}, self.extract_tables(query))
self.assertEqual({"t1", "t2"}, self.extract_tables(query))
def test_union(self):
query = "SELECT * FROM t1 UNION SELECT * FROM t2"
self.assertEquals({"t1", "t2"}, self.extract_tables(query))
self.assertEqual({"t1", "t2"}, self.extract_tables(query))
query = "SELECT * FROM t1 UNION ALL SELECT * FROM t2"
self.assertEquals({"t1", "t2"}, self.extract_tables(query))
self.assertEqual({"t1", "t2"}, self.extract_tables(query))
query = "SELECT * FROM t1 INTERSECT ALL SELECT * FROM t2"
self.assertEquals({"t1", "t2"}, self.extract_tables(query))
self.assertEqual({"t1", "t2"}, self.extract_tables(query))
def test_select_from_values(self):
query = "SELECT * FROM VALUES (13, 42)"
@ -158,25 +158,25 @@ class SupersetTestCase(unittest.TestCase):
SELECT ARRAY[1, 2, 3] AS my_array
FROM t1 LIMIT 10
"""
self.assertEquals({"t1"}, self.extract_tables(query))
self.assertEqual({"t1"}, self.extract_tables(query))
def test_select_if(self):
query = """
SELECT IF(CARDINALITY(my_array) >= 3, my_array[3], NULL)
FROM t1 LIMIT 10
"""
self.assertEquals({"t1"}, self.extract_tables(query))
self.assertEqual({"t1"}, self.extract_tables(query))
# SHOW TABLES ((FROM | IN) qualifiedName)? (LIKE pattern=STRING)?
def test_show_tables(self):
query = "SHOW TABLES FROM s1 like '%order%'"
# TODO: figure out what should code do here
self.assertEquals({"s1"}, self.extract_tables(query))
self.assertEqual({"s1"}, self.extract_tables(query))
# SHOW COLUMNS (FROM | IN) qualifiedName
def test_show_columns(self):
query = "SHOW COLUMNS FROM t1"
self.assertEquals({"t1"}, self.extract_tables(query))
self.assertEqual({"t1"}, self.extract_tables(query))
def test_where_subquery(self):
query = """
@ -184,25 +184,25 @@ class SupersetTestCase(unittest.TestCase):
FROM t1
WHERE regionkey = (SELECT max(regionkey) FROM t2)
"""
self.assertEquals({"t1", "t2"}, self.extract_tables(query))
self.assertEqual({"t1", "t2"}, self.extract_tables(query))
query = """
SELECT name
FROM t1
WHERE regionkey IN (SELECT regionkey FROM t2)
"""
self.assertEquals({"t1", "t2"}, self.extract_tables(query))
self.assertEqual({"t1", "t2"}, self.extract_tables(query))
query = """
SELECT name
FROM t1
WHERE regionkey EXISTS (SELECT regionkey FROM t2)
"""
self.assertEquals({"t1", "t2"}, self.extract_tables(query))
self.assertEqual({"t1", "t2"}, self.extract_tables(query))
# DESCRIBE | DESC qualifiedName
def test_describe(self):
self.assertEquals({"t1"}, self.extract_tables("DESCRIBE t1"))
self.assertEqual({"t1"}, self.extract_tables("DESCRIBE t1"))
# SHOW PARTITIONS FROM qualifiedName (WHERE booleanExpression)?
# (ORDER BY sortItem (',' sortItem)*)? (LIMIT limit=(INTEGER_VALUE | ALL))?
@ -211,11 +211,11 @@ class SupersetTestCase(unittest.TestCase):
SHOW PARTITIONS FROM orders
WHERE ds >= '2013-01-01' ORDER BY ds DESC;
"""
self.assertEquals({"orders"}, self.extract_tables(query))
self.assertEqual({"orders"}, self.extract_tables(query))
def test_join(self):
query = "SELECT t1.*, t2.* FROM t1 JOIN t2 ON t1.a = t2.a;"
self.assertEquals({"t1", "t2"}, self.extract_tables(query))
self.assertEqual({"t1", "t2"}, self.extract_tables(query))
# subquery + join
query = """
@ -229,7 +229,7 @@ class SupersetTestCase(unittest.TestCase):
) b
ON a.date = b.date
"""
self.assertEquals({"left_table", "right_table"}, self.extract_tables(query))
self.assertEqual({"left_table", "right_table"}, self.extract_tables(query))
query = """
SELECT a.date, b.name FROM
@ -242,7 +242,7 @@ class SupersetTestCase(unittest.TestCase):
) b
ON a.date = b.date
"""
self.assertEquals({"left_table", "right_table"}, self.extract_tables(query))
self.assertEqual({"left_table", "right_table"}, self.extract_tables(query))
query = """
SELECT a.date, b.name FROM
@ -255,7 +255,7 @@ class SupersetTestCase(unittest.TestCase):
) b
ON a.date = b.date
"""
self.assertEquals({"left_table", "right_table"}, self.extract_tables(query))
self.assertEqual({"left_table", "right_table"}, self.extract_tables(query))
query = """
SELECT a.date, b.name FROM
@ -268,7 +268,7 @@ class SupersetTestCase(unittest.TestCase):
) b
ON a.date = b.date
"""
self.assertEquals({"left_table", "right_table"}, self.extract_tables(query))
self.assertEqual({"left_table", "right_table"}, self.extract_tables(query))
# TODO: add SEMI join support, SQL Parse does not handle it.
# query = """
@ -282,7 +282,7 @@ class SupersetTestCase(unittest.TestCase):
# ) b
# ON a.date = b.date
# """
# self.assertEquals({'left_table', 'right_table'},
# self.assertEqual({'left_table', 'right_table'},
# sql_parse.extract_tables(query))
def test_combinations(self):
@ -296,13 +296,13 @@ class SupersetTestCase(unittest.TestCase):
WHERE ROW(5*t3.s1,77)=
(SELECT 50,11*s1 FROM t4)));
"""
self.assertEquals({"t1", "t3", "t4", "t6"}, self.extract_tables(query))
self.assertEqual({"t1", "t3", "t4", "t6"}, self.extract_tables(query))
query = """
SELECT * FROM (SELECT * FROM (SELECT * FROM (SELECT * FROM EmployeeS)
AS S1) AS S2) AS S3;
"""
self.assertEquals({"EmployeeS"}, self.extract_tables(query))
self.assertEqual({"EmployeeS"}, self.extract_tables(query))
def test_with(self):
query = """
@ -312,7 +312,7 @@ class SupersetTestCase(unittest.TestCase):
z AS (SELECT b AS c FROM t3)
SELECT c FROM z;
"""
self.assertEquals({"t1", "t2", "t3"}, self.extract_tables(query))
self.assertEqual({"t1", "t2", "t3"}, self.extract_tables(query))
query = """
WITH
@ -321,7 +321,7 @@ class SupersetTestCase(unittest.TestCase):
z AS (SELECT b AS c FROM y)
SELECT c FROM z;
"""
self.assertEquals({"t1"}, self.extract_tables(query))
self.assertEqual({"t1"}, self.extract_tables(query))
def test_reusing_aliases(self):
query = """
@ -329,26 +329,26 @@ class SupersetTestCase(unittest.TestCase):
q2 as ( select key from src where key = '5')
select * from (select key from q1) a;
"""
self.assertEquals({"src"}, self.extract_tables(query))
self.assertEqual({"src"}, self.extract_tables(query))
def test_multistatement(self):
query = "SELECT * FROM t1; SELECT * FROM t2"
self.assertEquals({"t1", "t2"}, self.extract_tables(query))
self.assertEqual({"t1", "t2"}, self.extract_tables(query))
query = "SELECT * FROM t1; SELECT * FROM t2;"
self.assertEquals({"t1", "t2"}, self.extract_tables(query))
self.assertEqual({"t1", "t2"}, self.extract_tables(query))
def test_update_not_select(self):
sql = sql_parse.ParsedQuery("UPDATE t1 SET col1 = NULL")
self.assertEquals(False, sql.is_select())
self.assertEquals(False, sql.is_readonly())
self.assertEqual(False, sql.is_select())
self.assertEqual(False, sql.is_readonly())
def test_explain(self):
sql = sql_parse.ParsedQuery("EXPLAIN SELECT 1")
self.assertEquals(True, sql.is_explain())
self.assertEquals(False, sql.is_select())
self.assertEquals(True, sql.is_readonly())
self.assertEqual(True, sql.is_explain())
self.assertEqual(False, sql.is_select())
self.assertEqual(True, sql.is_readonly())
def test_complex_extract_tables(self):
query = """SELECT sum(m_examples) AS "sum__m_example"
@ -366,7 +366,7 @@ class SupersetTestCase(unittest.TestCase):
ORDER BY 2 ASC) AS "meh"
ORDER BY "sum__m_example" DESC
LIMIT 10;"""
self.assertEquals(
self.assertEqual(
{"my_l_table", "my_b_table", "my_t_table", "inner_table"},
self.extract_tables(query),
)
@ -375,13 +375,13 @@ class SupersetTestCase(unittest.TestCase):
query = """SELECT *
FROM table_a AS a, table_b AS b, table_c as c
WHERE a.id = b.id and b.id = c.id"""
self.assertEquals({"table_a", "table_b", "table_c"}, self.extract_tables(query))
self.assertEqual({"table_a", "table_b", "table_c"}, self.extract_tables(query))
def test_mixed_from_clause(self):
query = """SELECT *
FROM table_a AS a, (select * from table_b) AS b, table_c as c
WHERE a.id = b.id and b.id = c.id"""
self.assertEquals({"table_a", "table_b", "table_c"}, self.extract_tables(query))
self.assertEqual({"table_a", "table_b", "table_c"}, self.extract_tables(query))
def test_nested_selects(self):
query = """
@ -389,13 +389,13 @@ class SupersetTestCase(unittest.TestCase):
from INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_SCHEMA like "%bi%"),0x7e)));
"""
self.assertEquals({"INFORMATION_SCHEMA.COLUMNS"}, self.extract_tables(query))
self.assertEqual({"INFORMATION_SCHEMA.COLUMNS"}, self.extract_tables(query))
query = """
select (extractvalue(1,concat(0x7e,(select GROUP_CONCAT(COLUMN_NAME)
from INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_NAME="bi_achivement_daily"),0x7e)));
"""
self.assertEquals({"INFORMATION_SCHEMA.COLUMNS"}, self.extract_tables(query))
self.assertEqual({"INFORMATION_SCHEMA.COLUMNS"}, self.extract_tables(query))
def test_complex_extract_tables3(self):
query = """SELECT somecol AS somecol
@ -431,7 +431,7 @@ class SupersetTestCase(unittest.TestCase):
WHERE 2=2
GROUP BY last_col
LIMIT 50000;"""
self.assertEquals({"a", "b", "c", "d", "e", "f"}, self.extract_tables(query))
self.assertEqual({"a", "b", "c", "d", "e", "f"}, self.extract_tables(query))
def test_complex_cte_with_prefix(self):
query = """
@ -446,26 +446,26 @@ class SupersetTestCase(unittest.TestCase):
GROUP BY SalesYear, SalesPersonID
ORDER BY SalesPersonID, SalesYear;
"""
self.assertEquals({"SalesOrderHeader"}, self.extract_tables(query))
self.assertEqual({"SalesOrderHeader"}, self.extract_tables(query))
def test_get_query_with_new_limit_comment(self):
sql = "SELECT * FROM birth_names -- SOME COMMENT"
parsed = sql_parse.ParsedQuery(sql)
newsql = parsed.get_query_with_new_limit(1000)
self.assertEquals(newsql, sql + "\nLIMIT 1000")
self.assertEqual(newsql, sql + "\nLIMIT 1000")
def test_get_query_with_new_limit_comment_with_limit(self):
sql = "SELECT * FROM birth_names -- SOME COMMENT WITH LIMIT 555"
parsed = sql_parse.ParsedQuery(sql)
newsql = parsed.get_query_with_new_limit(1000)
self.assertEquals(newsql, sql + "\nLIMIT 1000")
self.assertEqual(newsql, sql + "\nLIMIT 1000")
def test_get_query_with_new_limit(self):
sql = "SELECT * FROM birth_names LIMIT 555"
parsed = sql_parse.ParsedQuery(sql)
newsql = parsed.get_query_with_new_limit(1000)
expected = "SELECT * FROM birth_names LIMIT 1000"
self.assertEquals(newsql, expected)
self.assertEqual(newsql, expected)
def test_basic_breakdown_statements(self):
multi_sql = """
@ -474,9 +474,9 @@ class SupersetTestCase(unittest.TestCase):
"""
parsed = sql_parse.ParsedQuery(multi_sql)
statements = parsed.get_statements()
self.assertEquals(len(statements), 2)
self.assertEqual(len(statements), 2)
expected = ["SELECT * FROM birth_names", "SELECT * FROM birth_names LIMIT 1"]
self.assertEquals(statements, expected)
self.assertEqual(statements, expected)
def test_messy_breakdown_statements(self):
multi_sql = """
@ -487,14 +487,14 @@ class SupersetTestCase(unittest.TestCase):
"""
parsed = sql_parse.ParsedQuery(multi_sql)
statements = parsed.get_statements()
self.assertEquals(len(statements), 4)
self.assertEqual(len(statements), 4)
expected = [
"SELECT 1",
"SELECT 2",
"SELECT * FROM birth_names",
"SELECT * FROM birth_names LIMIT 1",
]
self.assertEquals(statements, expected)
self.assertEqual(statements, expected)
def test_identifier_list_with_keyword_as_alias(self):
query = """
@ -503,4 +503,4 @@ class SupersetTestCase(unittest.TestCase):
match AS (SELECT * FROM f)
SELECT * FROM match
"""
self.assertEquals({"foo"}, self.extract_tables(query))
self.assertEqual({"foo"}, self.extract_tables(query))

View File

@ -25,22 +25,22 @@ class DatabaseModelTestCase(SupersetTestCase):
def test_is_time_druid_time_col(self):
"""Druid has a special __time column"""
col = TableColumn(column_name="__time", type="INTEGER")
self.assertEquals(col.is_dttm, None)
self.assertEqual(col.is_dttm, None)
DruidEngineSpec.alter_new_orm_column(col)
self.assertEquals(col.is_dttm, True)
self.assertEqual(col.is_dttm, True)
col = TableColumn(column_name="__not_time", type="INTEGER")
self.assertEquals(col.is_time, False)
self.assertEqual(col.is_time, False)
def test_is_time_by_type(self):
col = TableColumn(column_name="foo", type="DATE")
self.assertEquals(col.is_time, True)
self.assertEqual(col.is_time, True)
col = TableColumn(column_name="foo", type="DATETIME")
self.assertEquals(col.is_time, True)
self.assertEqual(col.is_time, True)
col = TableColumn(column_name="foo", type="STRING")
self.assertEquals(col.is_time, False)
self.assertEqual(col.is_time, False)
def test_has_extra_cache_keys(self):
query = "SELECT '{{ cache_key_wrapper('user_1') }}' as user"

View File

@ -113,19 +113,19 @@ class SqlLabTests(SupersetTestCase):
# Not logged in, should error out
resp = self.client.get("/superset/queries/0")
# Redirects to the login page
self.assertEquals(403, resp.status_code)
self.assertEqual(403, resp.status_code)
# Admin sees queries
self.login("admin")
data = self.get_json_resp("/superset/queries/0")
self.assertEquals(2, len(data))
self.assertEqual(2, len(data))
# Run 2 more queries
self.run_sql("SELECT * FROM birth_names LIMIT 1", client_id="client_id_4")
self.run_sql("SELECT * FROM birth_names LIMIT 2", client_id="client_id_5")
self.login("admin")
data = self.get_json_resp("/superset/queries/0")
self.assertEquals(4, len(data))
self.assertEqual(4, len(data))
now = datetime.now() + timedelta(days=1)
query = (
@ -139,12 +139,12 @@ class SqlLabTests(SupersetTestCase):
data = self.get_json_resp(
"/superset/queries/{}".format(int(datetime_to_epoch(now)) - 1000)
)
self.assertEquals(1, len(data))
self.assertEqual(1, len(data))
self.logout()
resp = self.client.get("/superset/queries/0")
# Redirects to the login page
self.assertEquals(403, resp.status_code)
self.assertEqual(403, resp.status_code)
def test_search_query_on_db_id(self):
self.run_some_queries()
@ -155,13 +155,13 @@ class SqlLabTests(SupersetTestCase):
data = self.get_json_resp(
f"/superset/search_queries?database_id={examples_dbid}"
)
self.assertEquals(3, len(data))
self.assertEqual(3, len(data))
db_ids = [k["dbId"] for k in data]
self.assertEquals([examples_dbid for i in range(3)], db_ids)
self.assertEqual([examples_dbid for i in range(3)], db_ids)
resp = self.get_resp("/superset/search_queries?database_id=-1")
data = json.loads(resp)
self.assertEquals(0, len(data))
self.assertEqual(0, len(data))
def test_search_query_on_user(self):
self.run_some_queries()
@ -170,15 +170,15 @@ class SqlLabTests(SupersetTestCase):
# Test search queries on user Id
user_id = security_manager.find_user("admin").id
data = self.get_json_resp("/superset/search_queries?user_id={}".format(user_id))
self.assertEquals(2, len(data))
self.assertEqual(2, len(data))
user_ids = {k["userId"] for k in data}
self.assertEquals(set([user_id]), user_ids)
self.assertEqual(set([user_id]), user_ids)
user_id = security_manager.find_user("gamma_sqllab").id
resp = self.get_resp("/superset/search_queries?user_id={}".format(user_id))
data = json.loads(resp)
self.assertEquals(1, len(data))
self.assertEquals(data[0]["userId"], user_id)
self.assertEqual(1, len(data))
self.assertEqual(data[0]["userId"], user_id)
def test_search_query_on_status(self):
self.run_some_queries()
@ -186,21 +186,21 @@ class SqlLabTests(SupersetTestCase):
# Test search queries on status
resp = self.get_resp("/superset/search_queries?status=success")
data = json.loads(resp)
self.assertEquals(2, len(data))
self.assertEqual(2, len(data))
states = [k["state"] for k in data]
self.assertEquals(["success", "success"], states)
self.assertEqual(["success", "success"], states)
resp = self.get_resp("/superset/search_queries?status=failed")
data = json.loads(resp)
self.assertEquals(1, len(data))
self.assertEquals(data[0]["state"], "failed")
self.assertEqual(1, len(data))
self.assertEqual(data[0]["state"], "failed")
def test_search_query_on_text(self):
self.run_some_queries()
self.login("admin")
url = "/superset/search_queries?search_text=birth"
data = self.get_json_resp(url)
self.assertEquals(2, len(data))
self.assertEqual(2, len(data))
self.assertIn("birth", data[0]["sql"])
def test_search_query_on_time(self):
@ -218,7 +218,7 @@ class SqlLabTests(SupersetTestCase):
params = [from_time, to_time]
resp = self.get_resp("/superset/search_queries?" + "&".join(params))
data = json.loads(resp)
self.assertEquals(2, len(data))
self.assertEqual(2, len(data))
def test_search_query_with_owner_only_perms(self) -> None:
"""
@ -242,9 +242,9 @@ class SqlLabTests(SupersetTestCase):
user_id = security_manager.find_user("admin").id
data = self.get_json_resp("/superset/search_queries")
self.assertEquals(2, len(data))
self.assertEqual(2, len(data))
user_ids = {k["userId"] for k in data}
self.assertEquals(set([user_id]), user_ids)
self.assertEqual(set([user_id]), user_ids)
# Remove can_only_access_owned_queries from Admin
owned_queries_view = security_manager.find_permission_view_menu(
@ -269,24 +269,24 @@ class SqlLabTests(SupersetTestCase):
data = [["a", 4, 4.0]]
cdf = SupersetDataFrame(data, cols, BaseEngineSpec)
self.assertEquals(len(data), cdf.size)
self.assertEquals(len(cols), len(cdf.columns))
self.assertEqual(len(data), cdf.size)
self.assertEqual(len(cols), len(cdf.columns))
def test_df_conversion_tuple(self):
cols = ["string_col", "int_col", "list_col", "float_col"]
data = [("Text", 111, [123], 1.0)]
cdf = SupersetDataFrame(data, cols, BaseEngineSpec)
self.assertEquals(len(data), cdf.size)
self.assertEquals(len(cols), len(cdf.columns))
self.assertEqual(len(data), cdf.size)
self.assertEqual(len(cols), len(cdf.columns))
def test_df_conversion_dict(self):
cols = ["string_col", "dict_col", "int_col"]
data = [["a", {"c1": 1, "c2": 2, "c3": 3}, 4]]
cdf = SupersetDataFrame(data, cols, BaseEngineSpec)
self.assertEquals(len(data), cdf.size)
self.assertEquals(len(cols), len(cdf.columns))
self.assertEqual(len(data), cdf.size)
self.assertEqual(len(cols), len(cdf.columns))
def test_sqllab_viz(self):
examples_dbid = get_example_database().id
@ -327,19 +327,19 @@ class SqlLabTests(SupersetTestCase):
data = self.run_sql(
"SELECT * FROM birth_names", client_id="sql_limit_2", query_limit=test_limit
)
self.assertEquals(len(data["data"]), test_limit)
self.assertEqual(len(data["data"]), test_limit)
data = self.run_sql(
"SELECT * FROM birth_names LIMIT {}".format(test_limit),
client_id="sql_limit_3",
query_limit=test_limit + 1,
)
self.assertEquals(len(data["data"]), test_limit)
self.assertEqual(len(data["data"]), test_limit)
data = self.run_sql(
"SELECT * FROM birth_names LIMIT {}".format(test_limit + 1),
client_id="sql_limit_4",
query_limit=test_limit,
)
self.assertEquals(len(data["data"]), test_limit)
self.assertEqual(len(data["data"]), test_limit)
def test_queryview_filter(self) -> None:
"""
@ -353,7 +353,7 @@ class SqlLabTests(SupersetTestCase):
data = self.get_json_resp(url)
admin = security_manager.find_user("admin")
gamma_sqllab = security_manager.find_user("gamma_sqllab")
self.assertEquals(3, len(data["result"]))
self.assertEqual(3, len(data["result"]))
user_queries = [result.get("username") for result in data["result"]]
assert admin.username in user_queries
assert gamma_sqllab.username in user_queries
@ -381,7 +381,7 @@ class SqlLabTests(SupersetTestCase):
url = "/queryview/api/read"
data = self.get_json_resp(url)
admin = security_manager.find_user("admin")
self.assertEquals(2, len(data["result"]))
self.assertEqual(2, len(data["result"]))
all_admin_user_queries = all(
[result.get("username") == admin.username for result in data["result"]]
)
@ -410,7 +410,7 @@ class SqlLabTests(SupersetTestCase):
"page_size": -1,
}
url = "api/v1/database/?{}={}".format("q", prison.dumps(arguments))
self.assertEquals(
self.assertEqual(
{"examples", "fake_db_100"},
{r.get("database_name") for r in self.get_json_resp(url)["result"]},
)

View File

@ -127,24 +127,24 @@ class UtilsTestCase(unittest.TestCase):
def test_parse_human_timedelta(self, mock_datetime):
mock_datetime.now.return_value = datetime(2019, 4, 1)
mock_datetime.side_effect = lambda *args, **kw: datetime(*args, **kw)
self.assertEquals(parse_human_timedelta("now"), timedelta(0))
self.assertEquals(parse_human_timedelta("1 year"), timedelta(366))
self.assertEquals(parse_human_timedelta("-1 year"), timedelta(-365))
self.assertEqual(parse_human_timedelta("now"), timedelta(0))
self.assertEqual(parse_human_timedelta("1 year"), timedelta(366))
self.assertEqual(parse_human_timedelta("-1 year"), timedelta(-365))
@patch("superset.utils.core.datetime")
def test_parse_past_timedelta(self, mock_datetime):
mock_datetime.now.return_value = datetime(2019, 4, 1)
mock_datetime.side_effect = lambda *args, **kw: datetime(*args, **kw)
self.assertEquals(parse_past_timedelta("1 year"), timedelta(365))
self.assertEquals(parse_past_timedelta("-1 year"), timedelta(365))
self.assertEquals(parse_past_timedelta("52 weeks"), timedelta(364))
self.assertEquals(parse_past_timedelta("1 month"), timedelta(31))
self.assertEqual(parse_past_timedelta("1 year"), timedelta(365))
self.assertEqual(parse_past_timedelta("-1 year"), timedelta(365))
self.assertEqual(parse_past_timedelta("52 weeks"), timedelta(364))
self.assertEqual(parse_past_timedelta("1 month"), timedelta(31))
def test_zlib_compression(self):
json_str = '{"test": 1}'
blob = zlib_compress(json_str)
got_str = zlib_decompress(blob)
self.assertEquals(json_str, got_str)
self.assertEqual(json_str, got_str)
@patch("superset.utils.core.to_adhoc", mock_to_adhoc)
def test_merge_extra_filters(self):
@ -152,12 +152,12 @@ class UtilsTestCase(unittest.TestCase):
form_data = {"A": 1, "B": 2, "c": "test"}
expected = {"A": 1, "B": 2, "c": "test"}
merge_extra_filters(form_data)
self.assertEquals(form_data, expected)
self.assertEqual(form_data, expected)
# empty extra_filters
form_data = {"A": 1, "B": 2, "c": "test", "extra_filters": []}
expected = {"A": 1, "B": 2, "c": "test", "adhoc_filters": []}
merge_extra_filters(form_data)
self.assertEquals(form_data, expected)
self.assertEqual(form_data, expected)
# copy over extra filters into empty filters
form_data = {
"extra_filters": [
@ -184,7 +184,7 @@ class UtilsTestCase(unittest.TestCase):
]
}
merge_extra_filters(form_data)
self.assertEquals(form_data, expected)
self.assertEqual(form_data, expected)
# adds extra filters to existing filters
form_data = {
"extra_filters": [
@ -227,7 +227,7 @@ class UtilsTestCase(unittest.TestCase):
]
}
merge_extra_filters(form_data)
self.assertEquals(form_data, expected)
self.assertEqual(form_data, expected)
# adds extra filters to existing filters and sets time options
form_data = {
"extra_filters": [
@ -256,7 +256,7 @@ class UtilsTestCase(unittest.TestCase):
"druid_time_origin": "now",
}
merge_extra_filters(form_data)
self.assertEquals(form_data, expected)
self.assertEqual(form_data, expected)
@patch("superset.utils.core.to_adhoc", mock_to_adhoc)
def test_merge_extra_filters_ignores_empty_filters(self):
@ -268,7 +268,7 @@ class UtilsTestCase(unittest.TestCase):
}
expected = {"adhoc_filters": []}
merge_extra_filters(form_data)
self.assertEquals(form_data, expected)
self.assertEqual(form_data, expected)
@patch("superset.utils.core.to_adhoc", mock_to_adhoc)
def test_merge_extra_filters_ignores_nones(self):
@ -296,7 +296,7 @@ class UtilsTestCase(unittest.TestCase):
]
}
merge_extra_filters(form_data)
self.assertEquals(form_data, expected)
self.assertEqual(form_data, expected)
@patch("superset.utils.core.to_adhoc", mock_to_adhoc)
def test_merge_extra_filters_ignores_equal_filters(self):
@ -356,7 +356,7 @@ class UtilsTestCase(unittest.TestCase):
]
}
merge_extra_filters(form_data)
self.assertEquals(form_data, expected)
self.assertEqual(form_data, expected)
@patch("superset.utils.core.to_adhoc", mock_to_adhoc)
def test_merge_extra_filters_merges_different_val_types(self):
@ -408,7 +408,7 @@ class UtilsTestCase(unittest.TestCase):
]
}
merge_extra_filters(form_data)
self.assertEquals(form_data, expected)
self.assertEqual(form_data, expected)
form_data = {
"extra_filters": [
{"col": "a", "op": "in", "val": "someval"},
@ -457,7 +457,7 @@ class UtilsTestCase(unittest.TestCase):
]
}
merge_extra_filters(form_data)
self.assertEquals(form_data, expected)
self.assertEqual(form_data, expected)
@patch("superset.utils.core.to_adhoc", mock_to_adhoc)
def test_merge_extra_filters_adds_unequal_lists(self):
@ -516,7 +516,7 @@ class UtilsTestCase(unittest.TestCase):
]
}
merge_extra_filters(form_data)
self.assertEquals(form_data, expected)
self.assertEqual(form_data, expected)
def test_merge_request_params(self):
form_data = {"since": "2000", "until": "now"}
@ -527,25 +527,25 @@ class UtilsTestCase(unittest.TestCase):
self.assertNotIn("form_data", form_data.keys())
def test_datetime_f(self):
    """datetime_f should wrap a formatted timestamp in <nobr> tags,
    shortening same-day values to time-only and rendering None literally."""
    # Full ISO timestamp with microseconds.
    self.assertEqual(
        datetime_f(datetime(1990, 9, 21, 19, 11, 19, 626096)),
        "<nobr>1990-09-21T19:11:19.626096</nobr>",
    )
    # "now" always yields a fixed-width rendering: 28 characters total.
    self.assertEqual(len(datetime_f(datetime.now())), 28)
    # None is rendered as the literal string "None", still wrapped.
    self.assertEqual(datetime_f(None), "<nobr>None</nobr>")
    # Midnight of the current day collapses to the time-only form.
    iso = datetime.now().isoformat()[:10].split("-")
    [a, b, c] = [int(v) for v in iso]
    # assertEqual replaces the deprecated assertEquals alias
    # (removed in Python 3.12); the diff left both variants behind.
    self.assertEqual(datetime_f(datetime(a, b, c)), "<nobr>00:00:00</nobr>")
def test_format_timedelta(self):
    """format_timedelta should mirror str(timedelta) for non-negative
    deltas and produce a readable signed form for negative ones."""
    self.assertEqual(format_timedelta(timedelta(0)), "0:00:00")
    self.assertEqual(format_timedelta(timedelta(days=1)), "1 day, 0:00:00")
    # Negative deltas keep their magnitude readable instead of Python's
    # native "-1 day, 23:54:00" representation.
    self.assertEqual(format_timedelta(timedelta(minutes=-6)), "-0:06:00")
    self.assertEqual(
        format_timedelta(timedelta(0) - timedelta(days=1, hours=5, minutes=6)),
        "-1 day, 5:06:00",
    )
    # Plural "days" for magnitudes beyond a single day.
    # assertEqual replaces the deprecated assertEquals alias (removed in
    # Python 3.12); the diff rendering had left both spellings in place.
    self.assertEqual(
        format_timedelta(timedelta(0) - timedelta(days=16, hours=4, minutes=3)),
        "-16 days, 4:03:00",
    )
@ -557,7 +557,7 @@ class UtilsTestCase(unittest.TestCase):
resp = jsonObj.process_bind_param(obj, "dialect")
self.assertIn('"a": 5', resp)
self.assertIn('"b": ["a", "g", 5]', resp)
self.assertEquals(jsonObj.process_result_value(val, "dialect"), obj)
self.assertEqual(jsonObj.process_result_value(val, "dialect"), obj)
def test_validate_json(self):
invalid = '{"a": 5, "b": [1, 5, ["g", "h]]}'
@ -574,8 +574,8 @@ class UtilsTestCase(unittest.TestCase):
result1 = test_function(1, 2, 3)
result2 = test_function(1, 2, 3)
self.assertEquals(result1, result2)
self.assertEquals(watcher["val"], 1)
self.assertEqual(result1, result2)
self.assertEqual(watcher["val"], 1)
def test_memoized_on_methods(self):
class test_class:
@ -591,10 +591,10 @@ class UtilsTestCase(unittest.TestCase):
instance = test_class(5)
result1 = instance.test_method(1, 2, 3)
result2 = instance.test_method(1, 2, 3)
self.assertEquals(result1, result2)
self.assertEquals(instance.watcher, 1)
self.assertEqual(result1, result2)
self.assertEqual(instance.watcher, 1)
instance.num = 10
self.assertEquals(result2, instance.test_method(1, 2, 3))
self.assertEqual(result2, instance.test_method(1, 2, 3))
def test_memoized_on_methods_with_watches(self):
class test_class:
@ -611,13 +611,13 @@ class UtilsTestCase(unittest.TestCase):
instance = test_class(3, 12)
result1 = instance.test_method(1, 2, 3)
result2 = instance.test_method(1, 2, 3)
self.assertEquals(result1, result2)
self.assertEquals(instance.watcher, 1)
self.assertEqual(result1, result2)
self.assertEqual(instance.watcher, 1)
result3 = instance.test_method(2, 3, 4)
self.assertEquals(instance.watcher, 2)
self.assertEqual(instance.watcher, 2)
result4 = instance.test_method(2, 3, 4)
self.assertEquals(instance.watcher, 2)
self.assertEquals(result3, result4)
self.assertEqual(instance.watcher, 2)
self.assertEqual(result3, result4)
self.assertNotEqual(result3, result1)
instance.x = 1
result5 = instance.test_method(2, 3, 4)
@ -707,7 +707,7 @@ class UtilsTestCase(unittest.TestCase):
]
}
convert_legacy_filters_into_adhoc(form_data)
self.assertEquals(form_data, expected)
self.assertEqual(form_data, expected)
@patch("superset.utils.core.to_adhoc", mock_to_adhoc)
def test_convert_legacy_filters_into_adhoc_filters(self):
@ -724,7 +724,7 @@ class UtilsTestCase(unittest.TestCase):
]
}
convert_legacy_filters_into_adhoc(form_data)
self.assertEquals(form_data, expected)
self.assertEqual(form_data, expected)
@patch("superset.utils.core.to_adhoc", mock_to_adhoc)
def test_convert_legacy_filters_into_adhoc_having(self):
@ -739,7 +739,7 @@ class UtilsTestCase(unittest.TestCase):
]
}
convert_legacy_filters_into_adhoc(form_data)
self.assertEquals(form_data, expected)
self.assertEqual(form_data, expected)
@patch("superset.utils.core.to_adhoc", mock_to_adhoc)
def test_convert_legacy_filters_into_adhoc_having_filters(self):
@ -756,7 +756,7 @@ class UtilsTestCase(unittest.TestCase):
]
}
convert_legacy_filters_into_adhoc(form_data)
self.assertEquals(form_data, expected)
self.assertEqual(form_data, expected)
@patch("superset.utils.core.to_adhoc", mock_to_adhoc)
def test_convert_legacy_filters_into_adhoc_present_and_empty(self):
@ -767,7 +767,7 @@ class UtilsTestCase(unittest.TestCase):
]
}
convert_legacy_filters_into_adhoc(form_data)
self.assertEquals(form_data, expected)
self.assertEqual(form_data, expected)
@patch("superset.utils.core.to_adhoc", mock_to_adhoc)
def test_convert_legacy_filters_into_adhoc_present_and_nonempty(self):
@ -785,7 +785,7 @@ class UtilsTestCase(unittest.TestCase):
]
}
convert_legacy_filters_into_adhoc(form_data)
self.assertEquals(form_data, expected)
self.assertEqual(form_data, expected)
def test_parse_js_uri_path_items_eval_undefined(self):
    # The JavaScript literal string "undefined" should be translated to
    # Python None when eval_undefined is enabled.
    self.assertIsNone(parse_js_uri_path_item("undefined", eval_undefined=True))

View File

@ -930,14 +930,14 @@ class BaseDeckGLVizTestCase(SupersetTestCase):
viz_instance = viz.BaseDeckGLViz(datasource, form_data)
coord = viz_instance.parse_coordinates("1.23, 3.21")
self.assertEquals(coord, (1.23, 3.21))
self.assertEqual(coord, (1.23, 3.21))
coord = viz_instance.parse_coordinates("1.23 3.21")
self.assertEquals(coord, (1.23, 3.21))
self.assertEqual(coord, (1.23, 3.21))
self.assertEquals(viz_instance.parse_coordinates(None), None)
self.assertEqual(viz_instance.parse_coordinates(None), None)
self.assertEquals(viz_instance.parse_coordinates(""), None)
self.assertEqual(viz_instance.parse_coordinates(""), None)
def test_parse_coordinates_raises(self):
form_data = load_fixture("deck_path_form_data.json")