# import_export_tests.py
  1. # Licensed to the Apache Software Foundation (ASF) under one
  2. # or more contributor license agreements. See the NOTICE file
  3. # distributed with this work for additional information
  4. # regarding copyright ownership. The ASF licenses this file
  5. # to you under the Apache License, Version 2.0 (the
  6. # "License"); you may not use this file except in compliance
  7. # with the License. You may obtain a copy of the License at
  8. #
  9. # http://www.apache.org/licenses/LICENSE-2.0
  10. #
  11. # Unless required by applicable law or agreed to in writing,
  12. # software distributed under the License is distributed on an
  13. # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
  14. # KIND, either express or implied. See the License for the
  15. # specific language governing permissions and limitations
  16. # under the License.
  17. # isort:skip_file
  18. """Unit tests for Superset"""
  19. import json
  20. import unittest
  21. from flask import g
  22. from sqlalchemy.orm.session import make_transient
  23. from tests.test_app import app
  24. from superset.utils.dashboard_import_export import decode_dashboards
  25. from superset import db, security_manager
  26. from superset.connectors.druid.models import (
  27. DruidColumn,
  28. DruidDatasource,
  29. DruidMetric,
  30. DruidCluster,
  31. )
  32. from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn
  33. from superset.models.dashboard import Dashboard
  34. from superset.models.slice import Slice
  35. from .base_tests import SupersetTestCase
  36. class ImportExportTests(SupersetTestCase):
  37. """Testing export import functionality for dashboards"""
  38. @classmethod
  39. def delete_imports(cls):
  40. with app.app_context():
  41. # Imported data clean up
  42. session = db.session
  43. for slc in session.query(Slice):
  44. if "remote_id" in slc.params_dict:
  45. session.delete(slc)
  46. for dash in session.query(Dashboard):
  47. if "remote_id" in dash.params_dict:
  48. session.delete(dash)
  49. for table in session.query(SqlaTable):
  50. if "remote_id" in table.params_dict:
  51. session.delete(table)
  52. for datasource in session.query(DruidDatasource):
  53. if "remote_id" in datasource.params_dict:
  54. session.delete(datasource)
  55. session.commit()
  56. @classmethod
  57. def setUpClass(cls):
  58. cls.delete_imports()
  59. cls.create_druid_test_objects()
  60. @classmethod
  61. def tearDownClass(cls):
  62. cls.delete_imports()
  63. def create_slice(
  64. self,
  65. name,
  66. ds_id=None,
  67. id=None,
  68. db_name="examples",
  69. table_name="wb_health_population",
  70. ):
  71. params = {
  72. "num_period_compare": "10",
  73. "remote_id": id,
  74. "datasource_name": table_name,
  75. "database_name": db_name,
  76. "schema": "",
  77. # Test for trailing commas
  78. "metrics": ["sum__signup_attempt_email", "sum__signup_attempt_facebook"],
  79. }
  80. if table_name and not ds_id:
  81. table = self.get_table_by_name(table_name)
  82. if table:
  83. ds_id = table.id
  84. return Slice(
  85. slice_name=name,
  86. datasource_type="table",
  87. viz_type="bubble",
  88. params=json.dumps(params),
  89. datasource_id=ds_id,
  90. id=id,
  91. )
  92. def create_dashboard(self, title, id=0, slcs=[]):
  93. json_metadata = {"remote_id": id}
  94. return Dashboard(
  95. id=id,
  96. dashboard_title=title,
  97. slices=slcs,
  98. position_json='{"size_y": 2, "size_x": 2}',
  99. slug="{}_imported".format(title.lower()),
  100. json_metadata=json.dumps(json_metadata),
  101. )
  102. def create_table(self, name, schema="", id=0, cols_names=[], metric_names=[]):
  103. params = {"remote_id": id, "database_name": "examples"}
  104. table = SqlaTable(
  105. id=id, schema=schema, table_name=name, params=json.dumps(params)
  106. )
  107. for col_name in cols_names:
  108. table.columns.append(TableColumn(column_name=col_name))
  109. for metric_name in metric_names:
  110. table.metrics.append(SqlMetric(metric_name=metric_name, expression=""))
  111. return table
  112. def create_druid_datasource(self, name, id=0, cols_names=[], metric_names=[]):
  113. cluster_name = "druid_test"
  114. cluster = self.get_or_create(
  115. DruidCluster, {"cluster_name": cluster_name}, db.session
  116. )
  117. params = {"remote_id": id, "database_name": cluster_name}
  118. datasource = DruidDatasource(
  119. id=id,
  120. datasource_name=name,
  121. cluster_id=cluster.id,
  122. params=json.dumps(params),
  123. )
  124. for col_name in cols_names:
  125. datasource.columns.append(DruidColumn(column_name=col_name))
  126. for metric_name in metric_names:
  127. datasource.metrics.append(DruidMetric(metric_name=metric_name, json="{}"))
  128. return datasource
  129. def get_slice(self, slc_id):
  130. return db.session.query(Slice).filter_by(id=slc_id).first()
  131. def get_slice_by_name(self, name):
  132. return db.session.query(Slice).filter_by(slice_name=name).first()
  133. def get_dash(self, dash_id):
  134. return db.session.query(Dashboard).filter_by(id=dash_id).first()
  135. def get_datasource(self, datasource_id):
  136. return db.session.query(DruidDatasource).filter_by(id=datasource_id).first()
  137. def get_table_by_name(self, name):
  138. return db.session.query(SqlaTable).filter_by(table_name=name).first()
  139. def assert_dash_equals(self, expected_dash, actual_dash, check_position=True):
  140. self.assertEqual(expected_dash.slug, actual_dash.slug)
  141. self.assertEqual(expected_dash.dashboard_title, actual_dash.dashboard_title)
  142. self.assertEqual(len(expected_dash.slices), len(actual_dash.slices))
  143. expected_slices = sorted(expected_dash.slices, key=lambda s: s.slice_name or "")
  144. actual_slices = sorted(actual_dash.slices, key=lambda s: s.slice_name or "")
  145. for e_slc, a_slc in zip(expected_slices, actual_slices):
  146. self.assert_slice_equals(e_slc, a_slc)
  147. if check_position:
  148. self.assertEqual(expected_dash.position_json, actual_dash.position_json)
  149. def assert_table_equals(self, expected_ds, actual_ds):
  150. self.assertEqual(expected_ds.table_name, actual_ds.table_name)
  151. self.assertEqual(expected_ds.main_dttm_col, actual_ds.main_dttm_col)
  152. self.assertEqual(expected_ds.schema, actual_ds.schema)
  153. self.assertEqual(len(expected_ds.metrics), len(actual_ds.metrics))
  154. self.assertEqual(len(expected_ds.columns), len(actual_ds.columns))
  155. self.assertEqual(
  156. set([c.column_name for c in expected_ds.columns]),
  157. set([c.column_name for c in actual_ds.columns]),
  158. )
  159. self.assertEqual(
  160. set([m.metric_name for m in expected_ds.metrics]),
  161. set([m.metric_name for m in actual_ds.metrics]),
  162. )
  163. def assert_datasource_equals(self, expected_ds, actual_ds):
  164. self.assertEqual(expected_ds.datasource_name, actual_ds.datasource_name)
  165. self.assertEqual(expected_ds.main_dttm_col, actual_ds.main_dttm_col)
  166. self.assertEqual(len(expected_ds.metrics), len(actual_ds.metrics))
  167. self.assertEqual(len(expected_ds.columns), len(actual_ds.columns))
  168. self.assertEqual(
  169. set([c.column_name for c in expected_ds.columns]),
  170. set([c.column_name for c in actual_ds.columns]),
  171. )
  172. self.assertEqual(
  173. set([m.metric_name for m in expected_ds.metrics]),
  174. set([m.metric_name for m in actual_ds.metrics]),
  175. )
  176. def assert_slice_equals(self, expected_slc, actual_slc):
  177. # to avoid bad slice data (no slice_name)
  178. expected_slc_name = expected_slc.slice_name or ""
  179. actual_slc_name = actual_slc.slice_name or ""
  180. self.assertEqual(expected_slc_name, actual_slc_name)
  181. self.assertEqual(expected_slc.datasource_type, actual_slc.datasource_type)
  182. self.assertEqual(expected_slc.viz_type, actual_slc.viz_type)
  183. exp_params = json.loads(expected_slc.params)
  184. actual_params = json.loads(actual_slc.params)
  185. diff_params_keys = (
  186. "schema",
  187. "database_name",
  188. "datasource_name",
  189. "remote_id",
  190. "import_time",
  191. )
  192. for k in diff_params_keys:
  193. if k in actual_params:
  194. actual_params.pop(k)
  195. if k in exp_params:
  196. exp_params.pop(k)
  197. self.assertEqual(exp_params, actual_params)
  198. def assert_only_exported_slc_fields(self, expected_dash, actual_dash):
  199. """ only exported json has this params
  200. imported/created dashboard has relationships to other models instead
  201. """
  202. expected_slices = sorted(expected_dash.slices, key=lambda s: s.slice_name or "")
  203. actual_slices = sorted(actual_dash.slices, key=lambda s: s.slice_name or "")
  204. for e_slc, a_slc in zip(expected_slices, actual_slices):
  205. params = a_slc.params_dict
  206. self.assertEqual(e_slc.datasource.name, params["datasource_name"])
  207. self.assertEqual(e_slc.datasource.schema, params["schema"])
  208. self.assertEqual(e_slc.datasource.database.name, params["database_name"])
  209. def test_export_1_dashboard(self):
  210. self.login("admin")
  211. birth_dash = self.get_dash_by_slug("births")
  212. id_ = birth_dash.id
  213. export_dash_url = f"/dashboard/export_dashboards_form?id={id_}&action=go"
  214. resp = self.client.get(export_dash_url)
  215. exported_dashboards = json.loads(
  216. resp.data.decode("utf-8"), object_hook=decode_dashboards
  217. )["dashboards"]
  218. birth_dash = self.get_dash_by_slug("births")
  219. self.assert_only_exported_slc_fields(birth_dash, exported_dashboards[0])
  220. self.assert_dash_equals(birth_dash, exported_dashboards[0])
  221. self.assertEqual(
  222. id_,
  223. json.loads(
  224. exported_dashboards[0].json_metadata, object_hook=decode_dashboards
  225. )["remote_id"],
  226. )
  227. exported_tables = json.loads(
  228. resp.data.decode("utf-8"), object_hook=decode_dashboards
  229. )["datasources"]
  230. self.assertEqual(1, len(exported_tables))
  231. self.assert_table_equals(
  232. self.get_table_by_name("birth_names"), exported_tables[0]
  233. )
  234. def test_export_2_dashboards(self):
  235. self.login("admin")
  236. birth_dash = self.get_dash_by_slug("births")
  237. world_health_dash = self.get_dash_by_slug("world_health")
  238. export_dash_url = "/dashboard/export_dashboards_form?id={}&id={}&action=go".format(
  239. birth_dash.id, world_health_dash.id
  240. )
  241. resp = self.client.get(export_dash_url)
  242. resp_data = json.loads(resp.data.decode("utf-8"), object_hook=decode_dashboards)
  243. exported_dashboards = sorted(
  244. resp_data.get("dashboards"), key=lambda d: d.dashboard_title
  245. )
  246. self.assertEqual(2, len(exported_dashboards))
  247. birth_dash = self.get_dash_by_slug("births")
  248. self.assert_only_exported_slc_fields(birth_dash, exported_dashboards[0])
  249. self.assert_dash_equals(birth_dash, exported_dashboards[0])
  250. self.assertEqual(
  251. birth_dash.id, json.loads(exported_dashboards[0].json_metadata)["remote_id"]
  252. )
  253. world_health_dash = self.get_dash_by_slug("world_health")
  254. self.assert_only_exported_slc_fields(world_health_dash, exported_dashboards[1])
  255. self.assert_dash_equals(world_health_dash, exported_dashboards[1])
  256. self.assertEqual(
  257. world_health_dash.id,
  258. json.loads(exported_dashboards[1].json_metadata)["remote_id"],
  259. )
  260. exported_tables = sorted(
  261. resp_data.get("datasources"), key=lambda t: t.table_name
  262. )
  263. self.assertEqual(2, len(exported_tables))
  264. self.assert_table_equals(
  265. self.get_table_by_name("birth_names"), exported_tables[0]
  266. )
  267. self.assert_table_equals(
  268. self.get_table_by_name("wb_health_population"), exported_tables[1]
  269. )
  270. def test_import_1_slice(self):
  271. expected_slice = self.create_slice("Import Me", id=10001)
  272. slc_id = Slice.import_obj(expected_slice, None, import_time=1989)
  273. slc = self.get_slice(slc_id)
  274. self.assertEqual(slc.datasource.perm, slc.perm)
  275. self.assert_slice_equals(expected_slice, slc)
  276. table_id = self.get_table_by_name("wb_health_population").id
  277. self.assertEqual(table_id, self.get_slice(slc_id).datasource_id)
  278. def test_import_2_slices_for_same_table(self):
  279. table_id = self.get_table_by_name("wb_health_population").id
  280. # table_id != 666, import func will have to find the table
  281. slc_1 = self.create_slice("Import Me 1", ds_id=666, id=10002)
  282. slc_id_1 = Slice.import_obj(slc_1, None)
  283. slc_2 = self.create_slice("Import Me 2", ds_id=666, id=10003)
  284. slc_id_2 = Slice.import_obj(slc_2, None)
  285. imported_slc_1 = self.get_slice(slc_id_1)
  286. imported_slc_2 = self.get_slice(slc_id_2)
  287. self.assertEqual(table_id, imported_slc_1.datasource_id)
  288. self.assert_slice_equals(slc_1, imported_slc_1)
  289. self.assertEqual(imported_slc_1.datasource.perm, imported_slc_1.perm)
  290. self.assertEqual(table_id, imported_slc_2.datasource_id)
  291. self.assert_slice_equals(slc_2, imported_slc_2)
  292. self.assertEqual(imported_slc_2.datasource.perm, imported_slc_2.perm)
  293. def test_import_slices_for_non_existent_table(self):
  294. with self.assertRaises(AttributeError):
  295. Slice.import_obj(
  296. self.create_slice("Import Me 3", id=10004, table_name="non_existent"),
  297. None,
  298. )
  299. def test_import_slices_override(self):
  300. slc = self.create_slice("Import Me New", id=10005)
  301. slc_1_id = Slice.import_obj(slc, None, import_time=1990)
  302. slc.slice_name = "Import Me New"
  303. imported_slc_1 = self.get_slice(slc_1_id)
  304. slc_2 = self.create_slice("Import Me New", id=10005)
  305. slc_2_id = Slice.import_obj(slc_2, imported_slc_1, import_time=1990)
  306. self.assertEqual(slc_1_id, slc_2_id)
  307. imported_slc_2 = self.get_slice(slc_2_id)
  308. self.assert_slice_equals(slc, imported_slc_2)
  309. def test_import_empty_dashboard(self):
  310. empty_dash = self.create_dashboard("empty_dashboard", id=10001)
  311. imported_dash_id = Dashboard.import_obj(empty_dash, import_time=1989)
  312. imported_dash = self.get_dash(imported_dash_id)
  313. self.assert_dash_equals(empty_dash, imported_dash, check_position=False)
    def test_import_dashboard_1_slice(self):
        """Importing a one-slice dashboard rewrites the chart id inside
        position_json to the newly assigned slice id."""
        slc = self.create_slice("health_slc", id=10006)
        dash_with_1_slice = self.create_dashboard(
            "dash_with_1_slice", slcs=[slc], id=10002
        )
        # v2 position payload; {{ }} are literal braces under str.format,
        # {0} is the (pre-import) slice id
        dash_with_1_slice.position_json = """
        {{"DASHBOARD_VERSION_KEY": "v2",
        "DASHBOARD_CHART_TYPE-{0}": {{
            "type": "DASHBOARD_CHART_TYPE",
            "id": {0},
            "children": [],
            "meta": {{
                "width": 4,
                "height": 50,
                "chartId": {0}
            }}
        }}
        }}
        """.format(
            slc.id
        )
        imported_dash_id = Dashboard.import_obj(dash_with_1_slice, import_time=1990)
        imported_dash = self.get_dash(imported_dash_id)
        expected_dash = self.create_dashboard("dash_with_1_slice", slcs=[slc], id=10002)
        make_transient(expected_dash)
        self.assert_dash_equals(expected_dash, imported_dash, check_position=False)
        self.assertEqual(
            {"remote_id": 10002, "import_time": 1990},
            json.loads(imported_dash.json_metadata),
        )
        expected_position = dash_with_1_slice.position
        # new slice id (auto-incremental) assigned on insert
        # id from json is used only for updating position with new id
        meta = expected_position["DASHBOARD_CHART_TYPE-10006"]["meta"]
        meta["chartId"] = imported_dash.slices[0].id
        self.assertEqual(expected_position, imported_dash.position)
  350. def test_import_dashboard_2_slices(self):
  351. e_slc = self.create_slice("e_slc", id=10007, table_name="energy_usage")
  352. b_slc = self.create_slice("b_slc", id=10008, table_name="birth_names")
  353. dash_with_2_slices = self.create_dashboard(
  354. "dash_with_2_slices", slcs=[e_slc, b_slc], id=10003
  355. )
  356. dash_with_2_slices.json_metadata = json.dumps(
  357. {
  358. "remote_id": 10003,
  359. "expanded_slices": {
  360. "{}".format(e_slc.id): True,
  361. "{}".format(b_slc.id): False,
  362. },
  363. # mocked filter_scope metadata
  364. "filter_scopes": {
  365. str(e_slc.id): {
  366. "region": {"scope": ["ROOT_ID"], "immune": [b_slc.id]}
  367. }
  368. },
  369. }
  370. )
  371. imported_dash_id = Dashboard.import_obj(dash_with_2_slices, import_time=1991)
  372. imported_dash = self.get_dash(imported_dash_id)
  373. expected_dash = self.create_dashboard(
  374. "dash_with_2_slices", slcs=[e_slc, b_slc], id=10003
  375. )
  376. make_transient(expected_dash)
  377. self.assert_dash_equals(imported_dash, expected_dash, check_position=False)
  378. i_e_slc = self.get_slice_by_name("e_slc")
  379. i_b_slc = self.get_slice_by_name("b_slc")
  380. expected_json_metadata = {
  381. "remote_id": 10003,
  382. "import_time": 1991,
  383. "filter_scopes": {
  384. str(i_e_slc.id): {
  385. "region": {"scope": ["ROOT_ID"], "immune": [i_b_slc.id]}
  386. }
  387. },
  388. "expanded_slices": {
  389. "{}".format(i_e_slc.id): True,
  390. "{}".format(i_b_slc.id): False,
  391. },
  392. }
  393. self.assertEqual(
  394. expected_json_metadata, json.loads(imported_dash.json_metadata)
  395. )
  396. def test_import_override_dashboard_2_slices(self):
  397. e_slc = self.create_slice("e_slc", id=10009, table_name="energy_usage")
  398. b_slc = self.create_slice("b_slc", id=10010, table_name="birth_names")
  399. dash_to_import = self.create_dashboard(
  400. "override_dashboard", slcs=[e_slc, b_slc], id=10004
  401. )
  402. imported_dash_id_1 = Dashboard.import_obj(dash_to_import, import_time=1992)
  403. # create new instances of the slices
  404. e_slc = self.create_slice("e_slc", id=10009, table_name="energy_usage")
  405. b_slc = self.create_slice("b_slc", id=10010, table_name="birth_names")
  406. c_slc = self.create_slice("c_slc", id=10011, table_name="birth_names")
  407. dash_to_import_override = self.create_dashboard(
  408. "override_dashboard_new", slcs=[e_slc, b_slc, c_slc], id=10004
  409. )
  410. imported_dash_id_2 = Dashboard.import_obj(
  411. dash_to_import_override, import_time=1992
  412. )
  413. # override doesn't change the id
  414. self.assertEqual(imported_dash_id_1, imported_dash_id_2)
  415. expected_dash = self.create_dashboard(
  416. "override_dashboard_new", slcs=[e_slc, b_slc, c_slc], id=10004
  417. )
  418. make_transient(expected_dash)
  419. imported_dash = self.get_dash(imported_dash_id_2)
  420. self.assert_dash_equals(expected_dash, imported_dash, check_position=False)
  421. self.assertEqual(
  422. {"remote_id": 10004, "import_time": 1992},
  423. json.loads(imported_dash.json_metadata),
  424. )
  425. def test_import_new_dashboard_slice_reset_ownership(self):
  426. admin_user = security_manager.find_user(username="admin")
  427. self.assertTrue(admin_user)
  428. gamma_user = security_manager.find_user(username="gamma")
  429. self.assertTrue(gamma_user)
  430. g.user = gamma_user
  431. dash_with_1_slice = self._create_dashboard_for_import(id_=10200)
  432. # set another user as an owner of importing dashboard
  433. dash_with_1_slice.created_by = admin_user
  434. dash_with_1_slice.changed_by = admin_user
  435. dash_with_1_slice.owners = [admin_user]
  436. imported_dash_id = Dashboard.import_obj(dash_with_1_slice)
  437. imported_dash = self.get_dash(imported_dash_id)
  438. self.assertEqual(imported_dash.created_by, gamma_user)
  439. self.assertEqual(imported_dash.changed_by, gamma_user)
  440. self.assertEqual(imported_dash.owners, [gamma_user])
  441. imported_slc = imported_dash.slices[0]
  442. self.assertEqual(imported_slc.created_by, gamma_user)
  443. self.assertEqual(imported_slc.changed_by, gamma_user)
  444. self.assertEqual(imported_slc.owners, [gamma_user])
  445. def test_import_override_dashboard_slice_reset_ownership(self):
  446. admin_user = security_manager.find_user(username="admin")
  447. self.assertTrue(admin_user)
  448. gamma_user = security_manager.find_user(username="gamma")
  449. self.assertTrue(gamma_user)
  450. g.user = gamma_user
  451. dash_with_1_slice = self._create_dashboard_for_import(id_=10300)
  452. imported_dash_id = Dashboard.import_obj(dash_with_1_slice)
  453. imported_dash = self.get_dash(imported_dash_id)
  454. self.assertEqual(imported_dash.created_by, gamma_user)
  455. self.assertEqual(imported_dash.changed_by, gamma_user)
  456. self.assertEqual(imported_dash.owners, [gamma_user])
  457. imported_slc = imported_dash.slices[0]
  458. self.assertEqual(imported_slc.created_by, gamma_user)
  459. self.assertEqual(imported_slc.changed_by, gamma_user)
  460. self.assertEqual(imported_slc.owners, [gamma_user])
  461. # re-import with another user shouldn't change the permissions
  462. g.user = admin_user
  463. dash_with_1_slice = self._create_dashboard_for_import(id_=10300)
  464. imported_dash_id = Dashboard.import_obj(dash_with_1_slice)
  465. imported_dash = self.get_dash(imported_dash_id)
  466. self.assertEqual(imported_dash.created_by, gamma_user)
  467. self.assertEqual(imported_dash.changed_by, gamma_user)
  468. self.assertEqual(imported_dash.owners, [gamma_user])
  469. imported_slc = imported_dash.slices[0]
  470. self.assertEqual(imported_slc.created_by, gamma_user)
  471. self.assertEqual(imported_slc.changed_by, gamma_user)
  472. self.assertEqual(imported_slc.owners, [gamma_user])
    def _create_dashboard_for_import(self, id_=10100):
        """Build a one-slice dashboard (ids derived from *id_*) carrying a v2
        position_json payload, ready for Dashboard.import_obj."""
        slc = self.create_slice("health_slc" + str(id_), id=id_ + 1)
        dash_with_1_slice = self.create_dashboard(
            "dash_with_1_slice" + str(id_), slcs=[slc], id=id_ + 2
        )
        # {{ }} are literal braces under str.format; {0} is the slice id
        dash_with_1_slice.position_json = """
        {{"DASHBOARD_VERSION_KEY": "v2",
        "DASHBOARD_CHART_TYPE-{0}": {{
            "type": "DASHBOARD_CHART_TYPE",
            "id": {0},
            "children": [],
            "meta": {{
                "width": 4,
                "height": 50,
                "chartId": {0}
            }}
        }}
        }}
        """.format(
            slc.id
        )
        return dash_with_1_slice
  495. def test_import_table_no_metadata(self):
  496. table = self.create_table("pure_table", id=10001)
  497. imported_id = SqlaTable.import_obj(table, import_time=1989)
  498. imported = self.get_table(imported_id)
  499. self.assert_table_equals(table, imported)
  500. def test_import_table_1_col_1_met(self):
  501. table = self.create_table(
  502. "table_1_col_1_met", id=10002, cols_names=["col1"], metric_names=["metric1"]
  503. )
  504. imported_id = SqlaTable.import_obj(table, import_time=1990)
  505. imported = self.get_table(imported_id)
  506. self.assert_table_equals(table, imported)
  507. self.assertEqual(
  508. {"remote_id": 10002, "import_time": 1990, "database_name": "examples"},
  509. json.loads(imported.params),
  510. )
  511. def test_import_table_2_col_2_met(self):
  512. table = self.create_table(
  513. "table_2_col_2_met",
  514. id=10003,
  515. cols_names=["c1", "c2"],
  516. metric_names=["m1", "m2"],
  517. )
  518. imported_id = SqlaTable.import_obj(table, import_time=1991)
  519. imported = self.get_table(imported_id)
  520. self.assert_table_equals(table, imported)
  521. def test_import_table_override(self):
  522. table = self.create_table(
  523. "table_override", id=10003, cols_names=["col1"], metric_names=["m1"]
  524. )
  525. imported_id = SqlaTable.import_obj(table, import_time=1991)
  526. table_over = self.create_table(
  527. "table_override",
  528. id=10003,
  529. cols_names=["new_col1", "col2", "col3"],
  530. metric_names=["new_metric1"],
  531. )
  532. imported_over_id = SqlaTable.import_obj(table_over, import_time=1992)
  533. imported_over = self.get_table(imported_over_id)
  534. self.assertEqual(imported_id, imported_over.id)
  535. expected_table = self.create_table(
  536. "table_override",
  537. id=10003,
  538. metric_names=["new_metric1", "m1"],
  539. cols_names=["col1", "new_col1", "col2", "col3"],
  540. )
  541. self.assert_table_equals(expected_table, imported_over)
  542. def test_import_table_override_identical(self):
  543. table = self.create_table(
  544. "copy_cat",
  545. id=10004,
  546. cols_names=["new_col1", "col2", "col3"],
  547. metric_names=["new_metric1"],
  548. )
  549. imported_id = SqlaTable.import_obj(table, import_time=1993)
  550. copy_table = self.create_table(
  551. "copy_cat",
  552. id=10004,
  553. cols_names=["new_col1", "col2", "col3"],
  554. metric_names=["new_metric1"],
  555. )
  556. imported_id_copy = SqlaTable.import_obj(copy_table, import_time=1994)
  557. self.assertEqual(imported_id, imported_id_copy)
  558. self.assert_table_equals(copy_table, self.get_table(imported_id))
  559. def test_import_druid_no_metadata(self):
  560. datasource = self.create_druid_datasource("pure_druid", id=10001)
  561. imported_id = DruidDatasource.import_obj(datasource, import_time=1989)
  562. imported = self.get_datasource(imported_id)
  563. self.assert_datasource_equals(datasource, imported)
  564. def test_import_druid_1_col_1_met(self):
  565. datasource = self.create_druid_datasource(
  566. "druid_1_col_1_met", id=10002, cols_names=["col1"], metric_names=["metric1"]
  567. )
  568. imported_id = DruidDatasource.import_obj(datasource, import_time=1990)
  569. imported = self.get_datasource(imported_id)
  570. self.assert_datasource_equals(datasource, imported)
  571. self.assertEqual(
  572. {"remote_id": 10002, "import_time": 1990, "database_name": "druid_test"},
  573. json.loads(imported.params),
  574. )
  575. def test_import_druid_2_col_2_met(self):
  576. datasource = self.create_druid_datasource(
  577. "druid_2_col_2_met",
  578. id=10003,
  579. cols_names=["c1", "c2"],
  580. metric_names=["m1", "m2"],
  581. )
  582. imported_id = DruidDatasource.import_obj(datasource, import_time=1991)
  583. imported = self.get_datasource(imported_id)
  584. self.assert_datasource_equals(datasource, imported)
  585. def test_import_druid_override(self):
  586. datasource = self.create_druid_datasource(
  587. "druid_override", id=10004, cols_names=["col1"], metric_names=["m1"]
  588. )
  589. imported_id = DruidDatasource.import_obj(datasource, import_time=1991)
  590. table_over = self.create_druid_datasource(
  591. "druid_override",
  592. id=10004,
  593. cols_names=["new_col1", "col2", "col3"],
  594. metric_names=["new_metric1"],
  595. )
  596. imported_over_id = DruidDatasource.import_obj(table_over, import_time=1992)
  597. imported_over = self.get_datasource(imported_over_id)
  598. self.assertEqual(imported_id, imported_over.id)
  599. expected_datasource = self.create_druid_datasource(
  600. "druid_override",
  601. id=10004,
  602. metric_names=["new_metric1", "m1"],
  603. cols_names=["col1", "new_col1", "col2", "col3"],
  604. )
  605. self.assert_datasource_equals(expected_datasource, imported_over)
  606. def test_import_druid_override_identical(self):
  607. datasource = self.create_druid_datasource(
  608. "copy_cat",
  609. id=10005,
  610. cols_names=["new_col1", "col2", "col3"],
  611. metric_names=["new_metric1"],
  612. )
  613. imported_id = DruidDatasource.import_obj(datasource, import_time=1993)
  614. copy_datasource = self.create_druid_datasource(
  615. "copy_cat",
  616. id=10005,
  617. cols_names=["new_col1", "col2", "col3"],
  618. metric_names=["new_metric1"],
  619. )
  620. imported_id_copy = DruidDatasource.import_obj(copy_datasource, import_time=1994)
  621. self.assertEqual(imported_id, imported_id_copy)
  622. self.assert_datasource_equals(copy_datasource, self.get_datasource(imported_id))
# Allow running this module directly with `python import_export_tests.py`.
if __name__ == "__main__":
    unittest.main()