dashboard_import_export.py

# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=C,R,W
import json
import logging
import time
from datetime import datetime

from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn
from superset.models.dashboard import Dashboard
from superset.models.slice import Slice

logger = logging.getLogger(__name__)


def decode_dashboards(o):
    """
    Function to be passed into the json.loads object_hook parameter.
    Recreates the dashboard objects from their JSON representation.
    """
    import superset.models.core as models

    if "__Dashboard__" in o:
        return Dashboard(**o["__Dashboard__"])
    elif "__Slice__" in o:
        return Slice(**o["__Slice__"])
    elif "__TableColumn__" in o:
        return TableColumn(**o["__TableColumn__"])
    elif "__SqlaTable__" in o:
        return SqlaTable(**o["__SqlaTable__"])
    elif "__SqlMetric__" in o:
        return SqlMetric(**o["__SqlMetric__"])
    elif "__datetime__" in o:
        return datetime.strptime(o["__datetime__"], "%Y-%m-%dT%H:%M:%S")
    else:
        return o
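

# A minimal sketch (not part of the original module) of how decode_dashboards
# plugs into json.loads: every exported object is wrapped under a type-tag key
# such as "__Dashboard__", "__Slice__" or "__datetime__", and the hook rebuilds
# the matching object for each decoded dict. The payload below is illustrative
# only; real exports carry many more fields.
def _decode_dashboards_example():
    payload = '{"__datetime__": "2019-01-01T00:00:00"}'
    # Returns a datetime.datetime instance instead of a plain dict.
    return json.loads(payload, object_hook=decode_dashboards)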


def import_dashboards(session, data_stream, import_time=None):
    """Imports dashboards from a stream to databases"""
    current_tt = int(time.time())
    import_time = current_tt if import_time is None else import_time
    data = json.loads(data_stream.read(), object_hook=decode_dashboards)
    # TODO: import DRUID datasources
    for table in data["datasources"]:
        type(table).import_obj(table, import_time=import_time)
    session.commit()
    for dashboard in data["dashboards"]:
        Dashboard.import_obj(dashboard, import_time=import_time)
    session.commit()
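

# A hedged usage sketch, not part of the original module: import_dashboards
# only needs a SQLAlchemy session and a readable stream containing an export
# file. "from superset import db" and the file name are assumptions here.
def _import_dashboards_example():
    from superset import db  # assumed to be available in a Superset install

    with open("dashboards.json") as data_stream:
        import_dashboards(db.session, data_stream)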


def export_dashboards(session):
    """Returns all dashboards metadata as a json dump"""
    logger.info("Starting export")
    dashboards = session.query(Dashboard)
    dashboard_ids = []
    for dashboard in dashboards:
        dashboard_ids.append(dashboard.id)
    data = Dashboard.export_dashboards(dashboard_ids)
    return data
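

# A hedged usage sketch, not part of the original module: export_dashboards
# returns the dashboards metadata as a JSON dump, so callers can write it
# straight to disk. The session source and output path are assumptions here.
def _export_dashboards_example():
    from superset import db  # assumed to be available in a Superset install

    with open("dashboard_export.json", "w") as outfile:
        outfile.write(export_dashboards(db.session))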