import_datasource.py

# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging

from sqlalchemy.orm.session import make_transient

logger = logging.getLogger(__name__)


def import_datasource(
    session, i_datasource, lookup_database, lookup_datasource, import_time
):
    """Imports the datasource from the object to the database.

    Metrics, columns, and the datasource itself are overridden if they
    already exist. This function can be used to import/export datasources
    between multiple Superset instances. Audit metadata isn't copied over.
    """
    make_transient(i_datasource)
    logger.info("Started import of the datasource: %s", i_datasource.to_json())
    i_datasource.id = None
    i_datasource.database_id = lookup_database(i_datasource).id
    i_datasource.alter_params(import_time=import_time)

    # override the datasource
    datasource = lookup_datasource(i_datasource)
    if datasource:
        datasource.override(i_datasource)
        session.flush()
    else:
        datasource = i_datasource.copy()
        session.add(datasource)
        session.flush()
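
    # copy metrics onto the target datasource, de-duplicating by metric_name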
    for metric in i_datasource.metrics:
        new_m = metric.copy()
        new_m.table_id = datasource.id
        logger.info(
            "Importing metric %s from the datasource: %s",
            new_m.to_json(),
            i_datasource.full_name,
        )
        imported_m = i_datasource.metric_class.import_obj(new_m)
        if imported_m.metric_name not in [m.metric_name for m in datasource.metrics]:
            datasource.metrics.append(imported_m)
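
    # copy columns onto the target datasource, de-duplicating by column_name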
    for column in i_datasource.columns:
        new_c = column.copy()
        new_c.table_id = datasource.id
        logger.info(
            "Importing column %s from the datasource: %s",
            new_c.to_json(),
            i_datasource.full_name,
        )
        imported_c = i_datasource.column_class.import_obj(new_c)
        if imported_c.column_name not in [c.column_name for c in datasource.columns]:
            datasource.columns.append(imported_c)
    session.flush()
    return datasource.id


def import_simple_obj(session, i_obj, lookup_obj):
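    """Imports a simple object such as a column or a metric: if ``lookup_obj``
    finds an existing match it is overridden in place, otherwise the object is
    added to the session. Returns the persisted object.
    """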
    make_transient(i_obj)
    i_obj.id = None
    i_obj.table = None

    # find if the column was already imported
    existing_column = lookup_obj(i_obj)
    i_obj.table = None

    if existing_column:
        existing_column.override(i_obj)
        session.flush()
        return existing_column

    session.add(i_obj)
    session.flush()
    return i_obj
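

# A minimal usage sketch (not part of the original module). The callable and
# variable names below are hypothetical illustrations; a real caller, e.g. a
# datasource model's import routine, supplies equivalents that resolve objects
# already present in the target Superset instance.
#
#     def lookup_database(table):
#         """Return the existing Database the imported table should attach to."""
#         ...
#
#     def lookup_datasource(table):
#         """Return an existing datasource matching the imported one, or None."""
#         ...
#
#     new_datasource_id = import_datasource(
#         session, exported_table, lookup_database, lookup_datasource, import_time
#     )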