# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=line-too-long,unused-argument,ungrouped-imports
"""A collection of ORM sqlalchemy models for Superset"""
import json
import logging
import textwrap
from contextlib import closing
from copy import deepcopy
from datetime import datetime
from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Type

import numpy
import pandas as pd
import sqlalchemy as sqla
import sqlparse
from flask import g, request
from flask_appbuilder import Model
from sqlalchemy import (
    Boolean,
    Column,
    create_engine,
    DateTime,
    ForeignKey,
    Integer,
    MetaData,
    String,
    Table,
    Text,
)
from sqlalchemy.engine import Dialect, Engine, url
from sqlalchemy.engine.reflection import Inspector
from sqlalchemy.engine.url import make_url, URL
from sqlalchemy.orm import relationship
from sqlalchemy.pool import NullPool
from sqlalchemy.schema import UniqueConstraint
from sqlalchemy.sql import Select
from sqlalchemy_utils import EncryptedType

from superset import app, db_engine_specs, is_feature_enabled, security_manager
from superset.db_engine_specs.base import TimeGrain
from superset.models.dashboard import Dashboard
from superset.models.helpers import AuditMixinNullable, ImportMixin
from superset.models.tags import DashboardUpdater, FavStarUpdater
from superset.utils import cache as cache_util, core as utils

config = app.config
custom_password_store = config["SQLALCHEMY_CUSTOM_PASSWORD_STORE"]
stats_logger = config["STATS_LOGGER"]
log_query = config["QUERY_LOGGER"]
metadata = Model.metadata  # pylint: disable=no-member
logger = logging.getLogger(__name__)

PASSWORD_MASK = "X" * 10

DB_CONNECTION_MUTATOR = config["DB_CONNECTION_MUTATOR"]
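# Notes on the config hooks above:
# - custom_password_store (SQLALCHEMY_CUSTOM_PASSWORD_STORE), when set, is called with
#   the connection URL to resolve the password externally (see sqlalchemy_uri_decrypted
#   below) instead of using the stored, encrypted password.
# - DB_CONNECTION_MUTATOR, when set, is invoked right before engine creation in
#   Database.get_sqla_engine and may rewrite the URL and engine params. A minimal,
#   illustrative sketch of the expected call shape (the application_name tweak is just
#   an example, not part of Superset):
#
#       def DB_CONNECTION_MUTATOR(sqlalchemy_url, params, username, security_manager, source):
#           params.setdefault("connect_args", {})["application_name"] = "superset"
#           return sqlalchemy_url, params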

class Url(Model, AuditMixinNullable):
    """Used for the short url feature"""

    __tablename__ = "url"
    id = Column(Integer, primary_key=True)  # pylint: disable=invalid-name
    url = Column(Text)


class KeyValue(Model):  # pylint: disable=too-few-public-methods
    """Used for any type of key-value store"""

    __tablename__ = "keyvalue"
    id = Column(Integer, primary_key=True)  # pylint: disable=invalid-name
    value = Column(Text, nullable=False)


class CssTemplate(Model, AuditMixinNullable):
    """CSS templates for dashboards"""

    __tablename__ = "css_templates"
    id = Column(Integer, primary_key=True)  # pylint: disable=invalid-name
    template_name = Column(String(250))
    css = Column(Text, default="")


class Database(
    Model, AuditMixinNullable, ImportMixin
):  # pylint: disable=too-many-public-methods
    """An ORM object that stores Database related information"""

    __tablename__ = "dbs"
    type = "table"
    __table_args__ = (UniqueConstraint("database_name"),)

    id = Column(Integer, primary_key=True)  # pylint: disable=invalid-name
    verbose_name = Column(String(250), unique=True)
    # short unique name, used in permissions
    database_name = Column(String(250), unique=True, nullable=False)
    sqlalchemy_uri = Column(String(1024), nullable=False)
    password = Column(EncryptedType(String(1024), config["SECRET_KEY"]))
    cache_timeout = Column(Integer)
    select_as_create_table_as = Column(Boolean, default=False)
    expose_in_sqllab = Column(Boolean, default=True)
    allow_run_async = Column(Boolean, default=False)
    allow_csv_upload = Column(Boolean, default=False)
    allow_ctas = Column(Boolean, default=False)
    allow_dml = Column(Boolean, default=False)
    force_ctas_schema = Column(String(250))
    allow_multi_schema_metadata_fetch = Column(  # pylint: disable=invalid-name
        Boolean, default=False
    )
    extra = Column(
        Text,
        default=textwrap.dedent(
            """\
    {
        "metadata_params": {},
        "engine_params": {},
        "metadata_cache_timeout": {},
        "schemas_allowed_for_csv_upload": []
    }
    """
        ),
    )
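    # Note: besides the keys shown in the ``extra`` default above, methods in this class
    # also read "version" and "cost_estimate_enabled" (see allows_cost_estimate) and
    # "default_schemas" (see default_schemas) from the same JSON blob.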
    encrypted_extra = Column(EncryptedType(Text, config["SECRET_KEY"]), nullable=True)
    perm = Column(String(1000))
    impersonate_user = Column(Boolean, default=False)
    export_fields = [
        "database_name",
        "sqlalchemy_uri",
        "cache_timeout",
        "expose_in_sqllab",
        "allow_run_async",
        "allow_ctas",
        "allow_csv_upload",
        "extra",
    ]
    export_children = ["tables"]

    def __repr__(self):
        return self.name

    @property
    def name(self) -> str:
        return self.verbose_name if self.verbose_name else self.database_name

    @property
    def allows_subquery(self) -> bool:
        return self.db_engine_spec.allows_subqueries

    @property
    def function_names(self) -> List[str]:
        return self.db_engine_spec.get_function_names(self)

    @property
    def allows_cost_estimate(self) -> bool:
        extra = self.get_extra()
        database_version = extra.get("version")
        cost_estimate_enabled: bool = extra.get("cost_estimate_enabled")  # type: ignore
        return (
            self.db_engine_spec.get_allow_cost_estimate(database_version)
            and cost_estimate_enabled
        )

    @property
    def data(self) -> Dict[str, Any]:
        return {
            "id": self.id,
            "name": self.database_name,
            "backend": self.backend,
            "allow_multi_schema_metadata_fetch": self.allow_multi_schema_metadata_fetch,
            "allows_subquery": self.allows_subquery,
            "allows_cost_estimate": self.allows_cost_estimate,
        }

    @property
    def unique_name(self) -> str:
        return self.database_name

    @property
    def url_object(self) -> URL:
        return make_url(self.sqlalchemy_uri_decrypted)

    @property
    def backend(self) -> str:
        sqlalchemy_url = make_url(self.sqlalchemy_uri_decrypted)
        return sqlalchemy_url.get_backend_name()

    @property
    def metadata_cache_timeout(self) -> Dict[str, Any]:
        return self.get_extra().get("metadata_cache_timeout", {})

    @property
    def schema_cache_enabled(self) -> bool:
        return "schema_cache_timeout" in self.metadata_cache_timeout

    @property
    def schema_cache_timeout(self) -> Optional[int]:
        return self.metadata_cache_timeout.get("schema_cache_timeout")

    @property
    def table_cache_enabled(self) -> bool:
        return "table_cache_timeout" in self.metadata_cache_timeout

    @property
    def table_cache_timeout(self) -> Optional[int]:
        return self.metadata_cache_timeout.get("table_cache_timeout")

    @property
    def default_schemas(self) -> List[str]:
        return self.get_extra().get("default_schemas", [])

    @classmethod
    def get_password_masked_url_from_uri(cls, uri: str):  # pylint: disable=invalid-name
        sqlalchemy_url = make_url(uri)
        return cls.get_password_masked_url(sqlalchemy_url)

    @classmethod
    def get_password_masked_url(
        cls, url: URL  # pylint: disable=redefined-outer-name
    ) -> URL:
        url_copy = deepcopy(url)
        if url_copy.password is not None:
            url_copy.password = PASSWORD_MASK
        return url_copy

    def set_sqlalchemy_uri(self, uri: str) -> None:
        conn = sqla.engine.url.make_url(uri.strip())
        if conn.password != PASSWORD_MASK and not custom_password_store:
            # do not over-write the password with the password mask
            self.password = conn.password
        conn.password = PASSWORD_MASK if conn.password else None
        self.sqlalchemy_uri = str(conn)  # hides the password

    def get_effective_user(
        self,
        url: URL,  # pylint: disable=redefined-outer-name
        user_name: Optional[str] = None,
    ) -> Optional[str]:
        """
        Get the effective user, especially during impersonation.

        :param url: SQL Alchemy URL object
        :param user_name: Default username
        :return: The effective username
        """
        effective_username = None
        if self.impersonate_user:
            effective_username = url.username
            if user_name:
                effective_username = user_name
            elif (
                hasattr(g, "user")
                and hasattr(g.user, "username")
                and g.user.username is not None
            ):
                effective_username = g.user.username
        return effective_username
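    # Note on get_effective_user precedence when impersonation is enabled: an explicitly
    # passed ``user_name`` wins, then the logged-in user's username (``g.user``), and
    # finally the username embedded in the SQLAlchemy URL. When impersonation is
    # disabled the method returns None.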
    @utils.memoized(watch=("impersonate_user", "sqlalchemy_uri_decrypted", "extra"))
    def get_sqla_engine(
        self,
        schema: Optional[str] = None,
        nullpool: bool = True,
        user_name: Optional[str] = None,
        source: Optional[utils.QuerySource] = None,
    ) -> Engine:
        extra = self.get_extra()
        sqlalchemy_url = make_url(self.sqlalchemy_uri_decrypted)
        self.db_engine_spec.adjust_database_uri(sqlalchemy_url, schema)
        effective_username = self.get_effective_user(sqlalchemy_url, user_name)
        # If using MySQL or Presto for example, will set url.username
        # If using Hive, will not do anything yet since that relies on a
        # configuration parameter instead.
        self.db_engine_spec.modify_url_for_impersonation(
            sqlalchemy_url, self.impersonate_user, effective_username
        )

        masked_url = self.get_password_masked_url(sqlalchemy_url)
        logger.info("Database.get_sqla_engine(). Masked URL: %s", str(masked_url))

        params = extra.get("engine_params", {})
        if nullpool:
            params["poolclass"] = NullPool

        connect_args = params.get("connect_args", {})
        configuration = connect_args.get("configuration", {})

        # If using Hive, this will set hive.server2.proxy.user=$effective_username
        configuration.update(
            self.db_engine_spec.get_configuration_for_impersonation(
                str(sqlalchemy_url), self.impersonate_user, effective_username
            )
        )
        if configuration:
            connect_args["configuration"] = configuration
            params["connect_args"] = connect_args

        params.update(self.get_encrypted_extra())

        if DB_CONNECTION_MUTATOR:
            if not source and request and request.referrer:
                if "/superset/dashboard/" in request.referrer:
                    source = utils.QuerySource.DASHBOARD
                elif "/superset/explore/" in request.referrer:
                    source = utils.QuerySource.CHART
                elif "/superset/sqllab/" in request.referrer:
                    source = utils.QuerySource.SQL_LAB

            sqlalchemy_url, params = DB_CONNECTION_MUTATOR(
                sqlalchemy_url, params, effective_username, security_manager, source
            )

        return create_engine(sqlalchemy_url, **params)
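    # Illustrative use: ``engine = database.get_sqla_engine(schema="main")`` (schema name
    # is just an example) yields a regular SQLAlchemy Engine, so callers can use
    # ``engine.connect()``, ``engine.has_table()``, etc. The @utils.memoized decorator
    # above is meant to reuse the engine until one of the watched attributes
    # (impersonate_user, sqlalchemy_uri_decrypted, extra) changes.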
    def get_reserved_words(self) -> Set[str]:
        return self.get_dialect().preparer.reserved_words

    def get_quoter(self):
        return self.get_dialect().identifier_preparer.quote

    def get_df(  # pylint: disable=too-many-locals
        self, sql: str, schema: Optional[str] = None, mutator: Optional[Callable] = None
    ) -> pd.DataFrame:
        sqls = [str(s).strip(" ;") for s in sqlparse.parse(sql)]
        engine = self.get_sqla_engine(schema=schema)
        username = utils.get_username()

        def needs_conversion(df_series: pd.Series) -> bool:
            return not df_series.empty and isinstance(df_series[0], (list, dict))

        def _log_query(sql: str) -> None:
            if log_query:
                log_query(engine.url, sql, schema, username, __name__, security_manager)

        with closing(engine.raw_connection()) as conn:
            with closing(conn.cursor()) as cursor:
                for sql_ in sqls[:-1]:
                    _log_query(sql_)
                    self.db_engine_spec.execute(cursor, sql_)
                    cursor.fetchall()

                _log_query(sqls[-1])
                self.db_engine_spec.execute(cursor, sqls[-1])

                if cursor.description is not None:
                    columns = [col_desc[0] for col_desc in cursor.description]
                else:
                    columns = []

                df = pd.DataFrame.from_records(
                    data=list(cursor.fetchall()), columns=columns, coerce_float=True
                )

                if mutator:
                    mutator(df)

                for k, v in df.dtypes.items():
                    if v.type == numpy.object_ and needs_conversion(df[k]):
                        df[k] = df[k].apply(utils.json_dumps_w_dates)

                return df
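    # Illustrative use: ``df = database.get_df("SELECT 1 AS x")`` runs the statement(s)
    # through the raw DBAPI cursor and returns the last result set as a pandas
    # DataFrame; an optional ``mutator(df)`` callback can post-process it in place, and
    # list/dict columns are serialized to JSON strings before returning.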
    def compile_sqla_query(self, qry: Select, schema: Optional[str] = None) -> str:
        engine = self.get_sqla_engine(schema=schema)

        sql = str(qry.compile(engine, compile_kwargs={"literal_binds": True}))

        if (
            engine.dialect.identifier_preparer._double_percents  # pylint: disable=protected-access
        ):
            sql = sql.replace("%%", "%")

        return sql

    def select_star(  # pylint: disable=too-many-arguments
        self,
        table_name: str,
        schema: Optional[str] = None,
        limit: int = 100,
        show_cols: bool = False,
        indent: bool = True,
        latest_partition: bool = False,
        cols: Optional[List[Dict[str, Any]]] = None,
    ):
        """Generates a ``select *`` statement in the proper dialect"""
        eng = self.get_sqla_engine(schema=schema, source=utils.QuerySource.SQL_LAB)
        return self.db_engine_spec.select_star(
            self,
            table_name,
            schema=schema,
            engine=eng,
            limit=limit,
            show_cols=show_cols,
            indent=indent,
            latest_partition=latest_partition,
            cols=cols,
        )
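    # Illustrative use: ``database.select_star("some_table", schema="public", show_cols=True)``
    # (table and schema names are examples) delegates to the engine spec to build a
    # dialect-appropriate ``SELECT *`` preview query, with an optional LIMIT and, where
    # supported, a latest-partition filter.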
    def apply_limit_to_sql(self, sql: str, limit: int = 1000) -> str:
        return self.db_engine_spec.apply_limit_to_sql(sql, limit, self)

    def safe_sqlalchemy_uri(self) -> str:
        return self.sqlalchemy_uri

    @property
    def inspector(self) -> Inspector:
        engine = self.get_sqla_engine()
        return sqla.inspect(engine)
    @cache_util.memoized_func(
        key=lambda *args, **kwargs: "db:{}:schema:None:table_list",
        attribute_in_key="id",
    )
    def get_all_table_names_in_database(
        self,
        cache: bool = False,
        cache_timeout: Optional[int] = None,
        force: bool = False,
    ) -> List[utils.DatasourceName]:
        """Parameters need to be passed as keyword arguments."""
        if not self.allow_multi_schema_metadata_fetch:
            return []
        return self.db_engine_spec.get_all_datasource_names(self, "table")

    @cache_util.memoized_func(
        key=lambda *args, **kwargs: "db:{}:schema:None:view_list",
        attribute_in_key="id",  # type: ignore
    )
    def get_all_view_names_in_database(
        self,
        cache: bool = False,
        cache_timeout: Optional[int] = None,
        force: bool = False,
    ) -> List[utils.DatasourceName]:
        """Parameters need to be passed as keyword arguments."""
        if not self.allow_multi_schema_metadata_fetch:
            return []
        return self.db_engine_spec.get_all_datasource_names(self, "view")

    @cache_util.memoized_func(
        key=lambda *args, **kwargs: f"db:{{}}:schema:{kwargs.get('schema')}:table_list",  # type: ignore
        attribute_in_key="id",
    )
    def get_all_table_names_in_schema(
        self,
        schema: str,
        cache: bool = False,
        cache_timeout: Optional[int] = None,
        force: bool = False,
    ) -> List[utils.DatasourceName]:
        """Parameters need to be passed as keyword arguments.

        For unused parameters, they are referenced in
        cache_util.memoized_func decorator.

        :param schema: schema name
        :param cache: whether cache is enabled for the function
        :param cache_timeout: timeout in seconds for the cache
        :param force: whether to force refresh the cache
        :return: list of tables
        """
        try:
            tables = self.db_engine_spec.get_table_names(
                database=self, inspector=self.inspector, schema=schema
            )
            return [
                utils.DatasourceName(table=table, schema=schema) for table in tables
            ]
        except Exception as e:  # pylint: disable=broad-except
            logger.exception(e)
            # return an empty list rather than None so the return type holds
            return []

    @cache_util.memoized_func(
        key=lambda *args, **kwargs: f"db:{{}}:schema:{kwargs.get('schema')}:view_list",  # type: ignore
        attribute_in_key="id",
    )
    def get_all_view_names_in_schema(
        self,
        schema: str,
        cache: bool = False,
        cache_timeout: Optional[int] = None,
        force: bool = False,
    ) -> List[utils.DatasourceName]:
        """Parameters need to be passed as keyword arguments.

        For unused parameters, they are referenced in
        cache_util.memoized_func decorator.

        :param schema: schema name
        :param cache: whether cache is enabled for the function
        :param cache_timeout: timeout in seconds for the cache
        :param force: whether to force refresh the cache
        :return: list of views
        """
        try:
            views = self.db_engine_spec.get_view_names(
                database=self, inspector=self.inspector, schema=schema
            )
            return [utils.DatasourceName(table=view, schema=schema) for view in views]
        except Exception as e:  # pylint: disable=broad-except
            logger.exception(e)
            # return an empty list rather than None so the return type holds
            return []
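    # The cache_util.memoized_func decorators above (and on get_all_schema_names below)
    # cache results under keys like "db:<id>:schema:<schema>:table_list"; the "{}"
    # placeholder appears to be filled from the attribute named by ``attribute_in_key``
    # (the database id), and the ``cache``/``cache_timeout``/``force`` keyword arguments
    # are consumed by the decorator rather than by the methods themselves.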
    @cache_util.memoized_func(
        key=lambda *args, **kwargs: "db:{}:schema_list", attribute_in_key="id"
    )
    def get_all_schema_names(
        self,
        cache: bool = False,
        cache_timeout: Optional[int] = None,
        force: bool = False,
    ) -> List[str]:
        """Parameters need to be passed as keyword arguments.

        For unused parameters, they are referenced in
        cache_util.memoized_func decorator.

        :param cache: whether cache is enabled for the function
        :param cache_timeout: timeout in seconds for the cache
        :param force: whether to force refresh the cache
        :return: schema list
        """
        return self.db_engine_spec.get_schema_names(self.inspector)

    @property
    def db_engine_spec(self) -> Type[db_engine_specs.BaseEngineSpec]:
        return db_engine_specs.engines.get(self.backend, db_engine_specs.BaseEngineSpec)

    @classmethod
    def get_db_engine_spec_for_backend(
        cls, backend
    ) -> Type[db_engine_specs.BaseEngineSpec]:
        return db_engine_specs.engines.get(backend, db_engine_specs.BaseEngineSpec)

    def grains(self) -> Tuple[TimeGrain, ...]:
        """Defines time granularity database-specific expressions.

        The idea here is to make it easy for users to change the time grain
        from a datetime (maybe the source grain is arbitrary timestamps, daily
        or 5 minutes increments) to another, "truncated" datetime. Since
        each database has slightly different but similar datetime functions,
        this allows a mapping between database engines and actual functions.
        """
        return self.db_engine_spec.get_time_grains()
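    # For example (illustrative): on PostgreSQL the daily grain typically maps to an
    # expression like DATE_TRUNC('day', <column>), while other engines substitute their
    # own truncation function; the actual mapping lives in each db_engine_spec.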
    def get_extra(self) -> Dict[str, Any]:
        extra: Dict[str, Any] = {}
        if self.extra:
            try:
                extra = json.loads(self.extra)
            except json.JSONDecodeError as e:
                logger.error(e)
                raise e
        return extra

    def get_encrypted_extra(self):
        encrypted_extra = {}
        if self.encrypted_extra:
            try:
                encrypted_extra = json.loads(self.encrypted_extra)
            except json.JSONDecodeError as e:
                logger.error(e)
                raise e
        return encrypted_extra

    def get_table(self, table_name: str, schema: Optional[str] = None) -> Table:
        extra = self.get_extra()
        meta = MetaData(**extra.get("metadata_params", {}))
        return Table(
            table_name,
            meta,
            schema=schema or None,
            autoload=True,
            autoload_with=self.get_sqla_engine(),
        )

    def get_columns(
        self, table_name: str, schema: Optional[str] = None
    ) -> List[Dict[str, Any]]:
        return self.db_engine_spec.get_columns(self.inspector, table_name, schema)

    def get_indexes(
        self, table_name: str, schema: Optional[str] = None
    ) -> List[Dict[str, Any]]:
        return self.inspector.get_indexes(table_name, schema)

    def get_pk_constraint(
        self, table_name: str, schema: Optional[str] = None
    ) -> Dict[str, Any]:
        return self.inspector.get_pk_constraint(table_name, schema)

    def get_foreign_keys(
        self, table_name: str, schema: Optional[str] = None
    ) -> List[Dict[str, Any]]:
        return self.inspector.get_foreign_keys(table_name, schema)

    def get_schema_access_for_csv_upload(  # pylint: disable=invalid-name
        self,
    ) -> List[str]:
        return self.get_extra().get("schemas_allowed_for_csv_upload", [])

    @property
    def sqlalchemy_uri_decrypted(self) -> str:
        conn = sqla.engine.url.make_url(self.sqlalchemy_uri)
        if custom_password_store:
            conn.password = custom_password_store(conn)
        else:
            conn.password = self.password
        return str(conn)

    @property
    def sql_url(self) -> str:
        return f"/superset/sql/{self.id}/"

    def get_perm(self) -> str:
        return f"[{self.database_name}].(id:{self.id})"

    def has_table(self, table: Table) -> bool:
        engine = self.get_sqla_engine()
        return engine.has_table(table.table_name, table.schema or None)

    def has_table_by_name(self, table_name: str, schema: Optional[str] = None) -> bool:
        engine = self.get_sqla_engine()
        return engine.has_table(table_name, schema)

    @utils.memoized
    def get_dialect(self) -> Dialect:
        sqla_url = url.make_url(self.sqlalchemy_uri_decrypted)
        return sqla_url.get_dialect()()
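
# The listeners below let the security manager refresh the database permission string
# (the "[db_name].(id:N)" value produced by get_perm above) whenever a Database row is
# inserted or updated.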
sqla.event.listen(Database, "after_insert", security_manager.set_perm)
sqla.event.listen(Database, "after_update", security_manager.set_perm)


class Log(Model):  # pylint: disable=too-few-public-methods
    """ORM object used to log Superset actions to the database"""

    __tablename__ = "logs"

    id = Column(Integer, primary_key=True)  # pylint: disable=invalid-name
    action = Column(String(512))
    user_id = Column(Integer, ForeignKey("ab_user.id"))
    dashboard_id = Column(Integer)
    slice_id = Column(Integer)
    json = Column(Text)
    user = relationship(
        security_manager.user_model, backref="logs", foreign_keys=[user_id]
    )
    dttm = Column(DateTime, default=datetime.utcnow)
    duration_ms = Column(Integer)
    referrer = Column(String(1024))


class FavStar(Model):  # pylint: disable=too-few-public-methods
    __tablename__ = "favstar"

    id = Column(Integer, primary_key=True)  # pylint: disable=invalid-name
    user_id = Column(Integer, ForeignKey("ab_user.id"))
    class_name = Column(String(50))
    obj_id = Column(Integer)
    dttm = Column(DateTime, default=datetime.utcnow)


# events for updating tags
if is_feature_enabled("TAGGING_SYSTEM"):
    sqla.event.listen(Dashboard, "after_insert", DashboardUpdater.after_insert)
    sqla.event.listen(Dashboard, "after_update", DashboardUpdater.after_update)
    sqla.event.listen(Dashboard, "after_delete", DashboardUpdater.after_delete)
    sqla.event.listen(FavStar, "after_insert", FavStarUpdater.after_insert)
    sqla.event.listen(FavStar, "after_delete", FavStarUpdater.after_delete)