core.py 98 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755
  1. # Licensed to the Apache Software Foundation (ASF) under one
  2. # or more contributor license agreements. See the NOTICE file
  3. # distributed with this work for additional information
  4. # regarding copyright ownership. The ASF licenses this file
  5. # to you under the Apache License, Version 2.0 (the
  6. # "License"); you may not use this file except in compliance
  7. # with the License. You may obtain a copy of the License at
  8. #
  9. # http://www.apache.org/licenses/LICENSE-2.0
  10. #
  11. # Unless required by applicable law or agreed to in writing,
  12. # software distributed under the License is distributed on an
  13. # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
  14. # KIND, either express or implied. See the License for the
  15. # specific language governing permissions and limitations
  16. # under the License.
  17. # pylint: disable=C,R,W
  18. import logging
  19. import re
  20. from contextlib import closing
  21. from datetime import datetime, timedelta
  22. from typing import Any, cast, Dict, List, Optional, Union
  23. from urllib import parse
  24. import backoff
  25. import msgpack
  26. import pandas as pd
  27. import pyarrow as pa
  28. import simplejson as json
  29. from flask import abort, flash, g, Markup, redirect, render_template, request, Response
  30. from flask_appbuilder import expose
  31. from flask_appbuilder.models.sqla.interface import SQLAInterface
  32. from flask_appbuilder.security.decorators import has_access, has_access_api
  33. from flask_appbuilder.security.sqla import models as ab_models
  34. from flask_babel import gettext as __, lazy_gettext as _
  35. from sqlalchemy import and_, Integer, or_, select
  36. from sqlalchemy.exc import SQLAlchemyError
  37. from sqlalchemy.orm.session import Session
  38. from werkzeug.urls import Href
  39. import superset.models.core as models
  40. from superset import (
  41. app,
  42. appbuilder,
  43. cache,
  44. conf,
  45. dataframe,
  46. db,
  47. event_logger,
  48. get_feature_flags,
  49. is_feature_enabled,
  50. result_set,
  51. results_backend,
  52. results_backend_use_msgpack,
  53. security_manager,
  54. sql_lab,
  55. talisman,
  56. viz,
  57. )
  58. from superset.connectors.connector_registry import ConnectorRegistry
  59. from superset.connectors.sqla.models import AnnotationDatasource
  60. from superset.constants import RouteMethod
  61. from superset.exceptions import (
  62. DatabaseNotFound,
  63. SupersetException,
  64. SupersetSecurityException,
  65. SupersetTimeoutException,
  66. )
  67. from superset.jinja_context import get_template_processor
  68. from superset.models.dashboard import Dashboard
  69. from superset.models.datasource_access_request import DatasourceAccessRequest
  70. from superset.models.slice import Slice
  71. from superset.models.sql_lab import Query, TabState
  72. from superset.models.user_attributes import UserAttribute
  73. from superset.sql_parse import ParsedQuery
  74. from superset.sql_validators import get_validator_by_name
  75. from superset.utils import core as utils, dashboard_import_export
  76. from superset.utils.dates import now_as_float
  77. from superset.utils.decorators import etag_cache, stats_timing
  78. from superset.views.database.filters import DatabaseFilter
  79. from .base import (
  80. api,
  81. BaseSupersetView,
  82. check_ownership,
  83. common_bootstrap_payload,
  84. CsvResponse,
  85. data_payload_response,
  86. DeleteMixin,
  87. generate_download_headers,
  88. get_error_msg,
  89. get_user_roles,
  90. handle_api_exception,
  91. json_error_response,
  92. json_success,
  93. SupersetModelView,
  94. )
  95. from .utils import (
  96. apply_display_max_row_limit,
  97. bootstrap_user_data,
  98. get_datasource_info,
  99. get_form_data,
  100. get_viz,
  101. )
# Module-level shortcuts and constants derived from the Flask app config.
config = app.config
CACHE_DEFAULT_TIMEOUT = config["CACHE_DEFAULT_TIMEOUT"]
SQLLAB_QUERY_COST_ESTIMATE_TIMEOUT = config["SQLLAB_QUERY_COST_ESTIMATE_TIMEOUT"]
stats_logger = config["STATS_LOGGER"]
DAR = DatasourceAccessRequest  # short alias used throughout this module
QueryStatus = utils.QueryStatus
logger = logging.getLogger(__name__)

# Database model attributes serialized for clients (e.g. SQL Lab bootstrap).
DATABASE_KEYS = [
    "allow_csv_upload",
    "allow_ctas",
    "allow_dml",
    "allow_multi_schema_metadata_fetch",
    "allow_run_async",
    "allows_subquery",
    "backend",
    "database_name",
    "expose_in_sqllab",
    "force_ctas_schema",
    "id",
]

# Localized error messages reused by several endpoints below.
ALL_DATASOURCE_ACCESS_ERR = __(
    "This endpoint requires the `all_datasource_access` permission"
)
DATASOURCE_MISSING_ERR = __("The data source seems to have been deleted")
ACCESS_REQUEST_MISSING_ERR = __("The access requests seem to have been deleted")
USER_MISSING_ERR = __("The user seems to have been deleted")

# When JavaScript controls are disabled, these JS-bearing form_data keys
# are stripped from incoming requests.
FORM_DATA_KEY_BLACKLIST: List[str] = []
if not config["ENABLE_JAVASCRIPT_CONTROLS"]:
    FORM_DATA_KEY_BLACKLIST = ["js_tooltip", "js_onclick_href", "js_data_mutator"]
  131. def get_database_access_error_msg(database_name):
  132. return __(
  133. "This view requires the database %(name)s or "
  134. "`all_datasource_access` permission",
  135. name=database_name,
  136. )
  137. def is_owner(obj, user):
  138. """ Check if user is owner of the slice """
  139. return obj and user in obj.owners
  140. def check_datasource_perms(
  141. self, datasource_type: Optional[str] = None, datasource_id: Optional[int] = None
  142. ) -> None:
  143. """
  144. Check if user can access a cached response from explore_json.
  145. This function takes `self` since it must have the same signature as the
  146. the decorated method.
  147. :param datasource_type: The datasource type, i.e., 'druid' or 'table'
  148. :param datasource_id: The datasource ID
  149. :raises SupersetSecurityException: If the user cannot access the resource
  150. """
  151. form_data = get_form_data()[0]
  152. try:
  153. datasource_id, datasource_type = get_datasource_info(
  154. datasource_id, datasource_type, form_data
  155. )
  156. except SupersetException as e:
  157. raise SupersetSecurityException(str(e))
  158. viz_obj = get_viz(
  159. datasource_type=datasource_type,
  160. datasource_id=datasource_id,
  161. form_data=form_data,
  162. force=False,
  163. )
  164. security_manager.assert_viz_permission(viz_obj)
  165. def check_slice_perms(self, slice_id):
  166. """
  167. Check if user can access a cached response from slice_json.
  168. This function takes `self` since it must have the same signature as the
  169. the decorated method.
  170. """
  171. form_data, slc = get_form_data(slice_id, use_slice_data=True)
  172. viz_obj = get_viz(
  173. datasource_type=slc.datasource.type,
  174. datasource_id=slc.datasource.id,
  175. form_data=form_data,
  176. force=False,
  177. )
  178. security_manager.assert_viz_permission(viz_obj)
  179. def _deserialize_results_payload(
  180. payload: Union[bytes, str], query, use_msgpack: Optional[bool] = False
  181. ) -> dict:
  182. logger.debug(f"Deserializing from msgpack: {use_msgpack}")
  183. if use_msgpack:
  184. with stats_timing(
  185. "sqllab.query.results_backend_msgpack_deserialize", stats_logger
  186. ):
  187. ds_payload = msgpack.loads(payload, raw=False)
  188. with stats_timing("sqllab.query.results_backend_pa_deserialize", stats_logger):
  189. pa_table = pa.deserialize(ds_payload["data"])
  190. df = result_set.SupersetResultSet.convert_table_to_df(pa_table)
  191. ds_payload["data"] = dataframe.df_to_records(df) or []
  192. db_engine_spec = query.database.db_engine_spec
  193. all_columns, data, expanded_columns = db_engine_spec.expand_data(
  194. ds_payload["selected_columns"], ds_payload["data"]
  195. )
  196. ds_payload.update(
  197. {"data": data, "columns": all_columns, "expanded_columns": expanded_columns}
  198. )
  199. return ds_payload
  200. else:
  201. with stats_timing(
  202. "sqllab.query.results_backend_json_deserialize", stats_logger
  203. ):
  204. return json.loads(payload) # type: ignore
class AccessRequestsModelView(SupersetModelView, DeleteMixin):
    """CRUD view over pending datasource access requests (DAR)."""

    datamodel = SQLAInterface(DAR)
    include_route_methods = RouteMethod.CRUD_SET
    # Columns shown in the list view.
    list_columns = [
        "username",
        "user_roles",
        "datasource_link",
        "roles_with_datasource",
        "created_on",
    ]
    order_columns = ["created_on"]
    base_order = ("changed_on", "desc")
    # Localized column headers.
    label_columns = {
        "username": _("User"),
        "user_roles": _("User Roles"),
        "database": _("Database URL"),
        "datasource_link": _("Datasource"),
        "roles_with_datasource": _("Roles to grant"),
        "created_on": _("Created On"),
    }
@talisman(force_https=False)
@app.route("/health")
def health():
    """Liveness probe endpoint; exempt from HTTPS enforcement."""
    return "OK"
@talisman(force_https=False)
@app.route("/healthcheck")
def healthcheck():
    """Liveness probe endpoint (alias of /health); exempt from HTTPS enforcement."""
    return "OK"
@talisman(force_https=False)
@app.route("/ping")
def ping():
    """Liveness probe endpoint (alias of /health); exempt from HTTPS enforcement."""
    return "OK"
class KV(BaseSupersetView):
    """Used for storing and retrieving key value pairs"""

    @event_logger.log_this
    @has_access_api
    @expose("/store/", methods=["POST"])
    def store(self):
        """Persist the POSTed `data` form field and return its new id as JSON."""
        try:
            value = request.form.get("data")
            obj = models.KeyValue(value=value)
            db.session.add(obj)
            db.session.commit()
        except Exception as e:
            # Any persistence failure is surfaced as a JSON error response.
            return json_error_response(e)
        return Response(json.dumps({"id": obj.id}), status=200)

    @event_logger.log_this
    @has_access_api
    @expose("/<key_id>/", methods=["GET"])
    def get_value(self, key_id):
        """Return the stored value for `key_id` as plain text; 404 if absent."""
        try:
            kv = db.session.query(models.KeyValue).filter_by(id=key_id).scalar()
            if not kv:
                return Response(status=404, content_type="text/plain")
        except Exception as e:
            return json_error_response(e)
        return Response(kv.value, status=200, content_type="text/plain")
class R(BaseSupersetView):
    """used for short urls"""

    @event_logger.log_this
    @expose("/<url_id>")
    def index(self, url_id):
        """Resolve a short-url id and redirect to the stored target URL."""
        url = db.session.query(models.Url).get(url_id)
        if url and url.url:
            explore_url = "//superset/explore/?"
            if url.url.startswith(explore_url):
                # Explore targets are redirected via the compact `r=<id>` form.
                explore_url += f"r={url_id}"
                # NOTE(review): `[1:]` strips one leading slash — presumably the
                # stored URLs begin with "//"; confirm against how they're written.
                return redirect(explore_url[1:])
            else:
                return redirect(url.url[1:])
        else:
            flash("URL to nowhere...", "danger")
            return redirect("/")

    @event_logger.log_this
    @has_access_api
    @expose("/shortner/", methods=["POST"])
    def shortner(self):
        """Store the POSTed `data` URL and return its short-url address."""
        url = request.form.get("data")
        obj = models.Url(url=url)
        db.session.add(obj)
        db.session.commit()
        return Response(
            "{scheme}://{request.headers[Host]}/r/{obj.id}".format(
                scheme=request.scheme, request=request, obj=obj
            ),
            mimetype="text/plain",
        )
  292. class Superset(BaseSupersetView):
  293. """The base views for Superset!"""
  294. logger = logging.getLogger(__name__)
  295. @has_access_api
  296. @expose("/datasources/")
  297. def datasources(self):
  298. datasources = ConnectorRegistry.get_all_datasources(db.session)
  299. datasources = [o.short_data for o in datasources if o.short_data.get("name")]
  300. datasources = sorted(datasources, key=lambda o: o["name"])
  301. return self.json_response(datasources)
    @has_access_api
    @expose("/override_role_permissions/", methods=["POST"])
    def override_role_permissions(self):
        """Updates the role with the given datasource permissions.

        Permissions not in the request will be revoked. This endpoint should
        be available to admins only. Expects JSON in the format:
            {
                'role_name': '{role_name}',
                'database': [{
                    'datasource_type': '{table|druid}',
                    'name': '{database_name}',
                    'schema': [{
                        'name': '{schema_name}',
                        'datasources': ['{datasource name}, {datasource name}']
                    }]
                }]
            }
        """
        data = request.get_json(force=True)
        role_name = data["role_name"]
        databases = data["database"]

        # Collect the fully-qualified name of every datasource in the request.
        db_ds_names = set()
        for dbs in databases:
            for schema in dbs["schema"]:
                for ds_name in schema["datasources"]:
                    fullname = utils.get_datasource_full_name(
                        dbs["name"], ds_name, schema=schema["name"]
                    )
                    db_ds_names.add(fullname)

        existing_datasources = ConnectorRegistry.get_all_datasources(db.session)
        datasources = [d for d in existing_datasources if d.full_name in db_ds_names]
        role = security_manager.find_role(role_name)
        # remove all permissions
        role.permissions = []
        # grant permissions to the list of datasources
        granted_perms = []
        for datasource in datasources:
            view_menu_perm = security_manager.find_permission_view_menu(
                view_menu_name=datasource.perm, permission_name="datasource_access"
            )
            # prevent creating empty permissions
            if view_menu_perm and view_menu_perm.view_menu:
                role.permissions.append(view_menu_perm)
                granted_perms.append(view_menu_perm.view_menu.name)
        db.session.commit()
        return self.json_response(
            {"granted": granted_perms, "requested": list(db_ds_names)}, status=201
        )
  350. @event_logger.log_this
  351. @has_access
  352. @expose("/request_access/")
  353. def request_access(self):
  354. datasources = set()
  355. dashboard_id = request.args.get("dashboard_id")
  356. if dashboard_id:
  357. dash = db.session.query(Dashboard).filter_by(id=int(dashboard_id)).one()
  358. datasources |= dash.datasources
  359. datasource_id = request.args.get("datasource_id")
  360. datasource_type = request.args.get("datasource_type")
  361. if datasource_id:
  362. ds_class = ConnectorRegistry.sources.get(datasource_type)
  363. datasource = (
  364. db.session.query(ds_class).filter_by(id=int(datasource_id)).one()
  365. )
  366. datasources.add(datasource)
  367. has_access = all(
  368. (
  369. datasource and security_manager.datasource_access(datasource)
  370. for datasource in datasources
  371. )
  372. )
  373. if has_access:
  374. return redirect("/superset/dashboard/{}".format(dashboard_id))
  375. if request.args.get("action") == "go":
  376. for datasource in datasources:
  377. access_request = DAR(
  378. datasource_id=datasource.id, datasource_type=datasource.type
  379. )
  380. db.session.add(access_request)
  381. db.session.commit()
  382. flash(__("Access was requested"), "info")
  383. return redirect("/")
  384. return self.render_template(
  385. "superset/request_access.html",
  386. datasources=datasources,
  387. datasource_names=", ".join([o.name for o in datasources]),
  388. )
    @event_logger.log_this
    @has_access
    @expose("/approve")
    def approve(self):
        """Approve a datasource access request.

        Depending on the query args, grants the requester a whole role
        (`role_to_grant`) and/or extends an existing role with the single
        datasource permission (`role_to_extend`), notifies the requester by
        email, then deletes the fulfilled request rows.
        """

        def clean_fulfilled_requests(session):
            # Drop requests whose datasource is gone or already accessible.
            for r in session.query(DAR).all():
                datasource = ConnectorRegistry.get_datasource(
                    r.datasource_type, r.datasource_id, session
                )
                if not datasource or security_manager.datasource_access(datasource):
                    # datasource does not exist anymore
                    session.delete(r)
            session.commit()

        datasource_type = request.args.get("datasource_type")
        datasource_id = request.args.get("datasource_id")
        created_by_username = request.args.get("created_by")
        role_to_grant = request.args.get("role_to_grant")
        role_to_extend = request.args.get("role_to_extend")

        session = db.session
        datasource = ConnectorRegistry.get_datasource(
            datasource_type, datasource_id, session
        )

        if not datasource:
            flash(DATASOURCE_MISSING_ERR, "alert")
            return json_error_response(DATASOURCE_MISSING_ERR)

        requested_by = security_manager.find_user(username=created_by_username)
        if not requested_by:
            flash(USER_MISSING_ERR, "alert")
            return json_error_response(USER_MISSING_ERR)

        requests = (
            session.query(DAR)
            .filter(
                DAR.datasource_id == datasource_id,
                DAR.datasource_type == datasource_type,
                DAR.created_by_fk == requested_by.id,
            )
            .all()
        )

        if not requests:
            flash(ACCESS_REQUEST_MISSING_ERR, "alert")
            return json_error_response(ACCESS_REQUEST_MISSING_ERR)

        # check if you can approve
        if security_manager.all_datasource_access() or check_ownership(
            datasource, raise_if_false=False
        ):
            # can be done by admin only
            if role_to_grant:
                role = security_manager.find_role(role_to_grant)
                requested_by.roles.append(role)
                msg = __(
                    "%(user)s was granted the role %(role)s that gives access "
                    "to the %(datasource)s",
                    user=requested_by.username,
                    role=role_to_grant,
                    datasource=datasource.full_name,
                )
                utils.notify_user_about_perm_udate(
                    g.user,
                    requested_by,
                    role,
                    datasource,
                    "email/role_granted.txt",
                    app.config,
                )
                flash(msg, "info")

            if role_to_extend:
                perm_view = security_manager.find_permission_view_menu(
                    "email/datasource_access", datasource.perm
                )
                role = security_manager.find_role(role_to_extend)
                security_manager.add_permission_role(role, perm_view)
                msg = __(
                    "Role %(r)s was extended to provide the access to "
                    "the datasource %(ds)s",
                    r=role_to_extend,
                    ds=datasource.full_name,
                )
                utils.notify_user_about_perm_udate(
                    g.user,
                    requested_by,
                    role,
                    datasource,
                    "email/role_extended.txt",
                    app.config,
                )
                flash(msg, "info")
            clean_fulfilled_requests(session)
        else:
            flash(__("You have no permission to approve this request"), "danger")
            return redirect("/accessrequestsmodelview/list/")

        # Delete the now-fulfilled request rows for this user/datasource.
        for r in requests:
            session.delete(r)
        session.commit()
        return redirect("/accessrequestsmodelview/list/")
  483. def get_viz(
  484. self,
  485. slice_id=None,
  486. form_data=None,
  487. datasource_type=None,
  488. datasource_id=None,
  489. force=False,
  490. ):
  491. if slice_id:
  492. slc = db.session.query(Slice).filter_by(id=slice_id).one()
  493. return slc.get_viz()
  494. else:
  495. viz_type = form_data.get("viz_type", "table")
  496. datasource = ConnectorRegistry.get_datasource(
  497. datasource_type, datasource_id, db.session
  498. )
  499. viz_obj = viz.viz_types[viz_type](
  500. datasource, form_data=form_data, force=force
  501. )
  502. return viz_obj
  503. @has_access
  504. @expose("/slice/<slice_id>/")
  505. def slice(self, slice_id):
  506. form_data, slc = get_form_data(slice_id, use_slice_data=True)
  507. if not slc:
  508. abort(404)
  509. endpoint = "/superset/explore/?form_data={}".format(
  510. parse.quote(json.dumps({"slice_id": slice_id}))
  511. )
  512. param = utils.ReservedUrlParameters.STANDALONE.value
  513. if request.args.get(param) == "true":
  514. endpoint += f"&{param}=true"
  515. return redirect(endpoint)
  516. def get_query_string_response(self, viz_obj):
  517. query = None
  518. try:
  519. query_obj = viz_obj.query_obj()
  520. if query_obj:
  521. query = viz_obj.datasource.get_query_str(query_obj)
  522. except Exception as e:
  523. logger.exception(e)
  524. return json_error_response(e)
  525. if not query:
  526. query = "No query."
  527. return self.json_response(
  528. {"query": query, "language": viz_obj.datasource.query_language}
  529. )
  530. def get_raw_results(self, viz_obj):
  531. return self.json_response(
  532. {"data": viz_obj.get_df_payload()["df"].to_dict("records")}
  533. )
  534. def get_samples(self, viz_obj):
  535. return self.json_response({"data": viz_obj.get_samples()})
  536. def generate_json(
  537. self, viz_obj, csv=False, query=False, results=False, samples=False
  538. ):
  539. if csv:
  540. return CsvResponse(
  541. viz_obj.get_csv(),
  542. status=200,
  543. headers=generate_download_headers("csv"),
  544. mimetype="application/csv",
  545. )
  546. if query:
  547. return self.get_query_string_response(viz_obj)
  548. if results:
  549. return self.get_raw_results(viz_obj)
  550. if samples:
  551. return self.get_samples(viz_obj)
  552. payload = viz_obj.get_payload()
  553. return data_payload_response(*viz_obj.payload_json_and_has_error(payload))
  554. @event_logger.log_this
  555. @api
  556. @has_access_api
  557. @expose("/slice_json/<slice_id>")
  558. @etag_cache(CACHE_DEFAULT_TIMEOUT, check_perms=check_slice_perms)
  559. def slice_json(self, slice_id):
  560. form_data, slc = get_form_data(slice_id, use_slice_data=True)
  561. datasource_type = slc.datasource.type
  562. datasource_id = slc.datasource.id
  563. viz_obj = get_viz(
  564. datasource_type=datasource_type,
  565. datasource_id=datasource_id,
  566. form_data=form_data,
  567. force=False,
  568. )
  569. return self.generate_json(viz_obj)
  570. @event_logger.log_this
  571. @api
  572. @has_access_api
  573. @expose("/annotation_json/<layer_id>")
  574. def annotation_json(self, layer_id):
  575. form_data = get_form_data()[0]
  576. form_data["layer_id"] = layer_id
  577. form_data["filters"] = [{"col": "layer_id", "op": "==", "val": layer_id}]
  578. datasource = AnnotationDatasource()
  579. viz_obj = viz.viz_types["table"](datasource, form_data=form_data, force=False)
  580. payload = viz_obj.get_payload()
  581. return data_payload_response(*viz_obj.payload_json_and_has_error(payload))
    # explore_json also accepts GET unless CSRF protection for it is
    # explicitly enabled via the ENABLE_EXPLORE_JSON_CSRF_PROTECTION flag.
    EXPLORE_JSON_METHODS = ["POST"]
    if not is_feature_enabled("ENABLE_EXPLORE_JSON_CSRF_PROTECTION"):
        EXPLORE_JSON_METHODS.append("GET")
  585. @event_logger.log_this
  586. @api
  587. @has_access_api
  588. @handle_api_exception
  589. @expose(
  590. "/explore_json/<datasource_type>/<datasource_id>/", methods=EXPLORE_JSON_METHODS
  591. )
  592. @expose("/explore_json/", methods=EXPLORE_JSON_METHODS)
  593. @etag_cache(CACHE_DEFAULT_TIMEOUT, check_perms=check_datasource_perms)
  594. def explore_json(self, datasource_type=None, datasource_id=None):
  595. """Serves all request that GET or POST form_data
  596. This endpoint evolved to be the entry point of many different
  597. requests that GETs or POSTs a form_data.
  598. `self.generate_json` receives this input and returns different
  599. payloads based on the request args in the first block
  600. TODO: break into one endpoint for each return shape"""
  601. csv = request.args.get("csv") == "true"
  602. query = request.args.get("query") == "true"
  603. results = request.args.get("results") == "true"
  604. samples = request.args.get("samples") == "true"
  605. force = request.args.get("force") == "true"
  606. form_data = get_form_data()[0]
  607. try:
  608. datasource_id, datasource_type = get_datasource_info(
  609. datasource_id, datasource_type, form_data
  610. )
  611. except SupersetException as e:
  612. return json_error_response(utils.error_msg_from_exception(e))
  613. viz_obj = get_viz(
  614. datasource_type=datasource_type,
  615. datasource_id=datasource_id,
  616. form_data=form_data,
  617. force=force,
  618. )
  619. return self.generate_json(
  620. viz_obj, csv=csv, query=query, results=results, samples=samples
  621. )
  622. @event_logger.log_this
  623. @has_access
  624. @expose("/import_dashboards", methods=["GET", "POST"])
  625. def import_dashboards(self):
  626. """Overrides the dashboards using json instances from the file."""
  627. f = request.files.get("file")
  628. if request.method == "POST" and f:
  629. try:
  630. dashboard_import_export.import_dashboards(db.session, f.stream)
  631. except DatabaseNotFound as e:
  632. flash(
  633. _(
  634. "Cannot import dashboard: %(db_error)s.\n"
  635. "Make sure to create the database before "
  636. "importing the dashboard.",
  637. db_error=e,
  638. ),
  639. "danger",
  640. )
  641. except Exception as e:
  642. logger.exception(e)
  643. flash(
  644. _(
  645. "An unknown error occurred. "
  646. "Please contact your Superset administrator"
  647. ),
  648. "danger",
  649. )
  650. return redirect("/dashboard/list/")
  651. return self.render_template("superset/import_dashboards.html")
@event_logger.log_this
@has_access
@expose("/explore/<datasource_type>/<datasource_id>/", methods=["GET", "POST"])
@expose("/explore/", methods=["GET", "POST"])
def explore(self, datasource_type=None, datasource_id=None):
    """Render the chart explorer page.

    Resolves the datasource from the URL or the posted form_data, runs
    access checks, dispatches save/overwrite actions, and otherwise
    renders the explore frontend with a bootstrap payload.
    """
    user_id = g.user.get_id() if g.user else None
    form_data, slc = get_form_data(use_slice_data=True)
    # Flash the SIP-15 message if the slice is owned by the current user and has not
    # been updated, i.e., is not using the [start, end) interval.
    if (
        config["SIP_15_ENABLED"]
        and slc
        and g.user in slc.owners
        and (
            not form_data.get("time_range_endpoints")
            or form_data["time_range_endpoints"]
            != (
                utils.TimeRangeEndpoint.INCLUSIVE,
                utils.TimeRangeEndpoint.EXCLUSIVE,
            )
        )
    ):
        # Link the user to an explore URL that pre-sets the [start, end) endpoints
        url = Href("/superset/explore/")(
            {
                "form_data": json.dumps(
                    {
                        "slice_id": slc.id,
                        "time_range_endpoints": (
                            utils.TimeRangeEndpoint.INCLUSIVE.value,
                            utils.TimeRangeEndpoint.EXCLUSIVE.value,
                        ),
                    }
                )
            }
        )
        flash(Markup(config["SIP_15_TOAST_MESSAGE"].format(url=url)))
    error_redirect = "/chart/list/"
    try:
        datasource_id, datasource_type = get_datasource_info(
            datasource_id, datasource_type, form_data
        )
    except SupersetException:
        return redirect(error_redirect)
    datasource = ConnectorRegistry.get_datasource(
        datasource_type, datasource_id, db.session
    )
    if not datasource:
        flash(DATASOURCE_MISSING_ERR, "danger")
        return redirect(error_redirect)
    # With ENABLE_ACCESS_REQUEST, unauthorized users are routed to the
    # access-request flow instead of a plain error redirect.
    if config["ENABLE_ACCESS_REQUEST"] and (
        not security_manager.datasource_access(datasource)
    ):
        flash(
            __(security_manager.get_datasource_access_error_msg(datasource)),
            "danger",
        )
        return redirect(
            "superset/request_access/?"
            f"datasource_type={datasource_type}&"
            f"datasource_id={datasource_id}&"
        )
    viz_type = form_data.get("viz_type")
    # No viz selected: fall through to the datasource's default endpoint, if any
    if not viz_type and datasource.default_endpoint:
        return redirect(datasource.default_endpoint)
    # slc perms
    slice_add_perm = security_manager.can_access("can_add", "SliceModelView")
    slice_overwrite_perm = is_owner(slc, g.user)
    slice_download_perm = security_manager.can_access(
        "can_download", "SliceModelView"
    )
    form_data["datasource"] = str(datasource_id) + "__" + datasource_type
    # On explore, merge legacy and extra filters into the form data
    utils.convert_legacy_filters_into_adhoc(form_data)
    utils.merge_extra_filters(form_data)
    # merge request url params
    if request.method == "GET":
        utils.merge_request_params(form_data, request.args)
    # handle save or overwrite
    action = request.args.get("action")
    if action == "overwrite" and not slice_overwrite_perm:
        return json_error_response(
            _("You don't have the rights to ") + _("alter this ") + _("chart"),
            status=400,
        )
    if action == "saveas" and not slice_add_perm:
        return json_error_response(
            _("You don't have the rights to ") + _("create a ") + _("chart"),
            status=400,
        )
    if action in ("saveas", "overwrite"):
        return self.save_or_overwrite_slice(
            request.args,
            slc,
            slice_add_perm,
            slice_overwrite_perm,
            slice_download_perm,
            datasource_id,
            datasource_type,
            datasource.name,
        )
    standalone = (
        request.args.get(utils.ReservedUrlParameters.STANDALONE.value) == "true"
    )
    # Bootstrap payload handed to the frontend explore application
    bootstrap_data = {
        "can_add": slice_add_perm,
        "can_download": slice_download_perm,
        "can_overwrite": slice_overwrite_perm,
        "datasource": datasource.data,
        "form_data": form_data,
        "datasource_id": datasource_id,
        "datasource_type": datasource_type,
        "slice": slc.data if slc else None,
        "standalone": standalone,
        "user_id": user_id,
        "forced_height": request.args.get("height"),
        "common": common_bootstrap_payload(),
    }
    table_name = (
        datasource.table_name
        if datasource_type == "table"
        else datasource.datasource_name
    )
    if slc:
        title = slc.slice_name
    else:
        title = _("Explore - %(table)s", table=table_name)
    return self.render_template(
        "superset/basic.html",
        bootstrap_data=json.dumps(
            bootstrap_data, default=utils.pessimistic_json_iso_dttm_ser
        ),
        entry="explore",
        title=title,
        standalone_mode=standalone,
    )
  787. @api
  788. @handle_api_exception
  789. @has_access_api
  790. @expose("/filter/<datasource_type>/<datasource_id>/<column>/")
  791. def filter(self, datasource_type, datasource_id, column):
  792. """
  793. Endpoint to retrieve values for specified column.
  794. :param datasource_type: Type of datasource e.g. table
  795. :param datasource_id: Datasource id
  796. :param column: Column name to retrieve values for
  797. :return:
  798. """
  799. # TODO: Cache endpoint by user, datasource and column
  800. datasource = ConnectorRegistry.get_datasource(
  801. datasource_type, datasource_id, db.session
  802. )
  803. if not datasource:
  804. return json_error_response(DATASOURCE_MISSING_ERR)
  805. security_manager.assert_datasource_permission(datasource)
  806. payload = json.dumps(
  807. datasource.values_for_column(column, config["FILTER_SELECT_ROW_LIMIT"]),
  808. default=utils.json_int_dttm_ser,
  809. )
  810. return json_success(payload)
  811. def save_or_overwrite_slice(
  812. self,
  813. args,
  814. slc,
  815. slice_add_perm,
  816. slice_overwrite_perm,
  817. slice_download_perm,
  818. datasource_id,
  819. datasource_type,
  820. datasource_name,
  821. ):
  822. """Save or overwrite a slice"""
  823. slice_name = args.get("slice_name")
  824. action = args.get("action")
  825. form_data = get_form_data()[0]
  826. if action in ("saveas"):
  827. if "slice_id" in form_data:
  828. form_data.pop("slice_id") # don't save old slice_id
  829. slc = Slice(owners=[g.user] if g.user else [])
  830. slc.params = json.dumps(form_data, indent=2, sort_keys=True)
  831. slc.datasource_name = datasource_name
  832. slc.viz_type = form_data["viz_type"]
  833. slc.datasource_type = datasource_type
  834. slc.datasource_id = datasource_id
  835. slc.slice_name = slice_name
  836. if action in ("saveas") and slice_add_perm:
  837. self.save_slice(slc)
  838. elif action == "overwrite" and slice_overwrite_perm:
  839. self.overwrite_slice(slc)
  840. # Adding slice to a dashboard if requested
  841. dash = None
  842. if request.args.get("add_to_dash") == "existing":
  843. dash = (
  844. db.session.query(Dashboard)
  845. .filter_by(id=int(request.args.get("save_to_dashboard_id")))
  846. .one()
  847. )
  848. # check edit dashboard permissions
  849. dash_overwrite_perm = check_ownership(dash, raise_if_false=False)
  850. if not dash_overwrite_perm:
  851. return json_error_response(
  852. _("You don't have the rights to ")
  853. + _("alter this ")
  854. + _("dashboard"),
  855. status=400,
  856. )
  857. flash(
  858. _("Chart [{}] was added to dashboard [{}]").format(
  859. slc.slice_name, dash.dashboard_title
  860. ),
  861. "info",
  862. )
  863. elif request.args.get("add_to_dash") == "new":
  864. # check create dashboard permissions
  865. dash_add_perm = security_manager.can_access("can_add", "DashboardModelView")
  866. if not dash_add_perm:
  867. return json_error_response(
  868. _("You don't have the rights to ")
  869. + _("create a ")
  870. + _("dashboard"),
  871. status=400,
  872. )
  873. dash = Dashboard(
  874. dashboard_title=request.args.get("new_dashboard_name"),
  875. owners=[g.user] if g.user else [],
  876. )
  877. flash(
  878. _(
  879. "Dashboard [{}] just got created and chart [{}] was added " "to it"
  880. ).format(dash.dashboard_title, slc.slice_name),
  881. "info",
  882. )
  883. if dash and slc not in dash.slices:
  884. dash.slices.append(slc)
  885. db.session.commit()
  886. response = {
  887. "can_add": slice_add_perm,
  888. "can_download": slice_download_perm,
  889. "can_overwrite": is_owner(slc, g.user),
  890. "form_data": slc.form_data,
  891. "slice": slc.data,
  892. "dashboard_id": dash.id if dash else None,
  893. }
  894. if request.args.get("goto_dash") == "true":
  895. response.update({"dashboard": dash.url})
  896. return json_success(json.dumps(response))
  897. def save_slice(self, slc):
  898. session = db.session()
  899. msg = _("Chart [{}] has been saved").format(slc.slice_name)
  900. session.add(slc)
  901. session.commit()
  902. flash(msg, "info")
  903. def overwrite_slice(self, slc):
  904. session = db.session()
  905. session.merge(slc)
  906. session.commit()
  907. msg = _("Chart [{}] has been overwritten").format(slc.slice_name)
  908. flash(msg, "info")
  909. @api
  910. @has_access_api
  911. @expose("/schemas/<db_id>/")
  912. @expose("/schemas/<db_id>/<force_refresh>/")
  913. def schemas(self, db_id, force_refresh="false"):
  914. db_id = int(db_id)
  915. force_refresh = force_refresh.lower() == "true"
  916. database = db.session.query(models.Database).get(db_id)
  917. if database:
  918. schemas = database.get_all_schema_names(
  919. cache=database.schema_cache_enabled,
  920. cache_timeout=database.schema_cache_timeout,
  921. force=force_refresh,
  922. )
  923. schemas = security_manager.schemas_accessible_by_user(database, schemas)
  924. else:
  925. schemas = []
  926. return Response(json.dumps({"schemas": schemas}), mimetype="application/json")
@api
@has_access_api
@expose("/tables/<int:db_id>/<schema>/<substr>/")
@expose("/tables/<int:db_id>/<schema>/<substr>/<force_refresh>/")
def tables(
    self, db_id: int, schema: str, substr: str, force_refresh: str = "false"
):
    """Endpoint to fetch the list of tables for given database

    :param db_id: database id
    :param schema: schema name, or a JS "null"/"undefined" placeholder
    :param substr: substring to search table/view labels for, or a placeholder
    :param force_refresh: "true" to bypass the table-name cache
    """
    # Guarantees database filtering by security access
    query = db.session.query(models.Database)
    query = DatabaseFilter("id", SQLAInterface(models.Database, db.session)).apply(
        query, None
    )
    database = query.filter_by(id=db_id).one_or_none()
    if not database:
        return json_error_response("Not found", 404)
    force_refresh_parsed = force_refresh.lower() == "true"
    # parse_js_uri_path_item turns "null"/"undefined" path items into None
    schema_parsed = utils.parse_js_uri_path_item(schema, eval_undefined=True)
    substr_parsed = utils.parse_js_uri_path_item(substr, eval_undefined=True)
    if schema_parsed:
        # Single-schema listing, honouring the database's cache settings
        tables = (
            database.get_all_table_names_in_schema(
                schema=schema_parsed,
                force=force_refresh_parsed,
                cache=database.table_cache_enabled,
                cache_timeout=database.table_cache_timeout,
            )
            or []
        )
        views = (
            database.get_all_view_names_in_schema(
                schema=schema_parsed,
                force=force_refresh_parsed,
                cache=database.table_cache_enabled,
                cache_timeout=database.table_cache_timeout,
            )
            or []
        )
    else:
        # No schema given: list the whole database, cached for a day
        tables = database.get_all_table_names_in_database(
            cache=True, force=False, cache_timeout=24 * 60 * 60
        )
        views = database.get_all_view_names_in_database(
            cache=True, force=False, cache_timeout=24 * 60 * 60
        )
    # Drop datasources the current user is not allowed to access
    tables = security_manager.get_datasources_accessible_by_user(
        database, tables, schema_parsed
    )
    views = security_manager.get_datasources_accessible_by_user(
        database, views, schema_parsed
    )

    def get_datasource_label(ds_name: utils.DatasourceName) -> str:
        # Qualify with the schema only when no schema filter is active
        return (
            ds_name.table if schema_parsed else f"{ds_name.schema}.{ds_name.table}"
        )

    if substr_parsed:
        tables = [tn for tn in tables if substr_parsed in get_datasource_label(tn)]
        views = [vn for vn in views if substr_parsed in get_datasource_label(vn)]
    if not schema_parsed and database.default_schemas:
        # assumes the user's personal schema is the local part of their
        # email address — TODO confirm this convention holds for deployment
        user_schema = g.user.email.split("@")[0]
        valid_schemas = set(database.default_schemas + [user_schema])
        tables = [tn for tn in tables if tn.schema in valid_schemas]
        views = [vn for vn in views if vn.schema in valid_schemas]
    max_items = config["MAX_TABLE_NAMES"] or len(tables)
    total_items = len(tables) + len(views)
    max_tables = len(tables)
    max_views = len(views)
    if total_items and substr_parsed:
        # When searching, cap the result size at max_items and split the
        # budget between tables and views proportionally to their counts
        max_tables = max_items * len(tables) // total_items
        max_views = max_items * len(views) // total_items
    table_options = [
        {
            "value": tn.table,
            "schema": tn.schema,
            "label": get_datasource_label(tn),
            "title": get_datasource_label(tn),
            "type": "table",
        }
        for tn in tables[:max_tables]
    ]
    table_options.extend(
        [
            {
                "value": vn.table,
                "schema": vn.schema,
                "label": get_datasource_label(vn),
                "title": get_datasource_label(vn),
                "type": "view",
            }
            for vn in views[:max_views]
        ]
    )
    table_options.sort(key=lambda value: value["label"])
    payload = {"tableLength": len(tables) + len(views), "options": table_options}
    return json_success(json.dumps(payload))
@api
@has_access_api
@expose("/copy_dash/<dashboard_id>/", methods=["GET", "POST"])
def copy_dash(self, dashboard_id):
    """Copy dashboard

    Creates a new dashboard from the posted metadata; optionally clones the
    original's slices and rewrites the layout's chart ids to the clones.
    """
    session = db.session()
    data = json.loads(request.form.get("data"))
    dash = models.Dashboard()
    original_dash = session.query(Dashboard).get(dashboard_id)
    dash.owners = [g.user] if g.user else []
    dash.dashboard_title = data["dashboard_title"]
    if data["duplicate_slices"]:
        # Duplicating slices as well, mapping old ids to new ones
        old_to_new_sliceids = {}
        for slc in original_dash.slices:
            new_slice = slc.clone()
            new_slice.owners = [g.user] if g.user else []
            session.add(new_slice)
            session.flush()  # flush so the clone's id is assigned
            new_slice.dashboards.append(dash)
            old_to_new_sliceids["{}".format(slc.id)] = "{}".format(new_slice.id)
        # update chartId of layout entities
        # in v2_dash positions json data, chartId should be integer,
        # while in older version slice_id is string type
        for value in data["positions"].values():
            if (
                isinstance(value, dict)
                and value.get("meta")
                and value.get("meta").get("chartId")
            ):
                old_id = "{}".format(value.get("meta").get("chartId"))
                new_id = int(old_to_new_sliceids[old_id])
                value["meta"]["chartId"] = new_id
    else:
        # Share the original's slices instead of cloning them
        dash.slices = original_dash.slices
    dash.params = original_dash.params
    self._set_dash_metadata(dash, data)
    session.add(dash)
    session.commit()
    dash_json = json.dumps(dash.data)
    session.close()
    return json_success(dash_json)
  1064. @api
  1065. @has_access_api
  1066. @expose("/save_dash/<dashboard_id>/", methods=["GET", "POST"])
  1067. def save_dash(self, dashboard_id):
  1068. """Save a dashboard's metadata"""
  1069. session = db.session()
  1070. dash = session.query(Dashboard).get(dashboard_id)
  1071. check_ownership(dash, raise_if_false=True)
  1072. data = json.loads(request.form.get("data"))
  1073. self._set_dash_metadata(dash, data)
  1074. session.merge(dash)
  1075. session.commit()
  1076. session.close()
  1077. return json_success(json.dumps({"status": "SUCCESS"}))
@staticmethod
def _set_dash_metadata(dashboard, data):
    """Apply layout and metadata from a frontend payload onto ``dashboard``.

    Mutates the dashboard's slices, position_json, css, title and
    json_metadata in place; the caller is responsible for committing.
    """
    positions = data["positions"]
    # find slices in the position data
    slice_ids = []
    slice_id_to_name = {}
    for value in positions.values():
        if isinstance(value, dict):
            try:
                slice_id = value["meta"]["chartId"]
                slice_ids.append(slice_id)
                slice_id_to_name[slice_id] = value["meta"]["sliceName"]
            except KeyError:
                # layout entity that is not a chart component; skip it
                pass
    session = db.session()
    current_slices = session.query(Slice).filter(Slice.id.in_(slice_ids)).all()
    dashboard.slices = current_slices
    # update slice names. this assumes user has permissions to update the slice
    # we allow user set slice name be empty string
    for slc in dashboard.slices:
        try:
            new_name = slice_id_to_name[slc.id]
            if slc.slice_name != new_name:
                slc.slice_name = new_name
                session.merge(slc)
                session.flush()
        except KeyError:
            pass
    # remove leading and trailing white spaces in the dumped json
    dashboard.position_json = json.dumps(
        positions, indent=None, separators=(",", ":"), sort_keys=True
    )
    md = dashboard.params_dict
    dashboard.css = data.get("css")
    dashboard.dashboard_title = data["dashboard_title"]
    if "timed_refresh_immune_slices" not in md:
        md["timed_refresh_immune_slices"] = []
    if "filter_scopes" in data:
        md["filter_scopes"] = json.loads(data["filter_scopes"] or "{}")
    md["expanded_slices"] = data["expanded_slices"]
    md["refresh_frequency"] = data.get("refresh_frequency", 0)
    # keep only the default filters whose slice is still on the dashboard
    default_filters_data = json.loads(data.get("default_filters", "{}"))
    applicable_filters = {
        key: v for key, v in default_filters_data.items() if int(key) in slice_ids
    }
    md["default_filters"] = json.dumps(applicable_filters)
    if data.get("color_namespace"):
        md["color_namespace"] = data.get("color_namespace")
    if data.get("color_scheme"):
        md["color_scheme"] = data.get("color_scheme")
    if data.get("label_colors"):
        md["label_colors"] = data.get("label_colors")
    dashboard.json_metadata = json.dumps(md)
  1131. @api
  1132. @has_access_api
  1133. @expose("/add_slices/<dashboard_id>/", methods=["POST"])
  1134. def add_slices(self, dashboard_id):
  1135. """Add and save slices to a dashboard"""
  1136. data = json.loads(request.form.get("data"))
  1137. session = db.session()
  1138. dash = session.query(Dashboard).get(dashboard_id)
  1139. check_ownership(dash, raise_if_false=True)
  1140. new_slices = session.query(Slice).filter(Slice.id.in_(data["slice_ids"]))
  1141. dash.slices += new_slices
  1142. session.merge(dash)
  1143. session.commit()
  1144. session.close()
  1145. return "SLICES ADDED"
  1146. @api
  1147. @has_access_api
  1148. @expose("/testconn", methods=["POST", "GET"])
  1149. def testconn(self):
  1150. """Tests a sqla connection"""
  1151. try:
  1152. db_name = request.json.get("name")
  1153. uri = request.json.get("uri")
  1154. # if the database already exists in the database, only its safe (password-masked) URI
  1155. # would be shown in the UI and would be passed in the form data.
  1156. # so if the database already exists and the form was submitted with the safe URI,
  1157. # we assume we should retrieve the decrypted URI to test the connection.
  1158. if db_name:
  1159. existing_database = (
  1160. db.session.query(models.Database)
  1161. .filter_by(database_name=db_name)
  1162. .one_or_none()
  1163. )
  1164. if existing_database and uri == existing_database.safe_sqlalchemy_uri():
  1165. uri = existing_database.sqlalchemy_uri_decrypted
  1166. # this is the database instance that will be tested
  1167. database = models.Database(
  1168. # extras is sent as json, but required to be a string in the Database model
  1169. extra=json.dumps(request.json.get("extras", {})),
  1170. impersonate_user=request.json.get("impersonate_user"),
  1171. encrypted_extra=json.dumps(request.json.get("encrypted_extra", {})),
  1172. )
  1173. database.set_sqlalchemy_uri(uri)
  1174. username = g.user.username if g.user is not None else None
  1175. engine = database.get_sqla_engine(user_name=username)
  1176. with closing(engine.connect()) as conn:
  1177. conn.scalar(select([1]))
  1178. return json_success('"OK"')
  1179. except Exception as e:
  1180. logger.exception(e)
  1181. return json_error_response(
  1182. "Connection failed!\n\n" f"The error message returned was:\n{e}", 400
  1183. )
  1184. @api
  1185. @has_access_api
  1186. @expose("/recent_activity/<user_id>/", methods=["GET"])
  1187. def recent_activity(self, user_id):
  1188. """Recent activity (actions) for a given user"""
  1189. M = models
  1190. if request.args.get("limit"):
  1191. limit = int(request.args.get("limit"))
  1192. else:
  1193. limit = 1000
  1194. qry = (
  1195. db.session.query(M.Log, M.Dashboard, Slice)
  1196. .outerjoin(M.Dashboard, M.Dashboard.id == M.Log.dashboard_id)
  1197. .outerjoin(Slice, Slice.id == M.Log.slice_id)
  1198. .filter(
  1199. and_(
  1200. ~M.Log.action.in_(("queries", "shortner", "sql_json")),
  1201. M.Log.user_id == user_id,
  1202. )
  1203. )
  1204. .order_by(M.Log.dttm.desc())
  1205. .limit(limit)
  1206. )
  1207. payload = []
  1208. for log in qry.all():
  1209. item_url = None
  1210. item_title = None
  1211. if log.Dashboard:
  1212. item_url = log.Dashboard.url
  1213. item_title = log.Dashboard.dashboard_title
  1214. elif log.Slice:
  1215. item_url = log.Slice.slice_url
  1216. item_title = log.Slice.slice_name
  1217. payload.append(
  1218. {
  1219. "action": log.Log.action,
  1220. "item_url": item_url,
  1221. "item_title": item_title,
  1222. "time": log.Log.dttm,
  1223. }
  1224. )
  1225. return json_success(json.dumps(payload, default=utils.json_int_dttm_ser))
  1226. @api
  1227. @has_access_api
  1228. @expose("/csrf_token/", methods=["GET"])
  1229. def csrf_token(self):
  1230. return Response(
  1231. self.render_template("superset/csrf_token.json"), mimetype="text/json"
  1232. )
  1233. @api
  1234. @has_access_api
  1235. @expose("/available_domains/", methods=["GET"])
  1236. def available_domains(self):
  1237. """
  1238. Returns the list of available Superset Webserver domains (if any)
  1239. defined in config. This enables charts embedded in other apps to
  1240. leverage domain sharding if appropriately configured.
  1241. """
  1242. return Response(
  1243. json.dumps(conf.get("SUPERSET_WEBSERVER_DOMAINS")), mimetype="text/json"
  1244. )
  1245. @api
  1246. @has_access_api
  1247. @expose("/fave_dashboards_by_username/<username>/", methods=["GET"])
  1248. def fave_dashboards_by_username(self, username):
  1249. """This lets us use a user's username to pull favourite dashboards"""
  1250. user = security_manager.find_user(username=username)
  1251. return self.fave_dashboards(user.get_id())
  1252. @api
  1253. @has_access_api
  1254. @expose("/fave_dashboards/<user_id>/", methods=["GET"])
  1255. def fave_dashboards(self, user_id):
  1256. qry = (
  1257. db.session.query(Dashboard, models.FavStar.dttm)
  1258. .join(
  1259. models.FavStar,
  1260. and_(
  1261. models.FavStar.user_id == int(user_id),
  1262. models.FavStar.class_name == "Dashboard",
  1263. Dashboard.id == models.FavStar.obj_id,
  1264. ),
  1265. )
  1266. .order_by(models.FavStar.dttm.desc())
  1267. )
  1268. payload = []
  1269. for o in qry.all():
  1270. d = {
  1271. "id": o.Dashboard.id,
  1272. "dashboard": o.Dashboard.dashboard_link(),
  1273. "title": o.Dashboard.dashboard_title,
  1274. "url": o.Dashboard.url,
  1275. "dttm": o.dttm,
  1276. }
  1277. if o.Dashboard.created_by:
  1278. user = o.Dashboard.created_by
  1279. d["creator"] = str(user)
  1280. d["creator_url"] = "/superset/profile/{}/".format(user.username)
  1281. payload.append(d)
  1282. return json_success(json.dumps(payload, default=utils.json_int_dttm_ser))
  1283. @api
  1284. @has_access_api
  1285. @expose("/created_dashboards/<user_id>/", methods=["GET"])
  1286. def created_dashboards(self, user_id):
  1287. Dash = Dashboard
  1288. qry = (
  1289. db.session.query(Dash)
  1290. .filter(or_(Dash.created_by_fk == user_id, Dash.changed_by_fk == user_id))
  1291. .order_by(Dash.changed_on.desc())
  1292. )
  1293. payload = [
  1294. {
  1295. "id": o.id,
  1296. "dashboard": o.dashboard_link(),
  1297. "title": o.dashboard_title,
  1298. "url": o.url,
  1299. "dttm": o.changed_on,
  1300. }
  1301. for o in qry.all()
  1302. ]
  1303. return json_success(json.dumps(payload, default=utils.json_int_dttm_ser))
  1304. @api
  1305. @has_access_api
  1306. @expose("/user_slices", methods=["GET"])
  1307. @expose("/user_slices/<user_id>/", methods=["GET"])
  1308. def user_slices(self, user_id=None):
  1309. """List of slices a user created, or faved"""
  1310. if not user_id:
  1311. user_id = g.user.id
  1312. FavStar = models.FavStar
  1313. qry = (
  1314. db.session.query(Slice, FavStar.dttm)
  1315. .join(
  1316. models.FavStar,
  1317. and_(
  1318. models.FavStar.user_id == int(user_id),
  1319. models.FavStar.class_name == "slice",
  1320. Slice.id == models.FavStar.obj_id,
  1321. ),
  1322. isouter=True,
  1323. )
  1324. .filter(
  1325. or_(
  1326. Slice.created_by_fk == user_id,
  1327. Slice.changed_by_fk == user_id,
  1328. FavStar.user_id == user_id,
  1329. )
  1330. )
  1331. .order_by(Slice.slice_name.asc())
  1332. )
  1333. payload = [
  1334. {
  1335. "id": o.Slice.id,
  1336. "title": o.Slice.slice_name,
  1337. "url": o.Slice.slice_url,
  1338. "data": o.Slice.form_data,
  1339. "dttm": o.dttm if o.dttm else o.Slice.changed_on,
  1340. "viz_type": o.Slice.viz_type,
  1341. }
  1342. for o in qry.all()
  1343. ]
  1344. return json_success(json.dumps(payload, default=utils.json_int_dttm_ser))
  1345. @api
  1346. @has_access_api
  1347. @expose("/created_slices", methods=["GET"])
  1348. @expose("/created_slices/<user_id>/", methods=["GET"])
  1349. def created_slices(self, user_id=None):
  1350. """List of slices created by this user"""
  1351. if not user_id:
  1352. user_id = g.user.id
  1353. qry = (
  1354. db.session.query(Slice)
  1355. .filter(or_(Slice.created_by_fk == user_id, Slice.changed_by_fk == user_id))
  1356. .order_by(Slice.changed_on.desc())
  1357. )
  1358. payload = [
  1359. {
  1360. "id": o.id,
  1361. "title": o.slice_name,
  1362. "url": o.slice_url,
  1363. "dttm": o.changed_on,
  1364. "viz_type": o.viz_type,
  1365. }
  1366. for o in qry.all()
  1367. ]
  1368. return json_success(json.dumps(payload, default=utils.json_int_dttm_ser))
  1369. @api
  1370. @has_access_api
  1371. @expose("/fave_slices", methods=["GET"])
  1372. @expose("/fave_slices/<user_id>/", methods=["GET"])
  1373. def fave_slices(self, user_id=None):
  1374. """Favorite slices for a user"""
  1375. if not user_id:
  1376. user_id = g.user.id
  1377. qry = (
  1378. db.session.query(Slice, models.FavStar.dttm)
  1379. .join(
  1380. models.FavStar,
  1381. and_(
  1382. models.FavStar.user_id == int(user_id),
  1383. models.FavStar.class_name == "slice",
  1384. Slice.id == models.FavStar.obj_id,
  1385. ),
  1386. )
  1387. .order_by(models.FavStar.dttm.desc())
  1388. )
  1389. payload = []
  1390. for o in qry.all():
  1391. d = {
  1392. "id": o.Slice.id,
  1393. "title": o.Slice.slice_name,
  1394. "url": o.Slice.slice_url,
  1395. "dttm": o.dttm,
  1396. "viz_type": o.Slice.viz_type,
  1397. }
  1398. if o.Slice.created_by:
  1399. user = o.Slice.created_by
  1400. d["creator"] = str(user)
  1401. d["creator_url"] = "/superset/profile/{}/".format(user.username)
  1402. payload.append(d)
  1403. return json_success(json.dumps(payload, default=utils.json_int_dttm_ser))
  1404. @api
  1405. @has_access_api
  1406. @expose("/warm_up_cache/", methods=["GET"])
  1407. def warm_up_cache(self):
  1408. """Warms up the cache for the slice or table.
  1409. Note for slices a force refresh occurs.
  1410. """
  1411. slices = None
  1412. session = db.session()
  1413. slice_id = request.args.get("slice_id")
  1414. table_name = request.args.get("table_name")
  1415. db_name = request.args.get("db_name")
  1416. if not slice_id and not (table_name and db_name):
  1417. return json_error_response(
  1418. __(
  1419. "Malformed request. slice_id or table_name and db_name "
  1420. "arguments are expected"
  1421. ),
  1422. status=400,
  1423. )
  1424. if slice_id:
  1425. slices = session.query(Slice).filter_by(id=slice_id).all()
  1426. if not slices:
  1427. return json_error_response(
  1428. __("Chart %(id)s not found", id=slice_id), status=404
  1429. )
  1430. elif table_name and db_name:
  1431. SqlaTable = ConnectorRegistry.sources["table"]
  1432. table = (
  1433. session.query(SqlaTable)
  1434. .join(models.Database)
  1435. .filter(
  1436. models.Database.database_name == db_name
  1437. or SqlaTable.table_name == table_name
  1438. )
  1439. ).one_or_none()
  1440. if not table:
  1441. return json_error_response(
  1442. __(
  1443. "Table %(t)s wasn't found in the database %(d)s",
  1444. t=table_name,
  1445. s=db_name,
  1446. ),
  1447. status=404,
  1448. )
  1449. slices = (
  1450. session.query(Slice)
  1451. .filter_by(datasource_id=table.id, datasource_type=table.type)
  1452. .all()
  1453. )
  1454. for slc in slices:
  1455. try:
  1456. form_data = get_form_data(slc.id, use_slice_data=True)[0]
  1457. obj = get_viz(
  1458. datasource_type=slc.datasource.type,
  1459. datasource_id=slc.datasource.id,
  1460. form_data=form_data,
  1461. force=True,
  1462. )
  1463. obj.get_json()
  1464. except Exception as e:
  1465. logger.exception("Failed to warm up cache")
  1466. return json_error_response(utils.error_msg_from_exception(e))
  1467. return json_success(
  1468. json.dumps(
  1469. [{"slice_id": slc.id, "slice_name": slc.slice_name} for slc in slices]
  1470. )
  1471. )
  1472. @has_access_api
  1473. @expose("/favstar/<class_name>/<obj_id>/<action>/")
  1474. def favstar(self, class_name, obj_id, action):
  1475. """Toggle favorite stars on Slices and Dashboard"""
  1476. session = db.session()
  1477. FavStar = models.FavStar
  1478. count = 0
  1479. favs = (
  1480. session.query(FavStar)
  1481. .filter_by(class_name=class_name, obj_id=obj_id, user_id=g.user.get_id())
  1482. .all()
  1483. )
  1484. if action == "select":
  1485. if not favs:
  1486. session.add(
  1487. FavStar(
  1488. class_name=class_name,
  1489. obj_id=obj_id,
  1490. user_id=g.user.get_id(),
  1491. dttm=datetime.now(),
  1492. )
  1493. )
  1494. count = 1
  1495. elif action == "unselect":
  1496. for fav in favs:
  1497. session.delete(fav)
  1498. else:
  1499. count = len(favs)
  1500. session.commit()
  1501. return json_success(json.dumps({"count": count}))
  1502. @api
  1503. @has_access_api
  1504. @expose("/dashboard/<dashboard_id>/published/", methods=("GET", "POST"))
  1505. def publish(self, dashboard_id):
  1506. """Gets and toggles published status on dashboards"""
  1507. logger.warning(
  1508. "This API endpoint is deprecated and will be removed in version 1.0.0"
  1509. )
  1510. session = db.session()
  1511. Role = ab_models.Role
  1512. dash = (
  1513. session.query(Dashboard).filter(Dashboard.id == dashboard_id).one_or_none()
  1514. )
  1515. admin_role = session.query(Role).filter(Role.name == "Admin").one_or_none()
  1516. if request.method == "GET":
  1517. if dash:
  1518. return json_success(json.dumps({"published": dash.published}))
  1519. else:
  1520. return json_error_response(
  1521. f"ERROR: cannot find dashboard {dashboard_id}", status=404
  1522. )
  1523. else:
  1524. edit_perm = is_owner(dash, g.user) or admin_role in get_user_roles()
  1525. if not edit_perm:
  1526. return json_error_response(
  1527. f'ERROR: "{g.user.username}" cannot alter dashboard "{dash.dashboard_title}"',
  1528. status=403,
  1529. )
  1530. dash.published = str(request.form["published"]).lower() == "true"
  1531. session.commit()
  1532. return json_success(json.dumps({"published": dash.published}))
    @has_access
    @expose("/dashboard/<dashboard_id>/")
    def dashboard(self, dashboard_id):
        """Server side rendering for a dashboard.

        ``dashboard_id`` may be a numeric id (looked up by primary key) or a
        slug. Renders the dashboard template, or returns the bootstrap payload
        as JSON when ``?json=true`` is passed.
        """
        session = db.session()
        qry = session.query(Dashboard)
        if dashboard_id.isdigit():
            qry = qry.filter_by(id=int(dashboard_id))
        else:
            qry = qry.filter_by(slug=dashboard_id)
        dash = qry.one_or_none()
        if not dash:
            abort(404)

        # Collect the distinct datasources backing every slice on the dashboard.
        datasources = set()
        for slc in dash.slices:
            datasource = slc.datasource
            if datasource:
                datasources.add(datasource)

        if config["ENABLE_ACCESS_REQUEST"]:
            # Redirect to the access-request flow on the first inaccessible
            # datasource rather than rendering a partially-broken dashboard.
            for datasource in datasources:
                if datasource and not security_manager.datasource_access(datasource):
                    flash(
                        __(
                            security_manager.get_datasource_access_error_msg(datasource)
                        ),
                        "danger",
                    )
                    return redirect(
                        "superset/request_access/?" f"dashboard_id={dash.id}&"
                    )

        # Edit permission requires both ownership and the save-dash permission.
        dash_edit_perm = check_ownership(
            dash, raise_if_false=False
        ) and security_manager.can_access("can_save_dash", "Superset")
        dash_save_perm = security_manager.can_access("can_save_dash", "Superset")
        superset_can_explore = security_manager.can_access("can_explore", "Superset")
        superset_can_csv = security_manager.can_access("can_csv", "Superset")
        slice_can_edit = security_manager.can_access("can_edit", "SliceModelView")
        standalone_mode = (
            request.args.get(utils.ReservedUrlParameters.STANDALONE.value) == "true"
        )
        edit_mode = (
            request.args.get(utils.ReservedUrlParameters.EDIT_MODE.value) == "true"
        )

        # Hack to log the dashboard_id properly, even when getting a slug
        @event_logger.log_this
        def dashboard(**kwargs):
            pass

        dashboard(
            dashboard_id=dash.id,
            dashboard_version="v2",
            dash_edit_perm=dash_edit_perm,
            edit_mode=edit_mode,
        )

        dashboard_data = dash.data
        dashboard_data.update(
            {
                "standalone_mode": standalone_mode,
                "dash_save_perm": dash_save_perm,
                "dash_edit_perm": dash_edit_perm,
                "superset_can_explore": superset_can_explore,
                "superset_can_csv": superset_can_csv,
                "slice_can_edit": slice_can_edit,
            }
        )
        # Forward any non-reserved query-string parameters to the frontend.
        url_params = {
            key: value
            for key, value in request.args.items()
            if key not in [param.value for param in utils.ReservedUrlParameters]
        }
        bootstrap_data = {
            "user_id": g.user.get_id(),
            "dashboard_data": dashboard_data,
            "datasources": {ds.uid: ds.data for ds in datasources},
            "common": common_bootstrap_payload(),
            "editMode": edit_mode,
            "urlParams": url_params,
        }

        if request.args.get("json") == "true":
            return json_success(
                json.dumps(bootstrap_data, default=utils.pessimistic_json_iso_dttm_ser)
            )

        return self.render_template(
            "superset/dashboard.html",
            entry="dashboard",
            standalone_mode=standalone_mode,
            title=dash.dashboard_title,
            bootstrap_data=json.dumps(
                bootstrap_data, default=utils.pessimistic_json_iso_dttm_ser
            ),
        )
  1623. @api
  1624. @event_logger.log_this
  1625. @expose("/log/", methods=["POST"])
  1626. def log(self):
  1627. return Response(status=200)
  1628. @has_access
  1629. @expose("/sync_druid/", methods=["POST"])
  1630. @event_logger.log_this
  1631. def sync_druid_source(self):
  1632. """Syncs the druid datasource in main db with the provided config.
  1633. The endpoint takes 3 arguments:
  1634. user - user name to perform the operation as
  1635. cluster - name of the druid cluster
  1636. config - configuration stored in json that contains:
  1637. name: druid datasource name
  1638. dimensions: list of the dimensions, they become druid columns
  1639. with the type STRING
  1640. metrics_spec: list of metrics (dictionary). Metric consists of
  1641. 2 attributes: type and name. Type can be count,
  1642. etc. `count` type is stored internally as longSum
  1643. other fields will be ignored.
  1644. Example: {
  1645. 'name': 'test_click',
  1646. 'metrics_spec': [{'type': 'count', 'name': 'count'}],
  1647. 'dimensions': ['affiliate_id', 'campaign', 'first_seen']
  1648. }
  1649. """
  1650. payload = request.get_json(force=True)
  1651. druid_config = payload["config"]
  1652. user_name = payload["user"]
  1653. cluster_name = payload["cluster"]
  1654. user = security_manager.find_user(username=user_name)
  1655. DruidDatasource = ConnectorRegistry.sources["druid"]
  1656. DruidCluster = DruidDatasource.cluster_class
  1657. if not user:
  1658. err_msg = __(
  1659. "Can't find User '%(name)s', please ask your admin " "to create one.",
  1660. name=user_name,
  1661. )
  1662. logger.error(err_msg)
  1663. return json_error_response(err_msg)
  1664. cluster = (
  1665. db.session.query(DruidCluster)
  1666. .filter_by(cluster_name=cluster_name)
  1667. .one_or_none()
  1668. )
  1669. if not cluster:
  1670. err_msg = __(
  1671. "Can't find DruidCluster with cluster_name = " "'%(name)s'",
  1672. name=cluster_name,
  1673. )
  1674. logger.error(err_msg)
  1675. return json_error_response(err_msg)
  1676. try:
  1677. DruidDatasource.sync_to_db_from_config(druid_config, user, cluster)
  1678. except Exception as e:
  1679. logger.exception(utils.error_msg_from_exception(e))
  1680. return json_error_response(utils.error_msg_from_exception(e))
  1681. return Response(status=201)
  1682. @has_access
  1683. @expose("/sqllab_viz/", methods=["POST"])
  1684. @event_logger.log_this
  1685. def sqllab_viz(self):
  1686. SqlaTable = ConnectorRegistry.sources["table"]
  1687. data = json.loads(request.form.get("data"))
  1688. table_name = data.get("datasourceName")
  1689. database_id = data.get("dbId")
  1690. table = (
  1691. db.session.query(SqlaTable)
  1692. .filter_by(database_id=database_id, table_name=table_name)
  1693. .one_or_none()
  1694. )
  1695. if not table:
  1696. table = SqlaTable(table_name=table_name, owners=[g.user])
  1697. table.database_id = database_id
  1698. table.schema = data.get("schema")
  1699. table.template_params = data.get("templateParams")
  1700. table.is_sqllab_view = True
  1701. q = ParsedQuery(data.get("sql"))
  1702. table.sql = q.stripped()
  1703. db.session.add(table)
  1704. cols = []
  1705. for config in data.get("columns"):
  1706. column_name = config.get("name")
  1707. SqlaTable = ConnectorRegistry.sources["table"]
  1708. TableColumn = SqlaTable.column_class
  1709. SqlMetric = SqlaTable.metric_class
  1710. col = TableColumn(
  1711. column_name=column_name,
  1712. filterable=True,
  1713. groupby=True,
  1714. is_dttm=config.get("is_date", False),
  1715. type=config.get("type", False),
  1716. )
  1717. cols.append(col)
  1718. table.columns = cols
  1719. table.metrics = [SqlMetric(metric_name="count", expression="count(*)")]
  1720. db.session.commit()
  1721. return json_success(json.dumps({"table_id": table.id}))
  1722. @has_access
  1723. @expose("/extra_table_metadata/<database_id>/<table_name>/<schema>/")
  1724. @event_logger.log_this
  1725. def extra_table_metadata(self, database_id, table_name, schema):
  1726. schema = utils.parse_js_uri_path_item(schema, eval_undefined=True)
  1727. table_name = utils.parse_js_uri_path_item(table_name)
  1728. mydb = db.session.query(models.Database).filter_by(id=database_id).one()
  1729. payload = mydb.db_engine_spec.extra_table_metadata(mydb, table_name, schema)
  1730. return json_success(json.dumps(payload))
  1731. @has_access
  1732. @expose("/select_star/<database_id>/<table_name>")
  1733. @expose("/select_star/<database_id>/<table_name>/<schema>")
  1734. @event_logger.log_this
  1735. def select_star(self, database_id, table_name, schema=None):
  1736. logging.warning(
  1737. f"{self.__class__.__name__}.select_star "
  1738. "This API endpoint is deprecated and will be removed in version 1.0.0"
  1739. )
  1740. stats_logger.incr(f"{self.__class__.__name__}.select_star.init")
  1741. database = db.session.query(models.Database).get(database_id)
  1742. if not database:
  1743. stats_logger.incr(
  1744. f"deprecated.{self.__class__.__name__}.select_star.database_not_found"
  1745. )
  1746. return json_error_response("Not found", 404)
  1747. schema = utils.parse_js_uri_path_item(schema, eval_undefined=True)
  1748. table_name = utils.parse_js_uri_path_item(table_name)
  1749. # Check that the user can access the datasource
  1750. if not self.appbuilder.sm.can_access_datasource(database, table_name, schema):
  1751. stats_logger.incr(
  1752. f"deprecated.{self.__class__.__name__}.select_star.permission_denied"
  1753. )
  1754. logging.warning(
  1755. f"Permission denied for user {g.user} on table: {table_name} "
  1756. f"schema: {schema}"
  1757. )
  1758. return json_error_response("Not found", 404)
  1759. stats_logger.incr(f"deprecated.{self.__class__.__name__}.select_star.success")
  1760. return json_success(
  1761. database.select_star(
  1762. table_name, schema, latest_partition=True, show_cols=True
  1763. )
  1764. )
  1765. @has_access_api
  1766. @expose("/estimate_query_cost/<database_id>/", methods=["POST"])
  1767. @expose("/estimate_query_cost/<database_id>/<schema>/", methods=["POST"])
  1768. @event_logger.log_this
  1769. def estimate_query_cost(
  1770. self, database_id: int, schema: Optional[str] = None
  1771. ) -> Response:
  1772. mydb = db.session.query(models.Database).get(database_id)
  1773. sql = json.loads(request.form.get("sql", '""'))
  1774. template_params = json.loads(request.form.get("templateParams") or "{}")
  1775. if template_params:
  1776. template_processor = get_template_processor(mydb)
  1777. sql = template_processor.process_template(sql, **template_params)
  1778. timeout = SQLLAB_QUERY_COST_ESTIMATE_TIMEOUT
  1779. timeout_msg = f"The estimation exceeded the {timeout} seconds timeout."
  1780. try:
  1781. with utils.timeout(seconds=timeout, error_message=timeout_msg):
  1782. cost = mydb.db_engine_spec.estimate_query_cost(
  1783. mydb, schema, sql, utils.QuerySource.SQL_LAB
  1784. )
  1785. except SupersetTimeoutException as e:
  1786. logger.exception(e)
  1787. return json_error_response(timeout_msg)
  1788. except Exception as e:
  1789. return json_error_response(str(e))
  1790. spec = mydb.db_engine_spec
  1791. query_cost_formatters = get_feature_flags().get(
  1792. "QUERY_COST_FORMATTERS_BY_ENGINE", {}
  1793. )
  1794. query_cost_formatter = query_cost_formatters.get(
  1795. spec.engine, spec.query_cost_formatter
  1796. )
  1797. cost = query_cost_formatter(cost)
  1798. return json_success(json.dumps(cost))
  1799. @expose("/theme/")
  1800. def theme(self):
  1801. return self.render_template("superset/theme.html")
  1802. @has_access_api
  1803. @expose("/results/<key>/")
  1804. @event_logger.log_this
  1805. def results(self, key):
  1806. return self.results_exec(key)
    def results_exec(self, key: str):
        """Serves a key off of the results backend

        It is possible to pass the `rows` query argument to limit the number
        of rows returned.
        """
        if not results_backend:
            return json_error_response("Results backend isn't configured")

        # Time the backend read for SQL Lab observability.
        read_from_results_backend_start = now_as_float()
        blob = results_backend.get(key)
        stats_logger.timing(
            "sqllab.query.results_backend_read",
            now_as_float() - read_from_results_backend_start,
        )
        if not blob:
            # 410 Gone: the cached payload expired or was evicted.
            return json_error_response(
                "Data could not be retrieved. " "You may want to re-run the query.",
                status=410,
            )

        query = db.session.query(Query).filter_by(results_key=key).one_or_none()
        if query is None:
            return json_error_response(
                "Data could not be retrieved. You may want to re-run the query.",
                status=404,
            )

        # Re-check table-level access before serving cached results.
        rejected_tables = security_manager.rejected_tables(
            query.sql, query.database, query.schema
        )
        if rejected_tables:
            return json_error_response(
                security_manager.get_table_access_error_msg(rejected_tables), status=403
            )

        payload = utils.zlib_decompress(blob, decode=not results_backend_use_msgpack)
        obj: dict = _deserialize_results_payload(
            payload, query, cast(bool, results_backend_use_msgpack)
        )

        # Optional row-count cap requested by the client.
        if "rows" in request.args:
            try:
                rows = int(request.args["rows"])
            except ValueError:
                return json_error_response("Invalid `rows` argument", status=400)
            obj = apply_display_max_row_limit(obj, rows)

        return json_success(
            json.dumps(obj, default=utils.json_iso_dttm_ser, ignore_nan=True)
        )
  1851. @has_access_api
  1852. @expose("/stop_query/", methods=["POST"])
  1853. @event_logger.log_this
  1854. @backoff.on_exception(
  1855. backoff.constant,
  1856. Exception,
  1857. interval=1,
  1858. on_backoff=lambda details: db.session.rollback(),
  1859. on_giveup=lambda details: db.session.rollback(),
  1860. max_tries=5,
  1861. )
  1862. def stop_query(self):
  1863. client_id = request.form.get("client_id")
  1864. query = db.session.query(Query).filter_by(client_id=client_id).one()
  1865. if query.status in [
  1866. QueryStatus.FAILED,
  1867. QueryStatus.SUCCESS,
  1868. QueryStatus.TIMED_OUT,
  1869. ]:
  1870. logger.error(
  1871. f"Query with client_id {client_id} could not be stopped: query already complete"
  1872. )
  1873. return self.json_response("OK")
  1874. query.status = QueryStatus.STOPPED
  1875. db.session.commit()
  1876. return self.json_response("OK")
    @has_access_api
    @expose("/validate_sql_json/", methods=["POST", "GET"])
    @event_logger.log_this
    def validate_sql_json(self):
        """Validates that arbitrary sql is acceptable for the given database.

        Returns a list of error/warning annotations as json.
        """
        sql = request.form.get("sql")
        database_id = request.form.get("database_id")
        schema = request.form.get("schema") or None
        template_params = json.loads(request.form.get("templateParams") or "{}")

        if len(template_params) > 0:
            # TODO: factor the Database object out of template rendering
            # or provide it as mydb so we can render template params
            # without having to also persist a Query ORM object.
            return json_error_response(
                "SQL validation does not support template parameters", status=400
            )

        session = db.session()
        mydb = session.query(models.Database).filter_by(id=database_id).one_or_none()
        if not mydb:
            return json_error_response(
                "Database with id {} is missing.".format(database_id), status=400
            )

        # A validator must be explicitly configured for this engine via the
        # SQL_VALIDATORS_BY_ENGINE feature flag; otherwise validation is off.
        spec = mydb.db_engine_spec
        validators_by_engine = get_feature_flags().get("SQL_VALIDATORS_BY_ENGINE")
        if not validators_by_engine or spec.engine not in validators_by_engine:
            return json_error_response(
                "no SQL validator is configured for {}".format(spec.engine), status=400
            )
        validator_name = validators_by_engine[spec.engine]
        validator = get_validator_by_name(validator_name)
        if not validator:
            return json_error_response(
                "No validator named {} found (configured for the {} engine)".format(
                    validator_name, spec.engine
                )
            )

        try:
            # Bound validation time so a slow external validator cannot hang
            # the web request.
            timeout = config["SQLLAB_VALIDATION_TIMEOUT"]
            timeout_msg = f"The query exceeded the {timeout} seconds timeout."
            with utils.timeout(seconds=timeout, error_message=timeout_msg):
                errors = validator.validate(sql, schema, mydb)
            payload = json.dumps(
                [err.to_dict() for err in errors],
                default=utils.pessimistic_json_iso_dttm_ser,
                ignore_nan=True,
                encoding=None,
            )
            return json_success(payload)
        except Exception as e:
            logger.exception(e)
            msg = _(
                f"{validator.name} was unable to check your query.\n"
                "Please recheck your query.\n"
                f"Exception: {e}"
            )
            # Return as a 400 if the database error message says we got a 4xx error
            if re.search(r"([\W]|^)4\d{2}([\W]|$)", str(e)):
                return json_error_response(f"{msg}", status=400)
            else:
                return json_error_response(f"{msg}")
    def _sql_json_async(
        self,
        session: Session,
        rendered_query: str,
        query: Query,
        expand_data: bool,
        log_params: Optional[Dict[str, Any]] = None,
    ) -> str:
        """
        Send SQL JSON query to celery workers

        :param session: SQLAlchemy session object
        :param rendered_query: the rendered query to perform by workers
        :param query: The query (SQLAlchemy) object
        :param expand_data: whether workers should expand nested data objects
        :param log_params: extra key/value pairs forwarded to the task
        :return: String JSON response
        """
        logger.info(f"Query {query.id}: Running query on a Celery worker")
        # Ignore the celery future object and the request may time out.
        try:
            sql_lab.get_sql_results.delay(
                query.id,
                rendered_query,
                return_results=False,
                store_results=not query.select_as_cta,
                user_name=g.user.username if g.user else None,
                start_time=now_as_float(),
                expand_data=expand_data,
                log_params=log_params,
            )
        except Exception as e:
            # Enqueueing failed (e.g. broker unreachable): record the failure
            # on the Query row so the UI can surface it.
            logger.exception(f"Query {query.id}: {e}")
            msg = _(
                "Failed to start remote query on a worker. "
                "Tell your administrator to verify the availability of "
                "the message queue."
            )
            query.status = QueryStatus.FAILED
            query.error_message = msg
            session.commit()
            return json_error_response("{}".format(msg))

        # 202 Accepted — the client polls for the result separately.
        resp = json_success(
            json.dumps(
                {"query": query.to_dict()},
                default=utils.json_int_dttm_ser,
                ignore_nan=True,
            ),
            status=202,
        )
        session.commit()
        return resp
  1988. def _sql_json_sync(
  1989. self,
  1990. session: Session,
  1991. rendered_query: str,
  1992. query: Query,
  1993. expand_data: bool,
  1994. log_params: Optional[Dict[str, Any]] = None,
  1995. ) -> str:
  1996. """
  1997. Execute SQL query (sql json)
  1998. :param rendered_query: The rendered query (included templates)
  1999. :param query: The query SQL (SQLAlchemy) object
  2000. :return: String JSON response
  2001. """
  2002. try:
  2003. timeout = config["SQLLAB_TIMEOUT"]
  2004. timeout_msg = f"The query exceeded the {timeout} seconds timeout."
  2005. store_results = (
  2006. is_feature_enabled("SQLLAB_BACKEND_PERSISTENCE")
  2007. and not query.select_as_cta
  2008. )
  2009. with utils.timeout(seconds=timeout, error_message=timeout_msg):
  2010. # pylint: disable=no-value-for-parameter
  2011. data = sql_lab.get_sql_results(
  2012. query.id,
  2013. rendered_query,
  2014. return_results=True,
  2015. store_results=store_results,
  2016. user_name=g.user.username if g.user else None,
  2017. expand_data=expand_data,
  2018. log_params=log_params,
  2019. )
  2020. payload = json.dumps(
  2021. apply_display_max_row_limit(data),
  2022. default=utils.pessimistic_json_iso_dttm_ser,
  2023. ignore_nan=True,
  2024. encoding=None,
  2025. )
  2026. except Exception as e:
  2027. logger.exception(f"Query {query.id}: {e}")
  2028. return json_error_response(f"{{e}}")
  2029. if data.get("status") == QueryStatus.FAILED:
  2030. return json_error_response(payload=data)
  2031. return json_success(payload)
  2032. @has_access_api
  2033. @expose("/sql_json/", methods=["POST"])
  2034. @event_logger.log_this
  2035. def sql_json(self):
  2036. log_params = {
  2037. "user_agent": cast(Optional[str], request.headers.get("USER_AGENT"))
  2038. }
  2039. return self.sql_json_exec(request.json, log_params)
    def sql_json_exec(
        self, query_params: dict, log_params: Optional[Dict[str, Any]] = None
    ):
        """Runs arbitrary sql and returns data as json

        :param query_params: request payload (database_id, schema, sql,
            templateParams, queryLimit, runAsync, select_as_cta, ...)
        :param log_params: extra key/value pairs forwarded to the event logger
        """
        # Collect Values
        database_id: int = cast(int, query_params.get("database_id"))
        schema: str = cast(str, query_params.get("schema"))
        sql: str = cast(str, query_params.get("sql"))
        try:
            template_params: dict = json.loads(
                query_params.get("templateParams") or "{}"
            )
        except json.JSONDecodeError:
            # Malformed template params are tolerated, not fatal.
            logger.warning(
                f"Invalid template parameter {query_params.get('templateParams')}"
                " specified. Defaulting to empty dict"
            )
            template_params = {}
        limit: int = query_params.get("queryLimit") or app.config["SQL_MAX_ROW"]
        async_flag: bool = cast(bool, query_params.get("runAsync"))
        if limit < 0:
            logger.warning(
                f"Invalid limit of {limit} specified. Defaulting to max limit."
            )
            limit = 0
        select_as_cta: bool = cast(bool, query_params.get("select_as_cta"))
        tmp_table_name: str = cast(str, query_params.get("tmp_table_name"))
        client_id: str = cast(
            str, query_params.get("client_id") or utils.shortid()[:10]
        )
        sql_editor_id: str = cast(str, query_params.get("sql_editor_id"))
        tab_name: str = cast(str, query_params.get("tab"))
        status: str = QueryStatus.PENDING if async_flag else QueryStatus.RUNNING

        session = db.session()
        mydb = session.query(models.Database).get(database_id)
        if not mydb:
            return json_error_response(f"Database with id {database_id} is missing.")

        # Set tmp_table_name for CTA
        if select_as_cta and mydb.force_ctas_schema:
            tmp_table_name = f"{mydb.force_ctas_schema}.{tmp_table_name}"

        # Save current query
        query = Query(
            database_id=database_id,
            sql=sql,
            schema=schema,
            select_as_cta=select_as_cta,
            start_time=now_as_float(),
            tab_name=tab_name,
            status=status,
            sql_editor_id=sql_editor_id,
            tmp_table_name=tmp_table_name,
            user_id=g.user.get_id() if g.user else None,
            client_id=client_id,
        )
        try:
            session.add(query)
            session.flush()
            query_id = query.id
            session.commit()  # shouldn't be necessary
        except SQLAlchemyError as e:
            logger.error(f"Errors saving query details {e}")
            session.rollback()
            raise Exception(_("Query record was not created as expected."))
        if not query_id:
            raise Exception(_("Query record was not created as expected."))

        logger.info(f"Triggering query_id: {query_id}")

        # Table-level access control: reject before executing anything.
        rejected_tables = security_manager.rejected_tables(sql, mydb, schema)
        if rejected_tables:
            query.status = QueryStatus.FAILED
            session.commit()
            return json_error_response(
                security_manager.get_table_access_error_msg(rejected_tables),
                link=security_manager.get_table_access_link(rejected_tables),
                status=403,
            )

        try:
            template_processor = get_template_processor(
                database=query.database, query=query
            )
            rendered_query = template_processor.process_template(
                query.sql, **template_params
            )
        except Exception as e:
            error_msg = utils.error_msg_from_exception(e)
            return json_error_response(
                f"Query {query_id}: Template rendering failed: {error_msg}"
            )

        # set LIMIT after template processing
        limits = [mydb.db_engine_spec.get_limit_from_sql(rendered_query), limit]
        query.limit = min(lim for lim in limits if lim is not None)

        # Flag for whether or not to expand data
        # (feature that will expand Presto row objects and arrays)
        expand_data: bool = cast(
            bool,
            is_feature_enabled("PRESTO_EXPAND_DATA")
            and query_params.get("expand_data"),
        )

        # Async request.
        if async_flag:
            return self._sql_json_async(
                session, rendered_query, query, expand_data, log_params
            )
        # Sync request.
        return self._sql_json_sync(
            session, rendered_query, query, expand_data, log_params
        )
    @has_access
    @expose("/csv/<client_id>")
    @event_logger.log_this
    def csv(self, client_id):
        """Download the query results as csv.

        Serves from the results backend when a cached payload exists;
        otherwise re-executes the stored query against the database.
        """
        logger.info("Exporting CSV file [{}]".format(client_id))
        query = db.session.query(Query).filter_by(client_id=client_id).one()

        # Re-check table-level access before exporting anything.
        rejected_tables = security_manager.rejected_tables(
            query.sql, query.database, query.schema
        )
        if rejected_tables:
            flash(security_manager.get_table_access_error_msg(rejected_tables))
            return redirect("/")

        blob = None
        if results_backend and query.results_key:
            logger.info(
                "Fetching CSV from results backend " "[{}]".format(query.results_key)
            )
            blob = results_backend.get(query.results_key)
        if blob:
            # Cached path: decompress + deserialize the stored payload.
            logger.info("Decompressing")
            payload = utils.zlib_decompress(
                blob, decode=not results_backend_use_msgpack
            )
            obj = _deserialize_results_payload(
                payload, query, results_backend_use_msgpack
            )
            columns = [c["name"] for c in obj["columns"]]
            df = pd.DataFrame.from_records(obj["data"], columns=columns)
            logger.info("Using pandas to convert to CSV")
            csv = df.to_csv(index=False, **config["CSV_EXPORT"])
        else:
            # Fallback path: run the query again to regenerate the data.
            logger.info("Running a query to turn into CSV")
            sql = query.select_sql or query.executed_sql
            df = query.database.get_df(sql, query.schema)
            # TODO(bkyryliuk): add compression=gzip for big files.
            csv = df.to_csv(index=False, **config["CSV_EXPORT"])
        response = Response(csv, mimetype="text/csv")
        response.headers[
            "Content-Disposition"
        ] = f"attachment; filename={query.name}.csv"
        # Emit a structured audit event describing the export.
        event_info = {
            "event_type": "data_export",
            "client_id": client_id,
            "row_count": len(df.index),
            "database": query.database.name,
            "schema": query.schema,
            "sql": query.sql,
            "exported_format": "csv",
        }
        logger.info(
            f"CSV exported: {repr(event_info)}", extra={"superset_event": event_info}
        )
        return response
  2200. @api
  2201. @handle_api_exception
  2202. @has_access
  2203. @expose("/fetch_datasource_metadata")
  2204. @event_logger.log_this
  2205. def fetch_datasource_metadata(self):
  2206. datasource_id, datasource_type = request.args.get("datasourceKey").split("__")
  2207. datasource = ConnectorRegistry.get_datasource(
  2208. datasource_type, datasource_id, db.session
  2209. )
  2210. # Check if datasource exists
  2211. if not datasource:
  2212. return json_error_response(DATASOURCE_MISSING_ERR)
  2213. # Check permission for datasource
  2214. security_manager.assert_datasource_permission(datasource)
  2215. return json_success(json.dumps(datasource.data))
  2216. @has_access_api
  2217. @expose("/queries/<last_updated_ms>")
  2218. def queries(self, last_updated_ms):
  2219. """
  2220. Get the updated queries.
  2221. :param last_updated_ms: unix time, milliseconds
  2222. """
  2223. last_updated_ms_int = int(float(last_updated_ms)) if last_updated_ms else 0
  2224. return self.queries_exec(last_updated_ms_int)
  2225. def queries_exec(self, last_updated_ms_int: int):
  2226. stats_logger.incr("queries")
  2227. if not g.user.get_id():
  2228. return json_error_response(
  2229. "Please login to access the queries.", status=403
  2230. )
  2231. # UTC date time, same that is stored in the DB.
  2232. last_updated_dt = utils.EPOCH + timedelta(seconds=last_updated_ms_int / 1000)
  2233. sql_queries = (
  2234. db.session.query(Query)
  2235. .filter(
  2236. Query.user_id == g.user.get_id(), Query.changed_on >= last_updated_dt
  2237. )
  2238. .all()
  2239. )
  2240. dict_queries = {q.client_id: q.to_dict() for q in sql_queries}
  2241. return json_success(json.dumps(dict_queries, default=utils.json_int_dttm_ser))
  2242. @has_access
  2243. @expose("/search_queries")
  2244. @event_logger.log_this
  2245. def search_queries(self) -> Response:
  2246. """
  2247. Search for previously run sqllab queries. Used for Sqllab Query Search
  2248. page /superset/sqllab#search.
  2249. Custom permission can_only_search_queries_owned restricts queries
  2250. to only queries run by current user.
  2251. :returns: Response with list of sql query dicts
  2252. """
  2253. query = db.session.query(Query)
  2254. if security_manager.can_access_all_queries():
  2255. search_user_id = request.args.get("user_id")
  2256. elif (
  2257. request.args.get("user_id") is not None
  2258. and request.args.get("user_id") != g.user.get_user_id()
  2259. ):
  2260. return Response(status=403, mimetype="application/json")
  2261. else:
  2262. search_user_id = g.user.get_user_id()
  2263. database_id = request.args.get("database_id")
  2264. search_text = request.args.get("search_text")
  2265. status = request.args.get("status")
  2266. # From and To time stamp should be Epoch timestamp in seconds
  2267. from_time = request.args.get("from")
  2268. to_time = request.args.get("to")
  2269. if search_user_id:
  2270. # Filter on user_id
  2271. query = query.filter(Query.user_id == search_user_id)
  2272. if database_id:
  2273. # Filter on db Id
  2274. query = query.filter(Query.database_id == database_id)
  2275. if status:
  2276. # Filter on status
  2277. query = query.filter(Query.status == status)
  2278. if search_text:
  2279. # Filter on search text
  2280. query = query.filter(Query.sql.like("%{}%".format(search_text)))
  2281. if from_time:
  2282. query = query.filter(Query.start_time > int(from_time))
  2283. if to_time:
  2284. query = query.filter(Query.start_time < int(to_time))
  2285. query_limit = config["QUERY_SEARCH_LIMIT"]
  2286. sql_queries = query.order_by(Query.start_time.asc()).limit(query_limit).all()
  2287. dict_queries = [q.to_dict() for q in sql_queries]
  2288. return Response(
  2289. json.dumps(dict_queries, default=utils.json_int_dttm_ser),
  2290. status=200,
  2291. mimetype="application/json",
  2292. )
  2293. @app.errorhandler(500)
  2294. def show_traceback(self):
  2295. return (
  2296. render_template("superset/traceback.html", error_msg=get_error_msg()),
  2297. 500,
  2298. )
  2299. @expose("/welcome")
  2300. def welcome(self):
  2301. """Personalized welcome page"""
  2302. if not g.user or not g.user.get_id():
  2303. return redirect(appbuilder.get_url_for_login)
  2304. welcome_dashboard_id = (
  2305. db.session.query(UserAttribute.welcome_dashboard_id)
  2306. .filter_by(user_id=g.user.get_id())
  2307. .scalar()
  2308. )
  2309. if welcome_dashboard_id:
  2310. return self.dashboard(str(welcome_dashboard_id))
  2311. payload = {
  2312. "user": bootstrap_user_data(g.user),
  2313. "common": common_bootstrap_payload(),
  2314. }
  2315. return self.render_template(
  2316. "superset/welcome.html",
  2317. entry="welcome",
  2318. bootstrap_data=json.dumps(
  2319. payload, default=utils.pessimistic_json_iso_dttm_ser
  2320. ),
  2321. )
  2322. @has_access
  2323. @expose("/profile/<username>/")
  2324. def profile(self, username):
  2325. """User profile page"""
  2326. if not username and g.user:
  2327. username = g.user.username
  2328. user = (
  2329. db.session.query(ab_models.User).filter_by(username=username).one_or_none()
  2330. )
  2331. if not user:
  2332. abort(404, description=f"User: {username} does not exist.")
  2333. payload = {
  2334. "user": bootstrap_user_data(user, include_perms=True),
  2335. "common": common_bootstrap_payload(),
  2336. }
  2337. return self.render_template(
  2338. "superset/basic.html",
  2339. title=_("%(user)s's profile", user=username),
  2340. entry="profile",
  2341. bootstrap_data=json.dumps(
  2342. payload, default=utils.pessimistic_json_iso_dttm_ser
  2343. ),
  2344. )
  2345. @staticmethod
  2346. def _get_sqllab_payload(user_id: int) -> Dict[str, Any]:
  2347. # send list of tab state ids
  2348. tabs_state = (
  2349. db.session.query(TabState.id, TabState.label)
  2350. .filter_by(user_id=user_id)
  2351. .all()
  2352. )
  2353. tab_state_ids = [tab_state[0] for tab_state in tabs_state]
  2354. # return first active tab, or fallback to another one if no tab is active
  2355. active_tab = (
  2356. db.session.query(TabState)
  2357. .filter_by(user_id=user_id)
  2358. .order_by(TabState.active.desc())
  2359. .first()
  2360. )
  2361. databases: Dict[int, Any] = {}
  2362. queries: Dict[str, Any] = {}
  2363. # These are unnecessary if sqllab backend persistence is disabled
  2364. if is_feature_enabled("SQLLAB_BACKEND_PERSISTENCE"):
  2365. databases = {
  2366. database.id: {
  2367. k: v for k, v in database.to_json().items() if k in DATABASE_KEYS
  2368. }
  2369. for database in db.session.query(models.Database).all()
  2370. }
  2371. # return all user queries associated with existing SQL editors
  2372. user_queries = (
  2373. db.session.query(Query)
  2374. .filter_by(user_id=user_id)
  2375. .filter(Query.sql_editor_id.cast(Integer).in_(tab_state_ids))
  2376. .all()
  2377. )
  2378. queries = {
  2379. query.client_id: {k: v for k, v in query.to_dict().items()}
  2380. for query in user_queries
  2381. }
  2382. return {
  2383. "defaultDbId": config["SQLLAB_DEFAULT_DBID"],
  2384. "common": common_bootstrap_payload(),
  2385. "tab_state_ids": tabs_state,
  2386. "active_tab": active_tab.to_dict() if active_tab else None,
  2387. "databases": databases,
  2388. "queries": queries,
  2389. }
  2390. @has_access
  2391. @expose("/sqllab")
  2392. def sqllab(self):
  2393. """SQL Editor"""
  2394. payload = self._get_sqllab_payload(g.user.get_id())
  2395. bootstrap_data = json.dumps(
  2396. payload, default=utils.pessimistic_json_iso_dttm_ser
  2397. )
  2398. return self.render_template(
  2399. "superset/basic.html", entry="sqllab", bootstrap_data=bootstrap_data
  2400. )
  2401. @api
  2402. @handle_api_exception
  2403. @has_access_api
  2404. @expose("/slice_query/<slice_id>/")
  2405. def slice_query(self, slice_id):
  2406. """
  2407. This method exposes an API endpoint to
  2408. get the database query string for this slice
  2409. """
  2410. viz_obj = get_viz(slice_id)
  2411. security_manager.assert_viz_permission(viz_obj)
  2412. return self.get_query_string_response(viz_obj)
  2413. @api
  2414. @has_access_api
  2415. @expose("/schemas_access_for_csv_upload")
  2416. def schemas_access_for_csv_upload(self):
  2417. """
  2418. This method exposes an API endpoint to
  2419. get the schema access control settings for csv upload in this database
  2420. """
  2421. if not request.args.get("db_id"):
  2422. return json_error_response("No database is allowed for your csv upload")
  2423. db_id = int(request.args.get("db_id"))
  2424. database = db.session.query(models.Database).filter_by(id=db_id).one()
  2425. try:
  2426. schemas_allowed = database.get_schema_access_for_csv_upload()
  2427. if (
  2428. security_manager.database_access(database)
  2429. or security_manager.all_datasource_access()
  2430. ):
  2431. return self.json_response(schemas_allowed)
  2432. # the list schemas_allowed should not be empty here
  2433. # and the list schemas_allowed_processed returned from security_manager
  2434. # should not be empty either,
  2435. # otherwise the database should have been filtered out
  2436. # in CsvToDatabaseForm
  2437. schemas_allowed_processed = security_manager.schemas_accessible_by_user(
  2438. database, schemas_allowed, False
  2439. )
  2440. return self.json_response(schemas_allowed_processed)
  2441. except Exception as e:
  2442. logger.exception(e)
  2443. return json_error_response(
  2444. "Failed to fetch schemas allowed for csv upload in this database! "
  2445. "Please contact your Superset Admin!"
  2446. )
class CssTemplateModelView(SupersetModelView, DeleteMixin):
    """CRUD view for named CSS templates that can be applied to dashboards."""

    datamodel = SQLAInterface(models.CssTemplate)
    # Expose the full create/read/update/delete route set.
    include_route_methods = RouteMethod.CRUD_SET
    list_title = _("CSS Templates")
    show_title = _("Show CSS Template")
    add_title = _("Add CSS Template")
    edit_title = _("Edit CSS Template")
    # The list page shows only the template name; add/edit expose the CSS body.
    list_columns = ["template_name"]
    edit_columns = ["template_name", "css"]
    add_columns = edit_columns
    label_columns = {"template_name": _("Template Name")}
class CssTemplateAsyncModelView(CssTemplateModelView):
    """Read-only (API_READ) variant used for async fetches by the frontend."""

    include_route_methods = {RouteMethod.API_READ}
    # Unlike the parent list view, the API response includes the CSS body.
    list_columns = ["template_name", "css"]
  2461. @app.after_request
  2462. def apply_http_headers(response: Response):
  2463. """Applies the configuration's http headers to all responses"""
  2464. # HTTP_HEADERS is deprecated, this provides backwards compatibility
  2465. response.headers.extend(
  2466. {**config["OVERRIDE_HTTP_HEADERS"], **config["HTTP_HEADERS"]}
  2467. )
  2468. for k, v in config["DEFAULT_HTTP_HEADERS"].items():
  2469. if k not in response.headers:
  2470. response.headers[k] = v
  2471. return response