# pylint: disable=C,R,W
from datetime import datetime, timedelta
import inspect
import logging
import os
import re
import time
import traceback
from urllib import parse

from flask import (
    flash, g, Markup, redirect, render_template, request, Response, url_for,
)
from flask_appbuilder import expose, SimpleFormView
from flask_appbuilder.actions import action
from flask_appbuilder.models.sqla.interface import SQLAInterface
from flask_appbuilder.security.decorators import has_access, has_access_api
from flask_babel import gettext as __
from flask_babel import lazy_gettext as _
import pandas as pd
import simplejson as json
import sqlalchemy as sqla
from sqlalchemy import and_, create_engine, MetaData, or_, update
from sqlalchemy.engine.url import make_url
from sqlalchemy.exc import IntegrityError
from unidecode import unidecode
from werkzeug.routing import BaseConverter
from werkzeug.utils import secure_filename

from superset import (
    app, appbuilder, cache, dashboard_import_export_util, db, results_backend,
    security_manager, sql_lab, utils, viz)
from superset.connectors.connector_registry import ConnectorRegistry
from superset.connectors.sqla.models import AnnotationDatasource, SqlaTable
from superset.exceptions import SupersetException
from superset.forms import CsvToDatabaseForm
from superset.jinja_context import get_template_processor
from superset.legacy import cast_form_data, update_time_range
import superset.models.core as models
from superset.models.sql_lab import Query
from superset.models.user_attributes import UserAttribute
from superset.sql_parse import SupersetQuery
from superset.utils import (
    merge_extra_filters, merge_request_params, QueryStatus,
)
from .base import (
    api, BaseSupersetView,
    check_ownership,
    CsvResponse, DeleteMixin, XlsxResponse,
    generate_download_headers, get_error_msg,
    json_error_response, SupersetFilter, SupersetModelView, YamlExportMixin,
)
from .utils import bootstrap_user_data

config = app.config
stats_logger = config.get('STATS_LOGGER')
log_this = models.Log.log_this
DAR = models.DatasourceAccessRequest

ALL_DATASOURCE_ACCESS_ERR = __(
    'This endpoint requires the `all_datasource_access` permission')
DATASOURCE_MISSING_ERR = __('The datasource seems to have been deleted')
ACCESS_REQUEST_MISSING_ERR = __(
    'The access requests seem to have been deleted')
USER_MISSING_ERR = __('The user seems to have been deleted')

FORM_DATA_KEY_BLACKLIST = []
if not config.get('ENABLE_JAVASCRIPT_CONTROLS'):
    FORM_DATA_KEY_BLACKLIST = [
        'js_tooltip',
        'js_onclick_href',
        'js_data_mutator',
    ]


def get_database_access_error_msg(database_name):
    return __('This view requires the database %(name)s or '
              '`all_datasource_access` permission', name=database_name)


def json_success(json_msg, status=200):
    return Response(json_msg, status=status, mimetype='application/json')


def is_owner(obj, user):
    """ Check if user is owner of the slice """
    return obj and user in obj.owners


class SliceFilter(SupersetFilter):
    def apply(self, query, func):  # noqa
        if security_manager.all_datasource_access():
            return query
        perms = self.get_view_menus('datasource_access')
        # TODO(bogdan): add `schema_access` support here
        return query.filter(self.model.perm.in_(perms))


class DashboardFilter(SupersetFilter):
    """List dashboards for which users have access to at least one slice or are owners"""

    def apply(self, query, func):  # noqa
        if security_manager.all_datasource_access():
            return query
        Slice = models.Slice  # noqa
        Dash = models.Dashboard  # noqa
        User = security_manager.user_model
        # TODO(bogdan): add `schema_access` support here
        datasource_perms = self.get_view_menus('datasource_access')
        slice_ids_qry = (
            db.session
            .query(Slice.id)
            .filter(Slice.perm.in_(datasource_perms))
        )
        owner_ids_qry = (
            db.session
            .query(Dash.id)
            .join(Dash.owners)
            .filter(User.id == User.get_user_id())
        )
        query = query.filter(
            or_(Dash.id.in_(
                db.session.query(Dash.id)
                .distinct()
                .join(Dash.slices)
                .filter(Slice.id.in_(slice_ids_qry)),
            ), Dash.id.in_(owner_ids_qry)),
        )
        return query


class DatabaseView(SupersetModelView, DeleteMixin, YamlExportMixin):  # noqa
    datamodel = SQLAInterface(models.Database)

    list_title = _('List Databases')
    show_title = _('Show Database')
    add_title = _('Add Database')
    edit_title = _('Edit Database')

    list_columns = [
        'database_name', 'backend', 'allow_run_sync', 'allow_run_async',
        'allow_dml', 'allow_csv_upload', 'creator', 'modified']
    order_columns = [
        'database_name', 'allow_run_sync', 'allow_run_async', 'allow_dml',
        'modified', 'allow_csv_upload',
    ]
    add_columns = [
        'database_name', 'sqlalchemy_uri', 'cache_timeout', 'expose_in_sqllab',
        'allow_run_sync', 'allow_run_async', 'allow_csv_upload',
        'allow_ctas', 'allow_dml', 'force_ctas_schema', 'impersonate_user',
        'allow_multi_schema_metadata_fetch', 'extra',
    ]
    search_exclude_columns = (
        'password', 'tables', 'created_by', 'changed_by', 'queries',
        'saved_queries')
    edit_columns = add_columns
    show_columns = [
        'tables',
        'cache_timeout',
        'extra',
        'database_name',
        'sqlalchemy_uri',
        'perm',
        'created_by',
        'created_on',
        'changed_by',
        'changed_on',
    ]
    add_template = 'superset/models/database/add.html'
    edit_template = 'superset/models/database/edit.html'
    base_order = ('changed_on', 'desc')
    description_columns = {
        'sqlalchemy_uri': utils.markdown(
            'Refer to the '
            '[SqlAlchemy docs]'
            '(http://docs.sqlalchemy.org/en/rel_1_0/core/engines.html#'
            'database-urls) '
            'for more information on how to structure your URI.', True),
        'expose_in_sqllab': _('Expose this DB in SQL Lab'),
        'allow_run_sync': _(
            'Allow users to run synchronous queries, this is the default '
            'and should work well for queries that can be executed '
            'within a web request scope (<~1 minute)'),
        'allow_run_async': _(
            'Allow users to run queries, against an async backend. '
            'This assumes that you have a Celery worker setup as well '
            'as a results backend.'),
        'allow_ctas': _('Allow CREATE TABLE AS option in SQL Lab'),
        'allow_dml': _(
            'Allow users to run non-SELECT statements '
            '(UPDATE, DELETE, CREATE, ...) '
            'in SQL Lab'),
        'force_ctas_schema': _(
            'When allowing CREATE TABLE AS option in SQL Lab, '
            'this option forces the table to be created in this schema'),
        'extra': utils.markdown(
            'JSON string containing extra configuration elements.<br/>'
            '1. The ``engine_params`` object gets unpacked into the '
            '[sqlalchemy.create_engine]'
            '(http://docs.sqlalchemy.org/en/latest/core/engines.html#'
            'sqlalchemy.create_engine) call, while the ``metadata_params`` '
            'gets unpacked into the [sqlalchemy.MetaData]'
            '(http://docs.sqlalchemy.org/en/rel_1_0/core/metadata.html'
            '#sqlalchemy.schema.MetaData) call.<br/>'
            '2. The ``metadata_cache_timeout`` is a cache timeout setting '
            'in seconds for metadata fetch of this database. Specify it as '
            '**"metadata_cache_timeout": {"schema_cache_timeout": 600}**. '
            'If unset, cache will not be enabled for the functionality. '
            'A timeout of 0 indicates that the cache never expires.<br/>'
            '3. The ``schemas_allowed_for_csv_upload`` is a comma separated list '
            'of schemas that CSVs are allowed to upload to. '
            'Specify it as **"schemas_allowed": ["public", "csv_upload"]**. '
            'If database flavor does not support schema or any schema is allowed '
            'to be accessed, just leave the list empty', True),
        'impersonate_user': _(
            'If Presto, all the queries in SQL Lab are going to be executed as the '
            'currently logged on user who must have permission to run them.<br/>'
            'If Hive and hive.server2.enable.doAs is enabled, will run the queries as '
            'service account, but impersonate the currently logged on user '
            'via hive.server2.proxy.user property.'),
        'allow_multi_schema_metadata_fetch': _(
            'Allow SQL Lab to fetch a list of all tables and all views across '
            'all database schemas. For large data warehouse with thousands of '
            'tables, this can be expensive and put strain on the system.'),
        'cache_timeout': _(
            'Duration (in seconds) of the caching timeout for charts of this database. '
            'A timeout of 0 indicates that the cache never expires. '
            'Note this defaults to the global timeout if undefined.'),
        'allow_csv_upload': _(
            'If selected, please set the schemas allowed for csv upload in Extra.'),
    }
    label_columns = {
        'expose_in_sqllab': _('Expose in SQL Lab'),
        'allow_ctas': _('Allow CREATE TABLE AS'),
        'allow_dml': _('Allow DML'),
        'force_ctas_schema': _('CTAS Schema'),
        'database_name': _('Database'),
        'creator': _('Creator'),
        'changed_on_': _('Last Changed'),
        'sqlalchemy_uri': _('SQLAlchemy URI'),
        'cache_timeout': _('Chart Cache Timeout'),
        'extra': _('Extra'),
        'allow_run_sync': _('Allow Run Sync'),
        'allow_run_async': _('Allow Run Async'),
        'impersonate_user': _('Impersonate the logged on user'),
        'allow_csv_upload': _('Allow Csv Upload'),
        'modified': _('Modified'),
        'allow_multi_schema_metadata_fetch': _('Allow Multi Schema Metadata Fetch'),
        'backend': _('Backend'),
    }

    def pre_add(self, db):
        self.check_extra(db)
        db.set_sqlalchemy_uri(db.sqlalchemy_uri)
        security_manager.merge_perm('database_access', db.perm)
        # adding a new database we always want to force refresh schema list
        for schema in db.all_schema_names(force_refresh=True):
            security_manager.merge_perm(
                'schema_access', security_manager.get_schema_perm(db, schema))

    def pre_update(self, db):
        self.pre_add(db)

    def pre_delete(self, obj):
        if obj.tables:
            raise SupersetException(Markup(
                'Cannot delete a database that has tables attached. '
                "Here's the list of associated tables: " +
                ', '.join('{}'.format(o) for o in obj.tables)))

    def _delete(self, pk):
        DeleteMixin._delete(self, pk)

    def check_extra(self, db):
        # this will check whether json.loads(extra) can succeed
        try:
            extra = db.get_extra()
        except Exception as e:
            raise Exception('Extra field cannot be decoded by JSON. {}'.format(str(e)))
        # this will check whether 'metadata_params' is configured correctly
        metadata_signature = inspect.signature(MetaData)
        for key in extra.get('metadata_params', {}):
            if key not in metadata_signature.parameters:
                raise Exception('The metadata_params in Extra field '
                                'is not configured correctly. The key '
                                '{} is invalid.'.format(key))
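

# Illustrative only (added for clarity, not part of the original module): an
# ``extra`` JSON value that ``check_extra()`` above would accept, built from
# the keys documented in ``description_columns`` -- ``engine_params``,
# ``metadata_params``, ``metadata_cache_timeout`` and the schemas allowed for
# CSV upload. Values are examples, not recommendations:
#
#     {
#         "engine_params": {"pool_recycle": 3600},
#         "metadata_params": {"schema": "public"},
#         "metadata_cache_timeout": {"schema_cache_timeout": 600},
#         "schemas_allowed": ["public", "csv_upload"]
#     }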


appbuilder.add_link(
    'Import Dashboards',
    label=__('Import Dashboards'),
    href='/superset/import_dashboards',
    icon='fa-cloud-upload',
    category='Manage',
    category_label=__('Manage'),
    category_icon='fa-wrench')

appbuilder.add_view(
    DatabaseView,
    'Databases',
    label=__('Databases'),
    icon='fa-database',
    category='Sources',
    category_label=__('Sources'),
    category_icon='fa-database')


class DatabaseAsync(DatabaseView):
    list_columns = [
        'id', 'database_name',
        'expose_in_sqllab', 'allow_ctas', 'force_ctas_schema',
        'allow_run_async', 'allow_run_sync', 'allow_dml',
        'allow_multi_schema_metadata_fetch', 'allow_csv_upload',
        'allows_subquery',
    ]


appbuilder.add_view_no_menu(DatabaseAsync)


class CsvToDatabaseView(SimpleFormView):
    form = CsvToDatabaseForm
    form_template = 'superset/form_view/csv_to_database_view/edit.html'
    form_title = _('CSV to Database configuration')
    add_columns = ['database', 'schema', 'table_name']

    def form_get(self, form):
        form.sep.data = ','
        form.header.data = 0
        form.mangle_dupe_cols.data = True
        form.skipinitialspace.data = False
        form.skip_blank_lines.data = True
        form.infer_datetime_format.data = True
        form.decimal.data = '.'
        form.if_exists.data = 'fail'

    def form_post(self, form):
        database = form.con.data
        schema_name = form.schema.data or ''

        if not self.is_schema_allowed(database, schema_name):
            message = _('Database "{0}" Schema "{1}" is not allowed for csv uploads. '
                        'Please contact Superset Admin'.format(database.database_name,
                                                               schema_name))
            flash(message, 'danger')
            return redirect('/csvtodatabaseview/form')

        csv_file = form.csv_file.data
        form.csv_file.data.filename = secure_filename(form.csv_file.data.filename)
        csv_filename = form.csv_file.data.filename
        path = os.path.join(config['UPLOAD_FOLDER'], csv_filename)
        try:
            utils.ensure_path_exists(config['UPLOAD_FOLDER'])
            csv_file.save(path)
            table = SqlaTable(table_name=form.name.data)
            table.database = form.data.get('con')
            table.database_id = table.database.id
            table.database.db_engine_spec.create_table_from_csv(form, table)
        except Exception as e:
            try:
                os.remove(path)
            except OSError:
                pass
            message = 'Table name {} already exists. Please pick another'.format(
                form.name.data) if isinstance(e, IntegrityError) else e
            flash(
                message,
                'danger')
            return redirect('/csvtodatabaseview/form')

        os.remove(path)
        # Go back to welcome page / splash screen
        db_name = table.database.database_name
        message = _('CSV file "{0}" uploaded to table "{1}" in '
                    'database "{2}"'.format(csv_filename,
                                            form.name.data,
                                            db_name))
        flash(message, 'info')
        return redirect('/tablemodelview/list/')

    def is_schema_allowed(self, database, schema):
        if not database.allow_csv_upload:
            return False
        schemas = database.get_schema_access_for_csv_upload()
        if schemas:
            return schema in schemas
        return (security_manager.database_access(database) or
                security_manager.all_datasource_access())


appbuilder.add_view_no_menu(CsvToDatabaseView)


class DatabaseTablesAsync(DatabaseView):
    list_columns = ['id', 'all_table_names', 'all_schema_names']


appbuilder.add_view_no_menu(DatabaseTablesAsync)


if config.get('ENABLE_ACCESS_REQUEST'):
    class AccessRequestsModelView(SupersetModelView, DeleteMixin):
        datamodel = SQLAInterface(DAR)
        list_columns = [
            'username', 'user_roles', 'datasource_link',
            'roles_with_datasource', 'created_on']
        order_columns = ['created_on']
        base_order = ('changed_on', 'desc')
        label_columns = {
            'username': _('User'),
            'user_roles': _('User Roles'),
            'database': _('Database URL'),
            'datasource_link': _('Datasource'),
            'roles_with_datasource': _('Roles to grant'),
            'created_on': _('Created On'),
        }

    appbuilder.add_view(
        AccessRequestsModelView,
        'Access requests',
        label=__('Access requests'),
        category='Security',
        category_label=__('Security'),
        icon='fa-table')


class SliceModelView(SupersetModelView, DeleteMixin):  # noqa
    route_base = '/chart'
    datamodel = SQLAInterface(models.Slice)

    list_title = _('List Charts')
    show_title = _('Show Chart')
    add_title = _('Add Chart')
    edit_title = _('Edit Chart')

    can_add = False
    label_columns = {
        'datasource_link': _('Datasource'),
    }
    search_columns = (
        'slice_name', 'description', 'viz_type', 'datasource_name', 'owners',
    )
    list_columns = [
        'slice_link', 'viz_type', 'datasource_link', 'creator', 'modified']
    order_columns = ['viz_type', 'datasource_link', 'modified']
    edit_columns = [
        'slice_name', 'description', 'viz_type', 'owners', 'dashboards',
        'params', 'cache_timeout']
    base_order = ('changed_on', 'desc')
    description_columns = {
        'description': Markup(
            'The content here can be displayed as widget headers in the '
            'dashboard view. Supports '
            '<a href="https://daringfireball.net/projects/markdown/">'
            'markdown</a>'),
        'params': _(
            'These parameters are generated dynamically when clicking '
            'the save or overwrite button in the explore view. This JSON '
            'object is exposed here for reference and for power users who may '
            'want to alter specific parameters.',
        ),
        'cache_timeout': _(
            'Duration (in seconds) of the caching timeout for this chart. '
            'Note this defaults to the datasource/table timeout if undefined.'),
    }
    base_filters = [['id', SliceFilter, lambda: []]]
    label_columns = {
        'cache_timeout': _('Cache Timeout'),
        'creator': _('Creator'),
        'dashboards': _('Dashboards'),
        'datasource_link': _('Datasource'),
        'description': _('Description'),
        'modified': _('Last Modified'),
        'owners': _('Owners'),
        'params': _('Parameters'),
        'slice_link': _('Chart'),
        'slice_name': _('Name'),
        'table': _('Table'),
        'viz_type': _('Visualization Type'),
    }

    def pre_add(self, obj):
        utils.validate_json(obj.params)

    def pre_update(self, obj):
        utils.validate_json(obj.params)
        check_ownership(obj)

    def pre_delete(self, obj):
        check_ownership(obj)

    @expose('/add', methods=['GET', 'POST'])
    @has_access
    def add(self):
        datasources = ConnectorRegistry.get_all_datasources(db.session)
        datasources = [
            {'value': str(d.id) + '__' + d.type, 'label': repr(d)}
            for d in datasources
        ]
        return self.render_template(
            'superset/add_slice.html',
            bootstrap_data=json.dumps({
                'datasources': sorted(datasources, key=lambda d: d['label']),
            }),
        )


appbuilder.add_view(
    SliceModelView,
    'Charts',
    label=__('Charts'),
    icon='fa-bar-chart',
    category='',
    category_icon='')


class SliceAsync(SliceModelView):  # noqa
    route_base = '/sliceasync'
    list_columns = [
        'id', 'slice_link', 'viz_type', 'slice_name',
        'creator', 'modified', 'icons']
    label_columns = {
        'icons': ' ',
        'slice_link': _('Chart'),
    }


appbuilder.add_view_no_menu(SliceAsync)


class SliceAddView(SliceModelView):  # noqa
    route_base = '/sliceaddview'
    list_columns = [
        'id', 'slice_name', 'slice_url', 'edit_url', 'viz_type', 'params',
        'description', 'description_markeddown', 'datasource_id', 'datasource_type',
        'datasource_name_text', 'datasource_link',
        'owners', 'modified', 'changed_on']


appbuilder.add_view_no_menu(SliceAddView)


class DashboardModelView(SupersetModelView, DeleteMixin):  # noqa
    route_base = '/dashboard'
    datamodel = SQLAInterface(models.Dashboard)

    list_title = _('List Dashboards')
    show_title = _('Show Dashboard')
    add_title = _('Add Dashboard')
    edit_title = _('Edit Dashboard')

    list_columns = ['dashboard_link', 'creator', 'modified']
    order_columns = ['modified']
    edit_columns = [
        'dashboard_title', 'slug', 'owners', 'position_json', 'css',
        'json_metadata']
    show_columns = edit_columns + ['table_names', 'slices']
    search_columns = ('dashboard_title', 'slug', 'owners')
    add_columns = edit_columns
    base_order = ('changed_on', 'desc')
    description_columns = {
        'position_json': _(
            'This json object describes the positioning of the widgets in '
            'the dashboard. It is dynamically generated when adjusting '
            'the widgets size and positions by using drag & drop in '
            'the dashboard view'),
        'css': _(
            'The css for individual dashboards can be altered here, or '
            'in the dashboard view where changes are immediately '
            'visible'),
        'slug': _('To get a readable URL for your dashboard'),
        'json_metadata': _(
            'This JSON object is generated dynamically when clicking '
            'the save or overwrite button in the dashboard view. It '
            'is exposed here for reference and for power users who may '
            'want to alter specific parameters.'),
        'owners': _('Owners is a list of users who can alter the dashboard.'),
    }
    base_filters = [['slice', DashboardFilter, lambda: []]]
    label_columns = {
        'dashboard_link': _('Dashboard'),
        'dashboard_title': _('Title'),
        'slug': _('Slug'),
        'slices': _('Charts'),
        'owners': _('Owners'),
        'creator': _('Creator'),
        'modified': _('Modified'),
        'position_json': _('Position JSON'),
        'css': _('CSS'),
        'json_metadata': _('JSON Metadata'),
        'table_names': _('Underlying Tables'),
    }

    def pre_add(self, obj):
        obj.slug = obj.slug.strip() or None
        if obj.slug:
            obj.slug = obj.slug.replace(' ', '-')
            obj.slug = re.sub(r'[^\w\-]+', '', obj.slug)
        if g.user not in obj.owners:
            obj.owners.append(g.user)
        utils.validate_json(obj.json_metadata)
        utils.validate_json(obj.position_json)
        owners = [o for o in obj.owners]
        for slc in obj.slices:
            slc.owners = list(set(owners) | set(slc.owners))

    def pre_update(self, obj):
        check_ownership(obj)
        self.pre_add(obj)

    def pre_delete(self, obj):
        check_ownership(obj)

    @action('mulexport', __('Export'), __('Export dashboards?'), 'fa-database')
    def mulexport(self, items):
        if not isinstance(items, list):
            items = [items]
        ids = ''.join('&id={}'.format(d.id) for d in items)
        return redirect(
            '/dashboard/export_dashboards_form?{}'.format(ids[1:]))

    @expose('/export_dashboards_form')
    def download_dashboards(self):
        if request.args.get('action') == 'go':
            ids = request.args.getlist('id')
            return Response(
                models.Dashboard.export_dashboards(ids),
                headers=generate_download_headers('json'),
                mimetype='application/text')
        return self.render_template(
            'superset/export_dashboards.html',
            dashboards_url='/dashboard/list',
        )


appbuilder.add_view(
    DashboardModelView,
    'Dashboards',
    label=__('Dashboards'),
    icon='fa-dashboard',
    category='',
    category_icon='')


class DashboardModelViewAsync(DashboardModelView):  # noqa
    route_base = '/dashboardasync'
    list_columns = [
        'id', 'dashboard_link', 'creator', 'modified', 'dashboard_title',
        'changed_on', 'url', 'changed_by_name',
    ]
    label_columns = {
        'dashboard_link': _('Dashboard'),
        'dashboard_title': _('Title'),
        'creator': _('Creator'),
        'modified': _('Modified'),
    }


appbuilder.add_view_no_menu(DashboardModelViewAsync)


class DashboardAddView(DashboardModelView):  # noqa
    route_base = '/dashboardaddview'
    list_columns = [
        'id', 'dashboard_link', 'creator', 'modified', 'dashboard_title',
        'changed_on', 'url', 'changed_by_name',
    ]
    show_columns = list(set(DashboardModelView.edit_columns + list_columns))


appbuilder.add_view_no_menu(DashboardAddView)


class LogModelView(SupersetModelView):
    datamodel = SQLAInterface(models.Log)

    list_title = _('List Log')
    show_title = _('Show Log')
    add_title = _('Add Log')
    edit_title = _('Edit Log')

    list_columns = ('user', 'action', 'dttm')
    edit_columns = ('user', 'action', 'dttm', 'json')
    base_order = ('dttm', 'desc')
    label_columns = {
        'user': _('User'),
        'action': _('Action'),
        'dttm': _('dttm'),
        'json': _('JSON'),
    }


appbuilder.add_view(
    LogModelView,
    'Action Log',
    label=__('Action Log'),
    category='Security',
    category_label=__('Security'),
    icon='fa-list-ol')


@app.route('/health')
def health():
    return 'OK'


@app.route('/healthcheck')
def healthcheck():
    return 'OK'


@app.route('/ping')
def ping():
    return 'OK'


class KV(BaseSupersetView):
    """Used for storing and retrieving key value pairs"""

    @log_this
    @expose('/store/', methods=['POST'])
    def store(self):
        try:
            value = request.form.get('data')
            obj = models.KeyValue(value=value)
            db.session.add(obj)
            db.session.commit()
        except Exception as e:
            return json_error_response(e)
        return Response(
            json.dumps({'id': obj.id}),
            status=200)

    @log_this
    @expose('/<key_id>/', methods=['GET'])
    def get_value(self, key_id):
        kv = None
        try:
            kv = db.session.query(models.KeyValue).filter_by(id=key_id).one()
        except Exception as e:
            return json_error_response(e)
        return Response(kv.value, status=200)


appbuilder.add_view_no_menu(KV)
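
# Illustrative only (added for clarity, not part of the original module):
# assuming the default FAB route base derived from the class name ('/kv') and
# a local dev server, the KV endpoints above can be exercised roughly like so:
#
#     import requests
#     resp = requests.post('http://localhost:8088/kv/store/',
#                          data={'data': '{"foo": "bar"}'})
#     key_id = resp.json()['id']
#     stored = requests.get('http://localhost:8088/kv/{}/'.format(key_id)).text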


class R(BaseSupersetView):
    """used for short urls"""

    @log_this
    @expose('/<url_id>')
    def index(self, url_id):
        url = db.session.query(models.Url).filter_by(id=url_id).first()
        if url:
            return redirect('/' + url.url)
        else:
            flash('URL to nowhere...', 'danger')
            return redirect('/')

    @log_this
    @expose('/shortner/', methods=['POST', 'GET'])
    def shortner(self):
        url = request.form.get('data')
        obj = models.Url(url=url)
        db.session.add(obj)
        db.session.commit()
        return Response(
            '{scheme}://{request.headers[Host]}/r/{obj.id}'.format(
                scheme=request.scheme, request=request, obj=obj),
            mimetype='text/plain')

    @expose('/msg/')
    def msg(self):
        """Redirects to specified url while flashing a message"""
        flash(Markup(request.args.get('msg')), 'info')
        return redirect(request.args.get('url'))


appbuilder.add_view_no_menu(R)
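
# Illustrative only (added for clarity, not part of the original module): the
# short-url flow above, with an assumed host. POSTing a path (without leading
# slash) to /r/shortner/ returns a short link; visiting it redirects:
#
#     import requests
#     short = requests.post('http://localhost:8088/r/shortner/',
#                           data={'data': 'superset/dashboard/1/'}).text
#     # e.g. 'http://localhost:8088/r/7'; GET /r/7 redirects to
#     # /superset/dashboard/1/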


class Superset(BaseSupersetView):
    """The base views for Superset!"""

    @has_access_api
    @expose('/datasources/')
    def datasources(self):
        datasources = ConnectorRegistry.get_all_datasources(db.session)
        datasources = [o.short_data for o in datasources]
        datasources = sorted(datasources, key=lambda o: o['name'])
        return self.json_response(datasources)

    @has_access_api
    @expose('/override_role_permissions/', methods=['POST'])
    def override_role_permissions(self):
        """Updates the role with the given datasource permissions.

        Permissions not in the request will be revoked. This endpoint should
        be available to admins only. Expects JSON in the format:
            {
                'role_name': '{role_name}',
                'database': [{
                    'datasource_type': '{table|druid}',
                    'name': '{database_name}',
                    'schema': [{
                        'name': '{schema_name}',
                        'datasources': ['{datasource name}, {datasource name}']
                    }]
                }]
            }
        """
        data = request.get_json(force=True)
        role_name = data['role_name']
        databases = data['database']

        db_ds_names = set()
        for dbs in databases:
            for schema in dbs['schema']:
                for ds_name in schema['datasources']:
                    fullname = utils.get_datasource_full_name(
                        dbs['name'], ds_name, schema=schema['name'])
                    db_ds_names.add(fullname)

        existing_datasources = ConnectorRegistry.get_all_datasources(db.session)
        datasources = [
            d for d in existing_datasources if d.full_name in db_ds_names]
        role = security_manager.find_role(role_name)
        # remove all permissions
        role.permissions = []
        # grant permissions to the list of datasources
        granted_perms = []
        for datasource in datasources:
            view_menu_perm = security_manager.find_permission_view_menu(
                view_menu_name=datasource.perm,
                permission_name='datasource_access')
            # prevent creating empty permissions
            if view_menu_perm and view_menu_perm.view_menu:
                role.permissions.append(view_menu_perm)
                granted_perms.append(view_menu_perm.view_menu.name)
        db.session.commit()
        return self.json_response({
            'granted': granted_perms,
            'requested': list(db_ds_names),
        }, status=201)

    @log_this
    @has_access
    @expose('/request_access/')
    def request_access(self):
        datasources = set()
        dashboard_id = request.args.get('dashboard_id')
        if dashboard_id:
            dash = (
                db.session.query(models.Dashboard)
                .filter_by(id=int(dashboard_id))
                .one()
            )
            datasources |= dash.datasources
        datasource_id = request.args.get('datasource_id')
        datasource_type = request.args.get('datasource_type')
        if datasource_id:
            ds_class = ConnectorRegistry.sources.get(datasource_type)
            datasource = (
                db.session.query(ds_class)
                .filter_by(id=int(datasource_id))
                .one()
            )
            datasources.add(datasource)

        has_access = all(
            (
                datasource and security_manager.datasource_access(datasource)
                for datasource in datasources
            ))
        if has_access:
            return redirect('/superset/dashboard/{}'.format(dashboard_id))

        if request.args.get('action') == 'go':
            for datasource in datasources:
                access_request = DAR(
                    datasource_id=datasource.id,
                    datasource_type=datasource.type)
                db.session.add(access_request)
                db.session.commit()
            flash(__('Access was requested'), 'info')
            return redirect('/')

        return self.render_template(
            'superset/request_access.html',
            datasources=datasources,
            datasource_names=', '.join([o.name for o in datasources]),
        )

    @log_this
    @has_access
    @expose('/approve')
    def approve(self):
        def clean_fulfilled_requests(session):
            for r in session.query(DAR).all():
                datasource = ConnectorRegistry.get_datasource(
                    r.datasource_type, r.datasource_id, session)
                user = security_manager.get_user_by_id(r.created_by_fk)
                if not datasource or \
                        security_manager.datasource_access(datasource, user):
                    # datasource does not exist anymore
                    session.delete(r)
            session.commit()

        datasource_type = request.args.get('datasource_type')
        datasource_id = request.args.get('datasource_id')
        created_by_username = request.args.get('created_by')
        role_to_grant = request.args.get('role_to_grant')
        role_to_extend = request.args.get('role_to_extend')

        session = db.session
        datasource = ConnectorRegistry.get_datasource(
            datasource_type, datasource_id, session)

        if not datasource:
            flash(DATASOURCE_MISSING_ERR, 'alert')
            return json_error_response(DATASOURCE_MISSING_ERR)

        requested_by = security_manager.find_user(username=created_by_username)
        if not requested_by:
            flash(USER_MISSING_ERR, 'alert')
            return json_error_response(USER_MISSING_ERR)

        requests = (
            session.query(DAR)
            .filter(
                DAR.datasource_id == datasource_id,
                DAR.datasource_type == datasource_type,
                DAR.created_by_fk == requested_by.id)
            .all()
        )

        if not requests:
            flash(ACCESS_REQUEST_MISSING_ERR, 'alert')
            return json_error_response(ACCESS_REQUEST_MISSING_ERR)

        # check if you can approve
        if security_manager.all_datasource_access() or g.user.id == datasource.owner_id:
            # can be done by admin only
            if role_to_grant:
                role = security_manager.find_role(role_to_grant)
                requested_by.roles.append(role)
                msg = __(
                    '%(user)s was granted the role %(role)s that gives access '
                    'to the %(datasource)s',
                    user=requested_by.username,
                    role=role_to_grant,
                    datasource=datasource.full_name)
                utils.notify_user_about_perm_udate(
                    g.user, requested_by, role, datasource,
                    'email/role_granted.txt', app.config)
                flash(msg, 'info')

            if role_to_extend:
                perm_view = security_manager.find_permission_view_menu(
                    'email/datasource_access', datasource.perm)
                role = security_manager.find_role(role_to_extend)
                security_manager.add_permission_role(role, perm_view)
                msg = __('Role %(r)s was extended to provide the access to '
                         'the datasource %(ds)s', r=role_to_extend,
                         ds=datasource.full_name)
                utils.notify_user_about_perm_udate(
                    g.user, requested_by, role, datasource,
                    'email/role_extended.txt', app.config)
                flash(msg, 'info')
            clean_fulfilled_requests(session)
        else:
            flash(__('You have no permission to approve this request'),
                  'danger')
            return redirect('/accessrequestsmodelview/list/')
        for r in requests:
            session.delete(r)
        session.commit()
        return redirect('/accessrequestsmodelview/list/')

    def get_form_data(self, slice_id=None, use_slice_data=False):
        form_data = {}
        post_data = request.form.get('form_data')
        request_args_data = request.args.get('form_data')
        # Supporting POST
        if post_data:
            form_data.update(json.loads(post_data))
        # request params can overwrite post body
        if request_args_data:
            form_data.update(json.loads(request_args_data))

        url_id = request.args.get('r')
        if url_id:
            saved_url = db.session.query(models.Url).filter_by(id=url_id).first()
            if saved_url:
                url_str = parse.unquote_plus(
                    saved_url.url.split('?')[1][10:], encoding='utf-8', errors=None)
                url_form_data = json.loads(url_str)
                # allow form_data in request to override saved url
                url_form_data.update(form_data)
                form_data = url_form_data

        if request.args.get('viz_type'):
            # Converting old URLs
            form_data = cast_form_data(form_data)

        form_data = {
            k: v
            for k, v in form_data.items()
            if k not in FORM_DATA_KEY_BLACKLIST
        }

        # When a slice_id is present, load from DB and override
        # the form_data from the DB with the other form_data provided
        slice_id = form_data.get('slice_id') or slice_id
        slc = None

        # Check if form data only contains slice_id
        contains_only_slc_id = not any(key != 'slice_id' for key in form_data)

        # Include the slice_form_data if request from explore or slice calls
        # or if form_data only contains slice_id
        if slice_id and (use_slice_data or contains_only_slc_id):
            slc = db.session.query(models.Slice).filter_by(id=slice_id).first()
            slice_form_data = slc.form_data.copy()
            # allow form_data in request to override slice form_data
            slice_form_data.update(form_data)
            form_data = slice_form_data

        update_time_range(form_data)

        return form_data, slc
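
    # Note on precedence in get_form_data() above (summary added for clarity,
    # not part of the original source): query-string form_data overrides
    # POST-body form_data, and both override the form_data recovered from a
    # saved short URL (?r=<id>) or, when applicable, from the stored slice.
    # In other words, whatever is sent with the current request always wins.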

    def get_viz(
            self,
            slice_id=None,
            form_data=None,
            datasource_type=None,
            datasource_id=None,
            force=False,
    ):
        if slice_id:
            slc = (
                db.session.query(models.Slice)
                .filter_by(id=slice_id)
                .one()
            )
            return slc.get_viz()
        else:
            viz_type = form_data.get('viz_type', 'table')
            datasource = ConnectorRegistry.get_datasource(
                datasource_type, datasource_id, db.session)
            viz_obj = viz.viz_types[viz_type](
                datasource,
                form_data=form_data,
                force=force,
            )
            return viz_obj

    @has_access
    @expose('/slice/<slice_id>/')
    def slice(self, slice_id):
        form_data, slc = self.get_form_data(slice_id, use_slice_data=True)
        endpoint = '/superset/explore/?form_data={}'.format(
            parse.quote(json.dumps(form_data)),
        )
        if request.args.get('standalone') == 'true':
            endpoint += '&standalone=true'
        return redirect(endpoint)

    def get_query_string_response(self, viz_obj):
        query = None
        try:
            query_obj = viz_obj.query_obj()
            if query_obj:
                query = viz_obj.datasource.get_query_str(query_obj)
        except Exception as e:
            logging.exception(e)
            return json_error_response(e)

        if query_obj and query_obj['prequeries']:
            query_obj['prequeries'].append(query)
            query = ';\n\n'.join(query_obj['prequeries'])
        if query:
            query += ';'
        else:
            query = 'No query.'

        return self.json_response({
            'query': query,
            'language': viz_obj.datasource.query_language,
        })

    def get_raw_results(self, viz_obj):
        return self.json_response({
            'data': viz_obj.get_df().to_dict('records'),
        })

    def get_samples(self, viz_obj):
        return self.json_response({
            'data': viz_obj.get_samples(),
        })

    def generate_json(
            self, datasource_type, datasource_id, form_data,
            csv=False, xlsx=False, query=False, force=False, results=False,
            samples=False,
    ):
        try:
            viz_obj = self.get_viz(
                datasource_type=datasource_type,
                datasource_id=datasource_id,
                form_data=form_data,
                force=force,
            )
        except Exception as e:
            logging.exception(e)
            return json_error_response(
                utils.error_msg_from_exception(e),
                stacktrace=traceback.format_exc())

        if not security_manager.datasource_access(viz_obj.datasource, g.user):
            return json_error_response(
                security_manager.get_datasource_access_error_msg(viz_obj.datasource),
                status=404,
                link=security_manager.get_datasource_access_link(viz_obj.datasource))

        if csv:
            return CsvResponse(
                viz_obj.get_csv(),
                status=200,
                headers=generate_download_headers('csv'),
                mimetype='application/csv')

        if xlsx:
            dt = datetime.now()
            name = dt.strftime('%Y%m%d_%H%M%S.xlsx')
            return XlsxResponse(
                viz_obj.get_xlsx(),
                status=200,
                headers={'Content-Disposition': 'attachment; filename=' + name},
                mimetype='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet')

        if query:
            return self.get_query_string_response(viz_obj)

        if results:
            return self.get_raw_results(viz_obj)

        if samples:
            return self.get_samples(viz_obj)

        try:
            payload = viz_obj.get_payload()
        except SupersetException as se:
            logging.exception(se)
            return json_error_response(utils.error_msg_from_exception(se),
                                       status=se.status)
        except Exception as e:
            logging.exception(e)
            return json_error_response(utils.error_msg_from_exception(e))

        status = 200
        if (
                payload.get('status') == QueryStatus.FAILED or
                payload.get('error') is not None
        ):
            status = 400

        return json_success(viz_obj.json_dumps(payload), status=status)
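
    # Clarifying note (added, not part of the original source): the csv /
    # xlsx / query / results / samples flags in generate_json() above are
    # checked in that order and each one short-circuits with its own response
    # type; only when none is set does the method fall through to the regular
    # visualization payload from viz_obj.get_payload().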

    @log_this
    @has_access_api
    @expose('/slice_json/<slice_id>')
    def slice_json(self, slice_id):
        try:
            form_data, slc = self.get_form_data(slice_id, use_slice_data=True)
            datasource_type = slc.datasource.type
            datasource_id = slc.datasource.id
        except Exception as e:
            return json_error_response(
                utils.error_msg_from_exception(e),
                stacktrace=traceback.format_exc())
        return self.generate_json(datasource_type=datasource_type,
                                  datasource_id=datasource_id,
                                  form_data=form_data)

    @log_this
    @has_access_api
    @expose('/annotation_json/<layer_id>')
    def annotation_json(self, layer_id):
        form_data = self.get_form_data()[0]
        form_data['layer_id'] = layer_id
        form_data['filters'] = [{'col': 'layer_id',
                                 'op': '==',
                                 'val': layer_id}]
        datasource = AnnotationDatasource()
        viz_obj = viz.viz_types['table'](
            datasource,
            form_data=form_data,
            force=False,
        )
        try:
            payload = viz_obj.get_payload()
        except Exception as e:
            logging.exception(e)
            return json_error_response(utils.error_msg_from_exception(e))
        status = 200
        if payload.get('status') == QueryStatus.FAILED:
            status = 400
        return json_success(viz_obj.json_dumps(payload), status=status)

    @log_this
    @has_access_api
    @expose('/explore_json/<datasource_type>/<datasource_id>/', methods=['GET', 'POST'])
    @expose('/explore_json/', methods=['GET', 'POST'])
    def explore_json(self, datasource_type=None, datasource_id=None):
        """Serves all requests that GET or POST form_data

        This endpoint evolved to be the entry point of many different
        requests that GET or POST a form_data.

        `self.generate_json` receives this input and returns different
        payloads based on the request args in the first block

        TODO: break into one endpoint for each return shape"""
        csv = request.args.get('csv') == 'true'
        xlsx = request.args.get('xlsx') == 'true'
        query = request.args.get('query') == 'true'
        results = request.args.get('results') == 'true'
        samples = request.args.get('samples') == 'true'
        force = request.args.get('force') == 'true'

        try:
            form_data = self.get_form_data()[0]
            datasource_id, datasource_type = self.datasource_info(
                datasource_id, datasource_type, form_data)
        except Exception as e:
            logging.exception(e)
            return json_error_response(
                utils.error_msg_from_exception(e),
                stacktrace=traceback.format_exc())
        return self.generate_json(
            datasource_type=datasource_type,
            datasource_id=datasource_id,
            form_data=form_data,
            csv=csv,
            xlsx=xlsx,
            query=query,
            results=results,
            force=force,
            samples=samples,
        )
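
    # Example request (added for clarity, not part of the original source):
    # the flags above are plain query-string strings compared to 'true', so
    # something like
    #     GET /superset/explore_json/?form_data=<urlencoded json>&csv=true
    # would return a CSV download, while omitting all flags returns the JSON
    # chart payload.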

    @log_this
    @has_access
    @expose('/import_dashboards', methods=['GET', 'POST'])
    def import_dashboards(self):
        """Overrides the dashboards using json instances from the file."""
        f = request.files.get('file')
        if request.method == 'POST' and f:
            dashboard_import_export_util.import_dashboards(db.session, f.stream)
            return redirect('/dashboard/list/')
        return self.render_template('superset/import_dashboards.html')

    @log_this
    @has_access
    @expose('/explorev2/<datasource_type>/<datasource_id>/')
    def explorev2(self, datasource_type, datasource_id):
        """Deprecated endpoint, here for backward compatibility of urls"""
        return redirect(url_for(
            'Superset.explore',
            datasource_type=datasource_type,
            datasource_id=datasource_id,
            **request.args))

    @staticmethod
    def datasource_info(datasource_id, datasource_type, form_data):
        """Compatibility layer for handling of datasource info

        datasource_id & datasource_type used to be passed in the URL
        directly; now they should come as part of the form_data.
        This function allows supporting both without duplicating code"""
        datasource = form_data.get('datasource', '')
        if '__' in datasource:
            datasource_id, datasource_type = datasource.split('__')
            # The case where the datasource has been deleted
            datasource_id = None if datasource_id == 'None' else datasource_id

        if not datasource_id:
            raise Exception(
                'The datasource associated with this chart no longer exists')
        datasource_id = int(datasource_id)
        return datasource_id, datasource_type
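
    # Example (added for clarity, not part of the original source): explore
    # form_data carries the datasource as '<id>__<type>', e.g.
    # {'datasource': '42__table'}, which datasource_info() above splits back
    # into (42, 'table'). The reverse is built in explore() below via
    # str(datasource_id) + '__' + datasource_type.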

    @log_this
    @has_access
    @expose('/explore/<datasource_type>/<datasource_id>/', methods=['GET', 'POST'])
    @expose('/explore/', methods=['GET', 'POST'])
    def explore(self, datasource_type=None, datasource_id=None):
        user_id = g.user.get_id() if g.user else None
        form_data, slc = self.get_form_data(use_slice_data=True)

        datasource_id, datasource_type = self.datasource_info(
            datasource_id, datasource_type, form_data)

        error_redirect = '/chart/list/'
        datasource = ConnectorRegistry.get_datasource(
            datasource_type, datasource_id, db.session)
        if not datasource:
            flash(DATASOURCE_MISSING_ERR, 'danger')
            return redirect(error_redirect)

        if config.get('ENABLE_ACCESS_REQUEST') and (
                not security_manager.datasource_access(datasource)
        ):
            flash(
                __(security_manager.get_datasource_access_error_msg(datasource)),
                'danger')
            return redirect(
                'superset/request_access/?'
                'datasource_type={datasource_type}&'
                'datasource_id={datasource_id}&'
                ''.format(**locals()))

        viz_type = form_data.get('viz_type')
        if not viz_type and datasource.default_endpoint:
            return redirect(datasource.default_endpoint)

        # slc perms
        slice_add_perm = security_manager.can_access('can_add', 'SliceModelView')
        slice_overwrite_perm = is_owner(slc, g.user)
        slice_download_perm = security_manager.can_access(
            'can_download', 'SliceModelView')

        form_data['datasource'] = str(datasource_id) + '__' + datasource_type

        # On explore, merge legacy and extra filters into the form data
        utils.convert_legacy_filters_into_adhoc(form_data)
        merge_extra_filters(form_data)

        # merge request url params
        if request.method == 'GET':
            merge_request_params(form_data, request.args)

        # handle save or overwrite
        action = request.args.get('action')

        if action == 'overwrite' and not slice_overwrite_perm:
            return json_error_response(
                _('You don\'t have the rights to ') + _('alter this ') + _('chart'),
                status=400)

        if action == 'saveas' and not slice_add_perm:
            return json_error_response(
                _('You don\'t have the rights to ') + _('create a ') + _('chart'),
                status=400)

        if action in ('saveas', 'overwrite'):
            return self.save_or_overwrite_slice(
                request.args,
                slc, slice_add_perm,
                slice_overwrite_perm,
                slice_download_perm,
                datasource_id,
                datasource_type,
                datasource.name)

        standalone = request.args.get('standalone') == 'true'
        bootstrap_data = {
            'can_add': slice_add_perm,
            'can_download': slice_download_perm,
            'can_overwrite': slice_overwrite_perm,
            'datasource': datasource.data,
            'form_data': form_data,
            'datasource_id': datasource_id,
            'datasource_type': datasource_type,
            'slice': slc.data if slc else None,
            'standalone': standalone,
            'user_id': user_id,
            'forced_height': request.args.get('height'),
            'common': self.common_bootsrap_payload(),
        }
        table_name = datasource.table_name \
            if datasource_type == 'table' \
            else datasource.datasource_name
        if slc:
            title = slc.slice_name
        else:
            title = _('Explore - %(table)s', table=table_name)
        return self.render_template(
            'superset/basic.html',
            bootstrap_data=json.dumps(bootstrap_data),
            entry='explore',
            title=title,
            standalone_mode=standalone)
  1214. @api
  1215. @has_access_api
  1216. @expose('/filter/<datasource_type>/<datasource_id>/<column>/')
  1217. def filter(self, datasource_type, datasource_id, column):
  1218. """
  1219. Endpoint to retrieve values for specified column.
  1220. :param datasource_type: Type of datasource e.g. table
  1221. :param datasource_id: Datasource id
  1222. :param column: Column name to retrieve values for
  1223. :return:
  1224. """
  1225. # TODO: Cache endpoint by user, datasource and column
  1226. datasource = ConnectorRegistry.get_datasource(
  1227. datasource_type, datasource_id, db.session)
  1228. if not datasource:
  1229. return json_error_response(DATASOURCE_MISSING_ERR)
  1230. if not security_manager.datasource_access(datasource):
  1231. return json_error_response(
  1232. security_manager.get_datasource_access_error_msg(datasource))
  1233. payload = json.dumps(
  1234. datasource.values_for_column(
  1235. column,
  1236. config.get('FILTER_SELECT_ROW_LIMIT', 10000),
  1237. ),
  1238. default=utils.json_int_dttm_ser)
  1239. return json_success(payload)
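    # Illustrative request (assumed ids): GET /superset/filter/table/1/gender/
    # returns a JSON list of distinct values for the 'gender' column of table
    # datasource 1, capped by FILTER_SELECT_ROW_LIMIT (default 10000).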
  1240. def save_or_overwrite_slice(
  1241. self, args, slc, slice_add_perm, slice_overwrite_perm, slice_download_perm,
  1242. datasource_id, datasource_type, datasource_name):
  1243. """Save or overwrite a slice"""
  1244. slice_name = args.get('slice_name')
  1245. action = args.get('action')
  1246. form_data, _ = self.get_form_data()
        if action == 'saveas':
            if 'slice_id' in form_data:
                form_data.pop('slice_id')  # don't save old slice_id
            slc = models.Slice(owners=[g.user] if g.user else [])

        slc.params = json.dumps(form_data)
        slc.datasource_name = datasource_name
        slc.viz_type = form_data['viz_type']
        slc.datasource_type = datasource_type
        slc.datasource_id = datasource_id
        slc.slice_name = slice_name

        if action == 'saveas' and slice_add_perm:
            self.save_slice(slc)
        elif action == 'overwrite' and slice_overwrite_perm:
            self.overwrite_slice(slc)
  1261. # Adding slice to a dashboard if requested
  1262. dash = None
  1263. if request.args.get('add_to_dash') == 'existing':
  1264. dash = (
  1265. db.session.query(models.Dashboard)
  1266. .filter_by(id=int(request.args.get('save_to_dashboard_id')))
  1267. .one()
  1268. )
  1269. # check edit dashboard permissions
  1270. dash_overwrite_perm = check_ownership(dash, raise_if_false=False)
  1271. if not dash_overwrite_perm:
  1272. return json_error_response(
  1273. _('You don\'t have the rights to ') + _('alter this ') +
  1274. _('dashboard'),
  1275. status=400)
  1276. flash(
  1277. 'Slice [{}] was added to dashboard [{}]'.format(
  1278. slc.slice_name,
  1279. dash.dashboard_title),
  1280. 'info')
  1281. elif request.args.get('add_to_dash') == 'new':
  1282. # check create dashboard permissions
  1283. dash_add_perm = security_manager.can_access('can_add', 'DashboardModelView')
  1284. if not dash_add_perm:
  1285. return json_error_response(
  1286. _('You don\'t have the rights to ') + _('create a ') + _('dashboard'),
  1287. status=400)
  1288. dash = models.Dashboard(
  1289. dashboard_title=request.args.get('new_dashboard_name'),
  1290. owners=[g.user] if g.user else [])
  1291. flash(
  1292. 'Dashboard [{}] just got created and slice [{}] was added '
  1293. 'to it'.format(
  1294. dash.dashboard_title,
  1295. slc.slice_name),
  1296. 'info')
  1297. if dash and slc not in dash.slices:
  1298. dash.slices.append(slc)
  1299. db.session.commit()
  1300. response = {
  1301. 'can_add': slice_add_perm,
  1302. 'can_download': slice_download_perm,
  1303. 'can_overwrite': is_owner(slc, g.user),
  1304. 'form_data': slc.form_data,
  1305. 'slice': slc.data,
  1306. }
  1307. if request.args.get('goto_dash') == 'true':
  1308. response.update({'dashboard': dash.url})
  1309. return json_success(json.dumps(response))
  1310. def save_slice(self, slc):
  1311. session = db.session()
  1312. msg = _('Chart [{}] has been saved').format(slc.slice_name)
  1313. session.add(slc)
  1314. session.commit()
  1315. flash(msg, 'info')
  1316. def overwrite_slice(self, slc):
  1317. session = db.session()
  1318. session.merge(slc)
  1319. session.commit()
  1320. msg = _('Chart [{}] has been overwritten').format(slc.slice_name)
  1321. flash(msg, 'info')
  1322. @api
  1323. @has_access_api
  1324. @expose('/checkbox/<model_view>/<id_>/<attr>/<value>', methods=['GET'])
  1325. def checkbox(self, model_view, id_, attr, value):
  1326. """endpoint for checking/unchecking any boolean in a sqla model"""
  1327. modelview_to_model = {
  1328. '{}ColumnInlineView'.format(name.capitalize()): source.column_class
  1329. for name, source in ConnectorRegistry.sources.items()
  1330. }
  1331. model = modelview_to_model[model_view]
  1332. col = db.session.query(model).filter_by(id=id_).first()
  1333. checked = value == 'true'
        if col:
            setattr(col, attr, checked)
            if checked:
                metrics = col.get_metrics().values()
                col.datasource.add_missing_metrics(metrics)
            db.session.commit()
        return json_success('OK')
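    # Illustrative request (assumed ids): GET
    # /superset/checkbox/TableColumnInlineView/12/groupby/true flips the
    # 'groupby' flag on table column 12; the <model_view> segment is mapped to
    # the connector's column class via ConnectorRegistry.sources above.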
  1341. @api
  1342. @has_access_api
  1343. @expose('/schemas/<db_id>/')
  1344. @expose('/schemas/<db_id>/<force_refresh>/')
  1345. def schemas(self, db_id, force_refresh='true'):
  1346. db_id = int(db_id)
  1347. force_refresh = force_refresh.lower() == 'true'
  1348. database = (
  1349. db.session
  1350. .query(models.Database)
  1351. .filter_by(id=db_id)
  1352. .one()
  1353. )
  1354. schemas = database.all_schema_names(force_refresh=force_refresh)
  1355. schemas = security_manager.schemas_accessible_by_user(database, schemas)
  1356. return Response(
  1357. json.dumps({'schemas': schemas}),
  1358. mimetype='application/json')
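    # Illustrative request (assumed id): GET /superset/schemas/1/ returns
    # {"schemas": [...]} for database 1, filtered to schemas the current user
    # may access; /superset/schemas/1/false/ skips the forced cache refresh.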
  1359. @api
  1360. @has_access_api
  1361. @expose('/tables/<db_id>/<schema>/<substr>/')
  1362. def tables(self, db_id, schema, substr):
  1363. """Endpoint to fetch the list of tables for given database"""
  1364. db_id = int(db_id)
  1365. schema = utils.js_string_to_python(schema)
  1366. substr = utils.js_string_to_python(substr)
  1367. database = db.session.query(models.Database).filter_by(id=db_id).one()
  1368. table_names = security_manager.accessible_by_user(
  1369. database, database.all_table_names(schema), schema)
  1370. view_names = security_manager.accessible_by_user(
  1371. database, database.all_view_names(schema), schema)
  1372. if substr:
  1373. table_names = [tn for tn in table_names if substr in tn]
  1374. view_names = [vn for vn in view_names if substr in vn]
  1375. max_items = config.get('MAX_TABLE_NAMES') or len(table_names)
  1376. total_items = len(table_names) + len(view_names)
  1377. max_tables = len(table_names)
  1378. max_views = len(view_names)
  1379. if total_items and substr:
  1380. max_tables = max_items * len(table_names) // total_items
  1381. max_views = max_items * len(view_names) // total_items
  1382. table_options = [{'value': tn, 'label': tn}
  1383. for tn in table_names[:max_tables]]
  1384. table_options.extend([{'value': vn, 'label': '[view] {}'.format(vn)}
  1385. for vn in view_names[:max_views]])
  1386. payload = {
  1387. 'tableLength': len(table_names) + len(view_names),
  1388. 'options': table_options,
  1389. }
  1390. return json_success(json.dumps(payload))
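    # Worked example of the truncation above (assumed numbers): when a substring
    # filter is given and MAX_TABLE_NAMES = 100 with 300 matching tables and 100
    # matching views, max_tables = 100 * 300 // 400 = 75 and
    # max_views = 100 * 100 // 400 = 25, so the returned options preserve the
    # table/view ratio of the full result set.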
  1391. @api
  1392. @has_access_api
  1393. @expose('/copy_dash/<dashboard_id>/', methods=['GET', 'POST'])
  1394. def copy_dash(self, dashboard_id):
  1395. """Copy dashboard"""
  1396. session = db.session()
  1397. data = json.loads(request.form.get('data'))
  1398. dash = models.Dashboard()
  1399. original_dash = (
  1400. session
  1401. .query(models.Dashboard)
  1402. .filter_by(id=dashboard_id).first())
  1403. dash.owners = [g.user] if g.user else []
  1404. dash.dashboard_title = data['dashboard_title']
        if data['duplicate_slices']:
            # Duplicating slices as well, mapping old ids to new ones
            old_to_new_sliceids = {}
            for slc in original_dash.slices:
                new_slice = slc.clone()
                new_slice.owners = [g.user] if g.user else []
                session.add(new_slice)
                session.flush()
                new_slice.dashboards.append(dash)
                old_to_new_sliceids['{}'.format(slc.id)] = \
                    '{}'.format(new_slice.id)
            # update chartId of layout entities
            # in v2_dash positions json data, chartId should be integer,
            # while in older version slice_id is string type
            for value in data['positions'].values():
                if (
                    isinstance(value, dict) and value.get('meta') and
                    value.get('meta').get('chartId')
                ):
                    old_id = '{}'.format(value.get('meta').get('chartId'))
                    new_id = int(old_to_new_sliceids[old_id])
                    value['meta']['chartId'] = new_id
        else:
            dash.slices = original_dash.slices
            dash.params = original_dash.params
  1430. self._set_dash_metadata(dash, data)
  1431. session.add(dash)
  1432. session.commit()
  1433. dash_json = json.dumps(dash.data)
  1434. session.close()
  1435. return json_success(dash_json)
  1436. @api
  1437. @has_access_api
  1438. @expose('/save_dash/<dashboard_id>/', methods=['GET', 'POST'])
  1439. def save_dash(self, dashboard_id):
  1440. """Save a dashboard's metadata"""
  1441. session = db.session()
  1442. dash = (session
  1443. .query(models.Dashboard)
  1444. .filter_by(id=dashboard_id).first())
  1445. check_ownership(dash, raise_if_false=True)
  1446. data = json.loads(request.form.get('data'))
  1447. self._set_dash_metadata(dash, data)
  1448. session.merge(dash)
  1449. session.commit()
  1450. session.close()
  1451. return 'SUCCESS'
  1452. @staticmethod
  1453. def _set_dash_metadata(dashboard, data):
  1454. positions = data['positions']
  1455. # find slices in the position data
  1456. slice_ids = []
  1457. slice_id_to_name = {}
        for value in positions.values():
            if (
                isinstance(value, dict) and value.get('meta') and
                value.get('meta').get('chartId')
            ):
                slice_id = value.get('meta').get('chartId')
                slice_ids.append(slice_id)
                slice_id_to_name[slice_id] = value.get('meta').get('sliceName')

        session = db.session()
        Slice = models.Slice  # noqa
        current_slices = session.query(Slice).filter(
            Slice.id.in_(slice_ids)).all()
        dashboard.slices = current_slices

        # update slice names. this assumes user has permissions to update the slice
        for slc in dashboard.slices:
            new_name = slice_id_to_name[slc.id]
            if slc.slice_name != new_name:
                slc.slice_name = new_name
                session.merge(slc)
                session.flush()

        # remove leading and trailing white spaces in the dumped json
        dashboard.position_json = json.dumps(
            positions, indent=None, separators=(',', ':'), sort_keys=True)
  1481. md = dashboard.params_dict
  1482. dashboard.css = data.get('css')
  1483. dashboard.dashboard_title = data['dashboard_title']
  1484. if 'filter_immune_slices' not in md:
  1485. md['filter_immune_slices'] = []
  1486. if 'timed_refresh_immune_slices' not in md:
  1487. md['timed_refresh_immune_slices'] = []
  1488. if 'filter_immune_slice_fields' not in md:
  1489. md['filter_immune_slice_fields'] = {}
  1490. md['expanded_slices'] = data['expanded_slices']
  1491. default_filters_data = json.loads(data.get('default_filters', '{}'))
  1492. applicable_filters = \
  1493. {key: v for key, v in default_filters_data.items()
  1494. if int(key) in slice_ids}
  1495. md['default_filters'] = json.dumps(applicable_filters)
  1496. dashboard.json_metadata = json.dumps(md)
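    # Illustrative 'positions' payload shape (key and ids are assumed values):
    #   {"CHART-abc123": {"type": "CHART",
    #                     "meta": {"chartId": 97, "sliceName": "Trends"}}}
    # Entries whose meta carries a chartId drive both dashboard.slices and the
    # slice renames above; other layout entries are left untouched.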
  1497. @api
  1498. @has_access_api
  1499. @expose('/add_slices/<dashboard_id>/', methods=['POST'])
  1500. def add_slices(self, dashboard_id):
  1501. """Add and save slices to a dashboard"""
  1502. data = json.loads(request.form.get('data'))
  1503. session = db.session()
  1504. Slice = models.Slice # noqa
  1505. dash = (
  1506. session.query(models.Dashboard).filter_by(id=dashboard_id).first())
  1507. check_ownership(dash, raise_if_false=True)
  1508. new_slices = session.query(Slice).filter(
  1509. Slice.id.in_(data['slice_ids']))
  1510. dash.slices += new_slices
  1511. session.merge(dash)
  1512. session.commit()
  1513. session.close()
  1514. return 'SLICES ADDED'
  1515. @api
  1516. @has_access_api
  1517. @expose('/testconn', methods=['POST', 'GET'])
  1518. def testconn(self):
  1519. """Tests a sqla connection"""
  1520. try:
  1521. username = g.user.username if g.user is not None else None
  1522. uri = request.json.get('uri')
  1523. db_name = request.json.get('name')
  1524. impersonate_user = request.json.get('impersonate_user')
  1525. database = None
  1526. if db_name:
  1527. database = (
  1528. db.session
  1529. .query(models.Database)
  1530. .filter_by(database_name=db_name)
  1531. .first()
  1532. )
  1533. if database and uri == database.safe_sqlalchemy_uri():
  1534. # the password-masked uri was passed
  1535. # use the URI associated with this database
  1536. uri = database.sqlalchemy_uri_decrypted
  1537. configuration = {}
  1538. if database and uri:
  1539. url = make_url(uri)
  1540. db_engine = models.Database.get_db_engine_spec_for_backend(
  1541. url.get_backend_name())
  1542. db_engine.patch()
  1543. masked_url = database.get_password_masked_url_from_uri(uri)
  1544. logging.info('Superset.testconn(). Masked URL: {0}'.format(masked_url))
  1545. configuration.update(
  1546. db_engine.get_configuration_for_impersonation(uri,
  1547. impersonate_user,
  1548. username),
  1549. )
            engine_params = (
                request.json
                .get('extras', {})
                .get('engine_params', {}))
            connect_args = engine_params.get('connect_args', {})
            if configuration:
                connect_args['configuration'] = configuration
                engine_params['connect_args'] = connect_args
            engine = create_engine(uri, **engine_params)
            engine.connect()
            return json_success(json.dumps(engine.table_names(), indent=4))
  1560. except Exception as e:
  1561. logging.exception(e)
  1562. return json_error_response((
  1563. 'Connection failed!\n\n'
  1564. 'The error message returned was:\n{}').format(e))
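    # Illustrative request body for /superset/testconn (values are made up):
    #   {"uri": "postgresql://user:pass@host:5432/db",
    #    "name": "examples", "impersonate_user": false,
    #    "extras": {"engine_params": {"connect_args": {}}}}
    # On success the response is the JSON-encoded list of table names reachable
    # through the connection.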
  1565. @api
  1566. @has_access_api
  1567. @expose('/recent_activity/<user_id>/', methods=['GET'])
  1568. def recent_activity(self, user_id):
  1569. """Recent activity (actions) for a given user"""
  1570. M = models # noqa
  1571. if request.args.get('limit'):
  1572. limit = int(request.args.get('limit'))
  1573. else:
  1574. limit = 1000
  1575. qry = (
  1576. db.session.query(M.Log, M.Dashboard, M.Slice)
  1577. .outerjoin(
  1578. M.Dashboard,
  1579. M.Dashboard.id == M.Log.dashboard_id,
  1580. )
  1581. .outerjoin(
  1582. M.Slice,
  1583. M.Slice.id == M.Log.slice_id,
  1584. )
  1585. .filter(
  1586. sqla.and_(
  1587. ~M.Log.action.in_(('queries', 'shortner', 'sql_json')),
  1588. M.Log.user_id == user_id,
  1589. ),
  1590. )
  1591. .order_by(M.Log.dttm.desc())
  1592. .limit(limit)
  1593. )
  1594. payload = []
  1595. for log in qry.all():
  1596. item_url = None
  1597. item_title = None
  1598. if log.Dashboard:
  1599. item_url = log.Dashboard.url
  1600. item_title = log.Dashboard.dashboard_title
  1601. elif log.Slice:
  1602. item_url = log.Slice.slice_url
  1603. item_title = log.Slice.slice_name
  1604. payload.append({
  1605. 'action': log.Log.action,
  1606. 'item_url': item_url,
  1607. 'item_title': item_title,
  1608. 'time': log.Log.dttm,
  1609. })
  1610. return json_success(
  1611. json.dumps(payload, default=utils.json_int_dttm_ser))
  1612. @api
  1613. @has_access_api
  1614. @expose('/csrf_token/', methods=['GET'])
  1615. def csrf_token(self):
  1616. return Response(
  1617. self.render_template('superset/csrf_token.json'),
  1618. mimetype='text/json',
  1619. )
  1620. @api
  1621. @has_access_api
  1622. @expose('/fave_dashboards_by_username/<username>/', methods=['GET'])
  1623. def fave_dashboards_by_username(self, username):
  1624. """This lets us use a user's username to pull favourite dashboards"""
  1625. user = security_manager.find_user(username=username)
  1626. return self.fave_dashboards(user.get_id())
  1627. @api
  1628. @has_access_api
  1629. @expose('/fave_dashboards/<user_id>/', methods=['GET'])
  1630. def fave_dashboards(self, user_id):
  1631. qry = (
  1632. db.session.query(
  1633. models.Dashboard,
  1634. models.FavStar.dttm,
  1635. )
  1636. .join(
  1637. models.FavStar,
  1638. sqla.and_(
  1639. models.FavStar.user_id == int(user_id),
  1640. models.FavStar.class_name == 'Dashboard',
  1641. models.Dashboard.id == models.FavStar.obj_id,
  1642. ),
  1643. )
  1644. .order_by(
  1645. models.FavStar.dttm.desc(),
  1646. )
  1647. )
  1648. payload = []
  1649. for o in qry.all():
  1650. d = {
  1651. 'id': o.Dashboard.id,
  1652. 'dashboard': o.Dashboard.dashboard_link(),
  1653. 'title': o.Dashboard.dashboard_title,
  1654. 'url': o.Dashboard.url,
  1655. 'dttm': o.dttm,
  1656. }
  1657. if o.Dashboard.created_by:
  1658. user = o.Dashboard.created_by
  1659. d['creator'] = str(user)
  1660. d['creator_url'] = '/superset/profile/{}/'.format(
  1661. user.username)
  1662. payload.append(d)
  1663. return json_success(
  1664. json.dumps(payload, default=utils.json_int_dttm_ser))
  1665. @api
  1666. @has_access_api
  1667. @expose('/created_dashboards/<user_id>/', methods=['GET'])
  1668. def created_dashboards(self, user_id):
  1669. Dash = models.Dashboard # noqa
  1670. qry = (
  1671. db.session.query(
  1672. Dash,
  1673. )
  1674. .filter(
  1675. sqla.or_(
  1676. Dash.created_by_fk == user_id,
  1677. Dash.changed_by_fk == user_id,
  1678. ),
  1679. )
  1680. .order_by(
  1681. Dash.changed_on.desc(),
  1682. )
  1683. )
  1684. payload = [{
  1685. 'id': o.id,
  1686. 'dashboard': o.dashboard_link(),
  1687. 'title': o.dashboard_title,
  1688. 'url': o.url,
  1689. 'dttm': o.changed_on,
  1690. } for o in qry.all()]
  1691. return json_success(
  1692. json.dumps(payload, default=utils.json_int_dttm_ser))
  1693. @api
  1694. @has_access_api
  1695. @expose('/user_slices', methods=['GET'])
  1696. @expose('/user_slices/<user_id>/', methods=['GET'])
  1697. def user_slices(self, user_id=None):
  1698. """List of slices a user created, or faved"""
  1699. if not user_id:
  1700. user_id = g.user.id
  1701. Slice = models.Slice # noqa
  1702. FavStar = models.FavStar # noqa
  1703. qry = (
  1704. db.session.query(Slice,
  1705. FavStar.dttm).join(
  1706. models.FavStar,
  1707. sqla.and_(
  1708. models.FavStar.user_id == int(user_id),
  1709. models.FavStar.class_name == 'slice',
  1710. models.Slice.id == models.FavStar.obj_id,
  1711. ),
  1712. isouter=True).filter(
  1713. sqla.or_(
  1714. Slice.created_by_fk == user_id,
  1715. Slice.changed_by_fk == user_id,
  1716. FavStar.user_id == user_id,
  1717. ),
  1718. )
  1719. .order_by(Slice.slice_name.asc())
  1720. )
  1721. payload = [{
  1722. 'id': o.Slice.id,
  1723. 'title': o.Slice.slice_name,
  1724. 'url': o.Slice.slice_url,
  1725. 'data': o.Slice.form_data,
  1726. 'dttm': o.dttm if o.dttm else o.Slice.changed_on,
  1727. 'viz_type': o.Slice.viz_type,
  1728. } for o in qry.all()]
  1729. return json_success(
  1730. json.dumps(payload, default=utils.json_int_dttm_ser))
  1731. @api
  1732. @has_access_api
  1733. @expose('/created_slices', methods=['GET'])
  1734. @expose('/created_slices/<user_id>/', methods=['GET'])
  1735. def created_slices(self, user_id=None):
  1736. """List of slices created by this user"""
  1737. if not user_id:
  1738. user_id = g.user.id
  1739. Slice = models.Slice # noqa
  1740. qry = (
  1741. db.session.query(Slice)
  1742. .filter(
  1743. sqla.or_(
  1744. Slice.created_by_fk == user_id,
  1745. Slice.changed_by_fk == user_id,
  1746. ),
  1747. )
  1748. .order_by(Slice.changed_on.desc())
  1749. )
  1750. payload = [{
  1751. 'id': o.id,
  1752. 'title': o.slice_name,
  1753. 'url': o.slice_url,
  1754. 'dttm': o.changed_on,
  1755. 'viz_type': o.viz_type,
  1756. } for o in qry.all()]
  1757. return json_success(
  1758. json.dumps(payload, default=utils.json_int_dttm_ser))
  1759. @api
  1760. @has_access_api
  1761. @expose('/fave_slices', methods=['GET'])
  1762. @expose('/fave_slices/<user_id>/', methods=['GET'])
  1763. def fave_slices(self, user_id=None):
  1764. """Favorite slices for a user"""
  1765. if not user_id:
  1766. user_id = g.user.id
  1767. qry = (
  1768. db.session.query(
  1769. models.Slice,
  1770. models.FavStar.dttm,
  1771. )
  1772. .join(
  1773. models.FavStar,
  1774. sqla.and_(
  1775. models.FavStar.user_id == int(user_id),
  1776. models.FavStar.class_name == 'slice',
  1777. models.Slice.id == models.FavStar.obj_id,
  1778. ),
  1779. )
  1780. .order_by(
  1781. models.FavStar.dttm.desc(),
  1782. )
  1783. )
  1784. payload = []
  1785. for o in qry.all():
  1786. d = {
  1787. 'id': o.Slice.id,
  1788. 'title': o.Slice.slice_name,
  1789. 'url': o.Slice.slice_url,
  1790. 'dttm': o.dttm,
  1791. 'viz_type': o.Slice.viz_type,
  1792. }
  1793. if o.Slice.created_by:
  1794. user = o.Slice.created_by
  1795. d['creator'] = str(user)
  1796. d['creator_url'] = '/superset/profile/{}/'.format(
  1797. user.username)
  1798. payload.append(d)
  1799. return json_success(
  1800. json.dumps(payload, default=utils.json_int_dttm_ser))
  1801. @api
  1802. @has_access_api
  1803. @expose('/warm_up_cache/', methods=['GET'])
  1804. def warm_up_cache(self):
  1805. """Warms up the cache for the slice or table.
  1806. Note for slices a force refresh occurs.
  1807. """
  1808. slices = None
  1809. session = db.session()
  1810. slice_id = request.args.get('slice_id')
  1811. table_name = request.args.get('table_name')
  1812. db_name = request.args.get('db_name')
  1813. if not slice_id and not (table_name and db_name):
  1814. return json_error_response(__(
  1815. 'Malformed request. slice_id or table_name and db_name '
  1816. 'arguments are expected'), status=400)
  1817. if slice_id:
  1818. slices = session.query(models.Slice).filter_by(id=slice_id).all()
  1819. if not slices:
  1820. return json_error_response(__(
  1821. 'Chart %(id)s not found', id=slice_id), status=404)
        elif table_name and db_name:
            SqlaTable = ConnectorRegistry.sources['table']
            table = (
                session.query(SqlaTable)
                .join(models.Database)
                .filter(
                    models.Database.database_name == db_name,
                    SqlaTable.table_name == table_name)
            ).first()
            if not table:
                return json_error_response(__(
                    "Table %(t)s wasn't found in the database %(d)s",
                    t=table_name, d=db_name), status=404)
            slices = session.query(models.Slice).filter_by(
                datasource_id=table.id,
                datasource_type=table.type).all()

        for slc in slices:
            try:
                obj = slc.get_viz(force=True)
                obj.get_json()
            except Exception as e:
                return json_error_response(utils.error_msg_from_exception(e))
        return json_success(json.dumps(
            [{'slice_id': slc.id, 'slice_name': slc.slice_name}
             for slc in slices]))
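    # Illustrative requests (names and ids are assumed values):
    #   /superset/warm_up_cache/?slice_id=97                      warms one chart
    #   /superset/warm_up_cache/?table_name=logs&db_name=examples warms every
    # chart built on that table; each viz is rebuilt with force=True so the
    # cache is repopulated rather than read.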
  1847. @expose('/favstar/<class_name>/<obj_id>/<action>/')
  1848. def favstar(self, class_name, obj_id, action):
  1849. """Toggle favorite stars on Slices and Dashboard"""
  1850. session = db.session()
  1851. FavStar = models.FavStar # noqa
  1852. count = 0
  1853. favs = session.query(FavStar).filter_by(
  1854. class_name=class_name, obj_id=obj_id,
  1855. user_id=g.user.get_id()).all()
  1856. if action == 'select':
  1857. if not favs:
  1858. session.add(
  1859. FavStar(
  1860. class_name=class_name,
  1861. obj_id=obj_id,
  1862. user_id=g.user.get_id(),
  1863. dttm=datetime.now(),
  1864. ),
  1865. )
  1866. count = 1
  1867. elif action == 'unselect':
  1868. for fav in favs:
  1869. session.delete(fav)
  1870. else:
  1871. count = len(favs)
  1872. session.commit()
  1873. return json_success(json.dumps({'count': count}))
  1874. @has_access
  1875. @expose('/dashboard/<dashboard_id>/')
  1876. def dashboard(self, dashboard_id):
  1877. """Server side rendering for a dashboard"""
  1878. session = db.session()
  1879. qry = session.query(models.Dashboard)
  1880. if dashboard_id.isdigit():
  1881. qry = qry.filter_by(id=int(dashboard_id))
  1882. else:
  1883. qry = qry.filter_by(slug=dashboard_id)
  1884. dash = qry.one()
  1885. datasources = set()
  1886. for slc in dash.slices:
  1887. datasource = slc.datasource
  1888. if datasource:
  1889. datasources.add(datasource)
  1890. if config.get('ENABLE_ACCESS_REQUEST'):
  1891. for datasource in datasources:
  1892. if datasource and not security_manager.datasource_access(datasource):
  1893. flash(
  1894. __(security_manager.get_datasource_access_error_msg(datasource)),
  1895. 'danger')
  1896. return redirect(
  1897. 'superset/request_access/?'
  1898. 'dashboard_id={dash.id}&'.format(**locals()))
  1899. dash_edit_perm = check_ownership(dash, raise_if_false=False) and \
  1900. security_manager.can_access('can_save_dash', 'Superset')
  1901. dash_save_perm = security_manager.can_access('can_save_dash', 'Superset')
  1902. superset_can_explore = security_manager.can_access('can_explore', 'Superset')
  1903. slice_can_edit = security_manager.can_access('can_edit', 'SliceModelView')
  1904. standalone_mode = request.args.get('standalone') == 'true'
  1905. edit_mode = request.args.get('edit') == 'true'
  1906. # Hack to log the dashboard_id properly, even when getting a slug
  1907. @log_this
  1908. def dashboard(**kwargs): # noqa
  1909. pass
  1910. dashboard(
  1911. dashboard_id=dash.id,
  1912. dashboard_version='v2',
  1913. dash_edit_perm=dash_edit_perm,
  1914. edit_mode=edit_mode)
  1915. dashboard_data = dash.data
  1916. dashboard_data.update({
  1917. 'standalone_mode': standalone_mode,
  1918. 'dash_save_perm': dash_save_perm,
  1919. 'dash_edit_perm': dash_edit_perm,
  1920. 'superset_can_explore': superset_can_explore,
  1921. 'slice_can_edit': slice_can_edit,
  1922. })
  1923. bootstrap_data = {
  1924. 'user_id': g.user.get_id(),
  1925. 'dashboard_data': dashboard_data,
  1926. 'datasources': {ds.uid: ds.data for ds in datasources},
  1927. 'common': self.common_bootsrap_payload(),
  1928. 'editMode': edit_mode,
  1929. }
  1930. if request.args.get('json') == 'true':
  1931. return json_success(json.dumps(bootstrap_data))
  1932. return self.render_template(
  1933. 'superset/dashboard.html',
  1934. entry='dashboard',
  1935. standalone_mode=standalone_mode,
  1936. title=dash.dashboard_title,
  1937. bootstrap_data=json.dumps(bootstrap_data),
  1938. )
  1939. @api
  1940. @log_this
  1941. @expose('/log/', methods=['POST'])
  1942. def log(self):
  1943. return Response(status=200)
  1944. @has_access
  1945. @expose('/sync_druid/', methods=['POST'])
  1946. @log_this
  1947. def sync_druid_source(self):
  1948. """Syncs the druid datasource in main db with the provided config.
  1949. The endpoint takes 3 arguments:
  1950. user - user name to perform the operation as
  1951. cluster - name of the druid cluster
  1952. config - configuration stored in json that contains:
  1953. name: druid datasource name
  1954. dimensions: list of the dimensions, they become druid columns
  1955. with the type STRING
  1956. metrics_spec: list of metrics (dictionary). Metric consists of
  1957. 2 attributes: type and name. Type can be count,
  1958. etc. `count` type is stored internally as longSum
  1959. other fields will be ignored.
  1960. Example: {
  1961. 'name': 'test_click',
  1962. 'metrics_spec': [{'type': 'count', 'name': 'count'}],
  1963. 'dimensions': ['affiliate_id', 'campaign', 'first_seen']
  1964. }
  1965. """
  1966. payload = request.get_json(force=True)
  1967. druid_config = payload['config']
  1968. user_name = payload['user']
  1969. cluster_name = payload['cluster']
  1970. user = security_manager.find_user(username=user_name)
  1971. DruidDatasource = ConnectorRegistry.sources['druid']
  1972. DruidCluster = DruidDatasource.cluster_class
  1973. if not user:
  1974. err_msg = __("Can't find User '%(name)s', please ask your admin "
  1975. 'to create one.', name=user_name)
  1976. logging.error(err_msg)
  1977. return json_error_response(err_msg)
  1978. cluster = db.session.query(DruidCluster).filter_by(
  1979. cluster_name=cluster_name).first()
  1980. if not cluster:
  1981. err_msg = __("Can't find DruidCluster with cluster_name = "
  1982. "'%(name)s'", name=cluster_name)
  1983. logging.error(err_msg)
  1984. return json_error_response(err_msg)
  1985. try:
  1986. DruidDatasource.sync_to_db_from_config(
  1987. druid_config, user, cluster)
  1988. except Exception as e:
  1989. logging.exception(utils.error_msg_from_exception(e))
  1990. return json_error_response(utils.error_msg_from_exception(e))
  1991. return Response(status=201)
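    # Illustrative call (values are made up; assumes an authenticated session):
    # posts the JSON payload described in the docstring above, e.g.
    #
    #   requests.post('http://localhost:8088/superset/sync_druid/', json={
    #       'user': 'admin', 'cluster': 'druid-prod',
    #       'config': {'name': 'test_click',
    #                  'dimensions': ['affiliate_id'],
    #                  'metrics_spec': [{'type': 'count', 'name': 'count'}]}})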
  1992. @has_access
  1993. @expose('/sqllab_viz/', methods=['POST'])
  1994. @log_this
  1995. def sqllab_viz(self):
  1996. SqlaTable = ConnectorRegistry.sources['table']
  1997. data = json.loads(request.form.get('data'))
  1998. table_name = data.get('datasourceName')
  1999. table = (
  2000. db.session.query(SqlaTable)
  2001. .filter_by(table_name=table_name)
  2002. .first()
  2003. )
  2004. if not table:
  2005. table = SqlaTable(table_name=table_name)
  2006. table.database_id = data.get('dbId')
  2007. table.schema = data.get('schema')
  2008. table.template_params = data.get('templateParams')
  2009. table.is_sqllab_view = True
  2010. q = SupersetQuery(data.get('sql'))
  2011. table.sql = q.stripped()
  2012. db.session.add(table)
  2013. cols = []
  2014. for config in data.get('columns'):
  2015. column_name = config.get('name')
  2016. SqlaTable = ConnectorRegistry.sources['table']
  2017. TableColumn = SqlaTable.column_class
  2018. SqlMetric = SqlaTable.metric_class
  2019. col = TableColumn(
  2020. column_name=column_name,
  2021. filterable=True,
  2022. groupby=True,
  2023. is_dttm=config.get('is_date', False),
  2024. type=config.get('type', False),
  2025. )
  2026. cols.append(col)
  2027. table.columns = cols
  2028. table.metrics = [
  2029. SqlMetric(metric_name='count', expression='count(*)'),
  2030. ]
  2031. db.session.commit()
  2032. return self.json_response(json.dumps({
  2033. 'table_id': table.id,
  2034. }))
  2035. @has_access
  2036. @expose('/table/<database_id>/<table_name>/<schema>/')
  2037. @log_this
  2038. def table(self, database_id, table_name, schema):
  2039. schema = utils.js_string_to_python(schema)
  2040. mydb = db.session.query(models.Database).filter_by(id=database_id).one()
  2041. payload_columns = []
  2042. indexes = []
  2043. primary_key = []
  2044. foreign_keys = []
  2045. try:
  2046. columns = mydb.get_columns(table_name, schema)
  2047. indexes = mydb.get_indexes(table_name, schema)
  2048. primary_key = mydb.get_pk_constraint(table_name, schema)
  2049. foreign_keys = mydb.get_foreign_keys(table_name, schema)
  2050. except Exception as e:
  2051. return json_error_response(utils.error_msg_from_exception(e))
        keys = []
        if primary_key and primary_key.get('constrained_columns'):
            primary_key['column_names'] = primary_key.pop('constrained_columns')
            primary_key['type'] = 'pk'
            keys += [primary_key]
        for fk in foreign_keys:
            fk['column_names'] = fk.pop('constrained_columns')
            fk['type'] = 'fk'
        keys += foreign_keys
        for idx in indexes:
            idx['type'] = 'index'
        keys += indexes

        for col in columns:
            dtype = ''
            try:
                dtype = '{}'.format(col['type'])
            except Exception:
                # sqla.types.JSON __str__ has a bug, so using __class__.
                dtype = col['type'].__class__.__name__

            payload_columns.append({
                'name': col['name'],
                'type': dtype.split('(')[0] if '(' in dtype else dtype,
                'longType': dtype,
                'keys': [
                    k for k in keys
                    if col['name'] in k.get('column_names')
                ],
            })
  2081. tbl = {
  2082. 'name': table_name,
  2083. 'columns': payload_columns,
  2084. 'selectStar': mydb.select_star(
  2085. table_name, schema=schema, show_cols=True, indent=True,
  2086. cols=columns, latest_partition=True),
  2087. 'primaryKey': primary_key,
  2088. 'foreignKeys': foreign_keys,
  2089. 'indexes': keys,
  2090. }
  2091. return json_success(json.dumps(tbl))
  2092. @has_access
  2093. @expose('/extra_table_metadata/<database_id>/<table_name>/<schema>/')
  2094. @log_this
  2095. def extra_table_metadata(self, database_id, table_name, schema):
  2096. schema = utils.js_string_to_python(schema)
  2097. mydb = db.session.query(models.Database).filter_by(id=database_id).one()
  2098. payload = mydb.db_engine_spec.extra_table_metadata(
  2099. mydb, table_name, schema)
  2100. return json_success(json.dumps(payload))
  2101. @has_access
  2102. @expose('/select_star/<database_id>/<table_name>')
  2103. @expose('/select_star/<database_id>/<table_name>/<schema>')
  2104. @log_this
  2105. def select_star(self, database_id, table_name, schema=None):
  2106. mydb = db.session.query(
  2107. models.Database).filter_by(id=database_id).first()
  2108. return json_success(
  2109. mydb.select_star(
  2110. table_name,
  2111. schema,
  2112. latest_partition=True,
  2113. show_cols=True,
  2114. ),
  2115. )
  2116. @expose('/theme/')
  2117. def theme(self):
  2118. return self.render_template('superset/theme.html')
  2119. @has_access_api
  2120. @expose('/cached_key/<key>/')
  2121. @log_this
  2122. def cached_key(self, key):
  2123. """Returns a key from the cache"""
  2124. resp = cache.get(key)
  2125. if resp:
  2126. return resp
  2127. return 'nope'
  2128. @has_access_api
  2129. @expose('/cache_key_exist/<key>/')
  2130. @log_this
    def cache_key_exist(self, key):
        """Returns whether a key exists in the cache"""
        key_exist = bool(cache.get(key))
        status = 200 if key_exist else 404
        return json_success(json.dumps({'key_exist': key_exist}),
                            status=status)
  2137. @has_access_api
  2138. @expose('/results/<key>/')
  2139. @log_this
  2140. def results(self, key):
  2141. """Serves a key off of the results backend"""
  2142. if not results_backend:
  2143. return json_error_response("Results backend isn't configured")
  2144. read_from_results_backend_start = utils.now_as_float()
  2145. blob = results_backend.get(key)
  2146. stats_logger.timing(
  2147. 'sqllab.query.results_backend_read',
  2148. utils.now_as_float() - read_from_results_backend_start,
  2149. )
  2150. if not blob:
  2151. return json_error_response(
  2152. 'Data could not be retrieved. '
  2153. 'You may want to re-run the query.',
  2154. status=410,
  2155. )
  2156. query = db.session.query(Query).filter_by(results_key=key).one()
  2157. rejected_tables = security_manager.rejected_datasources(
  2158. query.sql, query.database, query.schema)
  2159. if rejected_tables:
  2160. return json_error_response(security_manager.get_table_access_error_msg(
  2161. '{}'.format(rejected_tables)), status=403)
  2162. payload = utils.zlib_decompress_to_string(blob)
  2163. display_limit = app.config.get('DISPLAY_MAX_ROW', None)
  2164. if display_limit:
  2165. payload_json = json.loads(payload)
  2166. payload_json['data'] = payload_json['data'][:display_limit]
  2167. return json_success(
  2168. json.dumps(
  2169. payload_json,
  2170. default=utils.json_iso_dttm_ser,
  2171. ignore_nan=True,
  2172. ),
  2173. )
  2174. @has_access_api
  2175. @expose('/stop_query/', methods=['POST'])
  2176. @log_this
  2177. def stop_query(self):
  2178. client_id = request.form.get('client_id')
  2179. try:
  2180. query = (
  2181. db.session.query(Query)
  2182. .filter_by(client_id=client_id).one()
  2183. )
  2184. query.status = utils.QueryStatus.STOPPED
  2185. db.session.commit()
  2186. except Exception:
  2187. pass
  2188. return self.json_response('OK')
  2189. @has_access_api
  2190. @expose('/sql_json/', methods=['POST', 'GET'])
  2191. @log_this
  2192. def sql_json(self):
  2193. """Runs arbitrary sql and returns and json"""
  2194. async_ = request.form.get('runAsync') == 'true'
  2195. sql = request.form.get('sql')
  2196. database_id = request.form.get('database_id')
  2197. schema = request.form.get('schema') or None
  2198. template_params = json.loads(
  2199. request.form.get('templateParams') or '{}')
  2200. session = db.session()
  2201. mydb = session.query(models.Database).filter_by(id=database_id).first()
  2202. if not mydb:
  2203. json_error_response(
  2204. 'Database with id {} is missing.'.format(database_id))
  2205. rejected_tables = security_manager.rejected_datasources(sql, mydb, schema)
  2206. if rejected_tables:
  2207. return json_error_response(
  2208. security_manager.get_table_access_error_msg(rejected_tables),
  2209. link=security_manager.get_table_access_link(rejected_tables),
  2210. status=403)
  2211. session.commit()
  2212. select_as_cta = request.form.get('select_as_cta') == 'true'
  2213. tmp_table_name = request.form.get('tmp_table_name')
  2214. if select_as_cta and mydb.force_ctas_schema:
  2215. tmp_table_name = '{}.{}'.format(
  2216. mydb.force_ctas_schema,
  2217. tmp_table_name,
  2218. )
  2219. client_id = request.form.get('client_id') or utils.shortid()[:10]
  2220. query = Query(
  2221. database_id=int(database_id),
  2222. limit=mydb.db_engine_spec.get_limit_from_sql(sql),
  2223. sql=sql,
  2224. schema=schema,
  2225. select_as_cta=request.form.get('select_as_cta') == 'true',
  2226. start_time=utils.now_as_float(),
  2227. tab_name=request.form.get('tab'),
  2228. status=QueryStatus.PENDING if async_ else QueryStatus.RUNNING,
  2229. sql_editor_id=request.form.get('sql_editor_id'),
  2230. tmp_table_name=tmp_table_name,
  2231. user_id=g.user.get_id() if g.user else None,
  2232. client_id=client_id,
  2233. )
  2234. session.add(query)
  2235. session.flush()
  2236. query_id = query.id
  2237. session.commit() # shouldn't be necessary
  2238. if not query_id:
  2239. raise Exception(_('Query record was not created as expected.'))
  2240. logging.info('Triggering query_id: {}'.format(query_id))
  2241. try:
  2242. template_processor = get_template_processor(
  2243. database=query.database, query=query)
  2244. rendered_query = template_processor.process_template(
  2245. query.sql,
  2246. **template_params)
  2247. except Exception as e:
  2248. return json_error_response(
  2249. 'Template rendering failed: {}'.format(utils.error_msg_from_exception(e)))
  2250. # Async request.
  2251. if async_:
  2252. logging.info('Running query on a Celery worker')
  2253. # Ignore the celery future object and the request may time out.
  2254. try:
  2255. sql_lab.get_sql_results.delay(
  2256. query_id,
  2257. rendered_query,
  2258. return_results=False,
  2259. store_results=not query.select_as_cta,
  2260. user_name=g.user.username if g.user else None,
  2261. start_time=utils.now_as_float())
  2262. except Exception as e:
  2263. logging.exception(e)
  2264. msg = (
  2265. 'Failed to start remote query on a worker. '
  2266. 'Tell your administrator to verify the availability of '
  2267. 'the message queue.'
  2268. )
  2269. query.status = QueryStatus.FAILED
  2270. query.error_message = msg
  2271. session.commit()
  2272. return json_error_response('{}'.format(msg))
  2273. resp = json_success(json.dumps(
  2274. {'query': query.to_dict()}, default=utils.json_int_dttm_ser,
  2275. ignore_nan=True), status=202)
  2276. session.commit()
  2277. return resp
  2278. # Sync request.
  2279. try:
  2280. timeout = config.get('SQLLAB_TIMEOUT')
  2281. timeout_msg = (
  2282. 'The query exceeded the {timeout} seconds '
  2283. 'timeout.').format(**locals())
  2284. with utils.timeout(seconds=timeout,
  2285. error_message=timeout_msg):
  2286. # pylint: disable=no-value-for-parameter
  2287. data = sql_lab.get_sql_results(
  2288. query_id,
  2289. rendered_query,
  2290. return_results=True,
  2291. user_name=g.user.username if g.user else None)
  2292. payload = json.dumps(
  2293. data,
  2294. default=utils.pessimistic_json_iso_dttm_ser,
  2295. ignore_nan=True,
  2296. encoding=None,
  2297. )
  2298. except Exception as e:
  2299. logging.exception(e)
  2300. return json_error_response('{}'.format(e))
  2301. if data.get('status') == QueryStatus.FAILED:
  2302. return json_error_response(payload=data)
  2303. return json_success(payload)
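    # Illustrative form payload for /superset/sql_json/ (values are made up):
    #   sql='SELECT 1', database_id='1', schema='', runAsync='false',
    #   select_as_cta='false', client_id='', templateParams='{}'
    # With runAsync='true' the rendered query is handed to a Celery worker and
    # the endpoint returns the query record with HTTP 202 instead of the results.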
  2304. @has_access
  2305. @expose('/csv/<client_id>')
  2306. @log_this
  2307. def csv(self, client_id):
  2308. """Download the query results as csv."""
  2309. logging.info('Exporting CSV file [{}]'.format(client_id))
  2310. query = (
  2311. db.session.query(Query)
  2312. .filter_by(client_id=client_id)
  2313. .one()
  2314. )
  2315. rejected_tables = security_manager.rejected_datasources(
  2316. query.sql, query.database, query.schema)
  2317. if rejected_tables:
  2318. flash(
  2319. security_manager.get_table_access_error_msg('{}'.format(rejected_tables)))
  2320. return redirect('/')
        blob = None
        if results_backend and query.results_key:
            logging.info(
                'Fetching CSV from results backend '
                '[{}]'.format(query.results_key))
            blob = results_backend.get(query.results_key)
        if blob:
            logging.info('Decompressing')
            json_payload = utils.zlib_decompress_to_string(blob)
            obj = json.loads(json_payload)
            columns = [c['name'] for c in obj['columns']]
            df = pd.DataFrame.from_records(obj['data'], columns=columns)
            logging.info('Using pandas to convert to CSV')
            csv = df.to_csv(index=False, **config.get('CSV_EXPORT'))
        else:
            logging.info('Running a query to turn into CSV')
            sql = query.select_sql or query.executed_sql
            df = query.database.get_df(sql, query.schema)
            # TODO(bkyryliuk): add compression=gzip for big files.
            csv = df.to_csv(index=False, **config.get('CSV_EXPORT'))
  2341. response = Response(csv, mimetype='text/csv')
  2342. response.headers['Content-Disposition'] = (
  2343. 'attachment; filename={}.csv'.format(unidecode(query.name)))
  2344. logging.info('Ready to return response')
  2345. return response
  2346. @has_access
  2347. @expose('/fetch_datasource_metadata')
  2348. @log_this
  2349. def fetch_datasource_metadata(self):
  2350. datasource_id, datasource_type = (
  2351. request.args.get('datasourceKey').split('__'))
  2352. datasource = ConnectorRegistry.get_datasource(
  2353. datasource_type, datasource_id, db.session)
  2354. # Check if datasource exists
  2355. if not datasource:
  2356. return json_error_response(DATASOURCE_MISSING_ERR)
  2357. # Check permission for datasource
  2358. if not security_manager.datasource_access(datasource):
  2359. return json_error_response(
  2360. security_manager.get_datasource_access_error_msg(datasource),
  2361. link=security_manager.get_datasource_access_link(datasource))
  2362. return json_success(json.dumps(datasource.data))
  2363. @expose('/queries/<last_updated_ms>')
  2364. def queries(self, last_updated_ms):
  2365. """Get the updated queries."""
  2366. stats_logger.incr('queries')
  2367. if not g.user.get_id():
  2368. return json_error_response(
  2369. 'Please login to access the queries.', status=403)
  2370. # Unix time, milliseconds.
  2371. last_updated_ms_int = int(float(last_updated_ms)) if last_updated_ms else 0
  2372. # UTC date time, same that is stored in the DB.
  2373. last_updated_dt = utils.EPOCH + timedelta(seconds=last_updated_ms_int / 1000)
  2374. sql_queries = (
  2375. db.session.query(Query)
  2376. .filter(
  2377. Query.user_id == g.user.get_id(),
  2378. Query.changed_on >= last_updated_dt,
  2379. )
  2380. .all()
  2381. )
  2382. dict_queries = {q.client_id: q.to_dict() for q in sql_queries}
  2383. now = int(round(time.time() * 1000))
  2384. unfinished_states = [
  2385. utils.QueryStatus.PENDING,
  2386. utils.QueryStatus.RUNNING,
  2387. ]
  2388. queries_to_timeout = [
  2389. client_id for client_id, query_dict in dict_queries.items()
  2390. if (
  2391. query_dict['state'] in unfinished_states and (
  2392. now - query_dict['startDttm'] >
  2393. config.get('SQLLAB_ASYNC_TIME_LIMIT_SEC') * 1000
  2394. )
  2395. )
  2396. ]
        if queries_to_timeout:
            # Build and actually execute the bulk update; the bare expression
            # alone is a no-op, and the membership test must be the SQL IN operator.
            db.session.execute(
                update(Query).where(
                    and_(
                        Query.user_id == g.user.get_id(),
                        Query.client_id.in_(queries_to_timeout),
                    ),
                ).values(status=utils.QueryStatus.TIMED_OUT),
            )
            db.session.commit()
            for client_id in queries_to_timeout:
                dict_queries[client_id]['status'] = utils.QueryStatus.TIMED_OUT
  2406. return json_success(
  2407. json.dumps(dict_queries, default=utils.json_int_dttm_ser))
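    # Illustrative request: GET /superset/queries/0 returns every query owned by
    # the current user as {client_id: query_dict}; passing the epoch milliseconds
    # of the last poll returns only queries changed since then.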
  2408. @has_access
  2409. @expose('/search_queries')
  2410. @log_this
  2411. def search_queries(self):
  2412. """Search for queries."""
  2413. query = db.session.query(Query)
  2414. search_user_id = request.args.get('user_id')
  2415. database_id = request.args.get('database_id')
  2416. search_text = request.args.get('search_text')
  2417. status = request.args.get('status')
  2418. # From and To time stamp should be Epoch timestamp in seconds
  2419. from_time = request.args.get('from')
  2420. to_time = request.args.get('to')
  2421. if search_user_id:
  2422. # Filter on db Id
  2423. query = query.filter(Query.user_id == search_user_id)
  2424. if database_id:
  2425. # Filter on db Id
  2426. query = query.filter(Query.database_id == database_id)
  2427. if status:
  2428. # Filter on status
  2429. query = query.filter(Query.status == status)
  2430. if search_text:
  2431. # Filter on search text
  2432. query = query \
  2433. .filter(Query.sql.like('%{}%'.format(search_text)))
  2434. if from_time:
  2435. query = query.filter(Query.start_time > int(from_time))
  2436. if to_time:
  2437. query = query.filter(Query.start_time < int(to_time))
  2438. query_limit = config.get('QUERY_SEARCH_LIMIT', 1000)
  2439. sql_queries = (
  2440. query.order_by(Query.start_time.asc())
  2441. .limit(query_limit)
  2442. .all()
  2443. )
  2444. dict_queries = [q.to_dict() for q in sql_queries]
  2445. return Response(
  2446. json.dumps(dict_queries, default=utils.json_int_dttm_ser),
  2447. status=200,
  2448. mimetype='application/json')
  2449. @app.errorhandler(500)
  2450. def show_traceback(self):
  2451. return render_template(
  2452. 'superset/traceback.html',
  2453. error_msg=get_error_msg(),
  2454. ), 500
  2455. @expose('/welcome')
  2456. def welcome(self):
  2457. """Personalized welcome page"""
  2458. if not g.user or not g.user.get_id():
  2459. return redirect(appbuilder.get_url_for_login)
  2460. welcome_dashboard_id = (
  2461. db.session
  2462. .query(UserAttribute.welcome_dashboard_id)
  2463. .filter_by(user_id=g.user.get_id())
  2464. .scalar()
  2465. )
  2466. if welcome_dashboard_id:
  2467. return self.dashboard(str(welcome_dashboard_id))
  2468. payload = {
  2469. 'user': bootstrap_user_data(),
  2470. 'common': self.common_bootsrap_payload(),
  2471. }
  2472. return self.render_template(
  2473. 'superset/basic.html',
  2474. entry='welcome',
  2475. title='Superset',
  2476. bootstrap_data=json.dumps(payload, default=utils.json_iso_dttm_ser),
  2477. )
  2478. @has_access
  2479. @expose('/profile/<username>/')
  2480. def profile(self, username):
  2481. """User profile page"""
  2482. if not username and g.user:
  2483. username = g.user.username
  2484. payload = {
  2485. 'user': bootstrap_user_data(username, include_perms=True),
  2486. 'common': self.common_bootsrap_payload(),
  2487. }
  2488. return self.render_template(
  2489. 'superset/basic.html',
  2490. title=_("%(user)s's profile", user=username),
  2491. entry='profile',
  2492. bootstrap_data=json.dumps(payload, default=utils.json_iso_dttm_ser),
  2493. )
  2494. @has_access
  2495. @expose('/sqllab')
  2496. def sqllab(self):
  2497. """SQL Editor"""
  2498. d = {
  2499. 'defaultDbId': config.get('SQLLAB_DEFAULT_DBID'),
  2500. 'common': self.common_bootsrap_payload(),
  2501. }
  2502. return self.render_template(
  2503. 'superset/basic.html',
  2504. entry='sqllab',
  2505. bootstrap_data=json.dumps(d, default=utils.json_iso_dttm_ser),
  2506. )
  2507. @api
  2508. @has_access_api
  2509. @expose('/slice_query/<slice_id>/')
  2510. def slice_query(self, slice_id):
  2511. """
  2512. This method exposes an API endpoint to
  2513. get the database query string for this slice
  2514. """
  2515. viz_obj = self.get_viz(slice_id)
  2516. if not security_manager.datasource_access(viz_obj.datasource):
  2517. return json_error_response(
  2518. security_manager.get_datasource_access_error_msg(viz_obj.datasource),
  2519. status=401,
  2520. link=security_manager.get_datasource_access_link(viz_obj.datasource))
  2521. return self.get_query_string_response(viz_obj)
  2522. @api
  2523. @has_access_api
  2524. @expose('/schema_access_for_csv_upload')
  2525. def schemas_access_for_csv_upload(self):
  2526. """
  2527. This method exposes an API endpoint to
  2528. get the schema access control settings for csv upload in this database
  2529. """
  2530. if not request.args.get('db_id'):
  2531. return json_error_response(
  2532. 'No database is allowed for your csv upload')
  2533. db_id = int(request.args.get('db_id'))
  2534. database = (
  2535. db.session
  2536. .query(models.Database)
  2537. .filter_by(id=db_id)
  2538. .one()
  2539. )
  2540. try:
  2541. schemas_allowed = database.get_schema_access_for_csv_upload()
  2542. if (security_manager.database_access(database) or
  2543. security_manager.all_datasource_access()):
  2544. return self.json_response(schemas_allowed)
  2545. # the list schemas_allowed should not be empty here
  2546. # and the list schemas_allowed_processed returned from security_manager
  2547. # should not be empty either,
  2548. # otherwise the database should have been filtered out
  2549. # in CsvToDatabaseForm
  2550. schemas_allowed_processed = security_manager.schemas_accessible_by_user(
  2551. database, schemas_allowed, False)
  2552. return self.json_response(schemas_allowed_processed)
  2553. except Exception:
  2554. return json_error_response((
  2555. 'Failed to fetch schemas allowed for csv upload in this database! '
  2556. 'Please contact Superset Admin!\n\n'
  2557. 'The error message returned was:\n{}').format(traceback.format_exc()))
  2558. appbuilder.add_view_no_menu(Superset)
  2559. class CssTemplateModelView(SupersetModelView, DeleteMixin):
  2560. datamodel = SQLAInterface(models.CssTemplate)
  2561. list_title = _('List Css Template')
  2562. show_title = _('Show Css Template')
  2563. add_title = _('Add Css Template')
  2564. edit_title = _('Edit Css Template')
  2565. list_columns = ['template_name']
  2566. edit_columns = ['template_name', 'css']
  2567. add_columns = edit_columns
  2568. label_columns = {
  2569. 'template_name': _('Template Name'),
  2570. }
  2571. class CssTemplateAsyncModelView(CssTemplateModelView):
  2572. list_columns = ['template_name', 'css']
  2573. appbuilder.add_separator('Sources')
  2574. appbuilder.add_view(
  2575. CssTemplateModelView,
  2576. 'CSS Templates',
  2577. label=__('CSS Templates'),
  2578. icon='fa-css3',
  2579. category='Manage',
  2580. category_label=__('Manage'),
  2581. category_icon='')
  2582. appbuilder.add_view_no_menu(CssTemplateAsyncModelView)
  2583. appbuilder.add_link(
  2584. 'SQL Editor',
  2585. label=_('SQL Editor'),
  2586. href='/superset/sqllab',
  2587. category_icon='fa-flask',
  2588. icon='fa-flask',
  2589. category='SQL Lab',
  2590. category_label=__('SQL Lab'),
  2591. )
  2592. appbuilder.add_link(
  2593. 'Query Search',
  2594. label=_('Query Search'),
  2595. href='/superset/sqllab#search',
  2596. icon='fa-search',
  2597. category_icon='fa-flask',
  2598. category='SQL Lab',
  2599. category_label=__('SQL Lab'),
  2600. )
  2601. appbuilder.add_link(
  2602. 'Upload a CSV',
  2603. label=__('Upload a CSV'),
  2604. href='/csvtodatabaseview/form',
  2605. icon='fa-upload',
  2606. category='Sources',
  2607. category_label=__('Sources'),
  2608. category_icon='fa-wrench')
  2609. appbuilder.add_separator('Sources')
  2610. @app.after_request
  2611. def apply_caching(response):
  2612. """Applies the configuration's http headers to all responses"""
  2613. for k, v in config.get('HTTP_HEADERS').items():
  2614. response.headers[k] = v
  2615. return response
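# Illustrative config (assumed values): with HTTP_HEADERS set in
# superset_config.py, e.g. HTTP_HEADERS = {'X-Frame-Options': 'SAMEORIGIN'},
# apply_caching() copies each entry onto every outgoing response.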
  2616. # ---------------------------------------------------------------------
  2617. # Redirecting URL from previous names
  2618. class RegexConverter(BaseConverter):
  2619. def __init__(self, url_map, *items):
  2620. super(RegexConverter, self).__init__(url_map)
  2621. self.regex = items[0]
  2622. app.url_map.converters['regex'] = RegexConverter
  2623. @app.route('/<regex("panoramix\/.*"):url>')
  2624. def panoramix(url): # noqa
  2625. return redirect(request.full_path.replace('panoramix', 'superset'))
  2626. @app.route('/<regex("caravel\/.*"):url>')
  2627. def caravel(url): # noqa
  2628. return redirect(request.full_path.replace('caravel', 'superset'))
  2629. # ---------------------------------------------------------------------