base.py

#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import re
import sys
import json
import time
import uuid
import logging

from builtins import object

from django.utils.encoding import smart_str

from beeswax.common import find_compute, is_compute
from desktop.auth.backend import is_admin
from desktop.conf import TASK_SERVER, has_connectors, is_cdw_compute_enabled
from desktop.lib import export_csvxls
from desktop.lib.exceptions_renderable import PopupException
from desktop.lib.i18n import smart_unicode
from metadata.optimizer.base import get_api as get_optimizer_api
from notebook.conf import get_ordered_interpreters
from notebook.sql_utils import get_current_statement

if sys.version_info[0] > 2:
  from django.utils.translation import gettext as _
else:
  from django.utils.translation import ugettext as _

LOG = logging.getLogger()


class SessionExpired(Exception):
  pass


class QueryExpired(Exception):

  def __init__(self, message=None):
    super(QueryExpired, self).__init__()
    self.message = message


class AuthenticationRequired(Exception):

  def __init__(self, message=None):
    super(AuthenticationRequired, self).__init__()
    self.message = message

  def __str__(self):
    return 'AuthenticationRequired: %s' % self.message


class OperationTimeout(Exception):

  def __str__(self):
    return 'OperationTimeout'


class OperationNotSupported(Exception):
  pass


class QueryError(Exception):

  def __init__(self, message, handle=None):
    super(QueryError, self).__init__(message)

    self.message = message or _('No error message, please check the logs.')
    self.handle = handle
    self.extra = {}

  def __unicode__(self):
    return smart_unicode(self.message)
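

# A Notebook is a JSON document (kept serialized in self.data) made of snippets:
# each snippet carries one statement plus its type, properties and result state.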
class Notebook(object):

  def __init__(self, document=None, **options):
    self.document = None

    if document is not None:
      self.data = document.data
      self.document = document
    else:
      _data = {
        'name': 'My Notebook',
        'uuid': str(uuid.uuid4()),
        'description': '',
        'type': 'notebook',
        'isSaved': False,
        'isManaged': False,  # Aka isTask
        'skipHistorify': False,
        'sessions': [],
        'snippets': [],
      }
      _data.update(options)
      self.data = json.dumps(_data)

  def get_json(self):
    _data = self.get_data()
    return json.dumps(_data)

  def get_data(self):
    _data = json.loads(self.data)

    if self.document is not None:
      _data['id'] = self.document.id
      _data['is_history'] = self.document.is_history

    return _data

  def get_str(self, from_oozie_action=False):
    return '\n\n\n'.join([
      'USE %s;\n\n%s' % (
        snippet['database'],
        snippet['statement_raw'] if from_oozie_action else Notebook.statement_with_variables(snippet)
      )
      for snippet in self.get_data()['snippets']
    ])
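
  # Substitutes ${name} (or $name for Pig snippets) parameters in the raw
  # statement with each variable's value, falling back to its placeholder.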
  @staticmethod
  def statement_with_variables(snippet):
    statement_raw = snippet['statement_raw']
    hasCurlyBracketParameters = snippet['type'] != 'pig'
    variables = {}
    for variable in snippet['variables']:
      variables[variable['name']] = variable

    if variables:
      variables_names = []
      for variable in snippet['variables']:
        variables_names.append(variable['name'])
      variablesString = '|'.join(variables_names)

      def replace(match):
        p1 = match.group(1)
        p2 = match.group(2)
        variable = variables[p2]
        value = smart_str(variable['value'])
        return smart_str(p1) + smart_str(value if value is not None else variable['meta'].get('placeholder', ''))

      return re.sub(
        "([^\\\\])\\$" + ("{(" if hasCurlyBracketParameters else "(") + variablesString + ")(=[^}]*)?" + ("}" if hasCurlyBracketParameters else ""),
        replace,
        smart_str(statement_raw)
      )

    return statement_raw

  def add_hive_snippet(self, database, sql):
    _data = json.loads(self.data)

    _data['snippets'].append(self._make_snippet({
      'status': 'running',
      'statement_raw': sql,
      'statement': sql,
      'type': 'hive',
      'properties': {
        'files': [],
        'functions': [],
        'settings': [],
      },
      'database': database,
    }))
    self._add_session(_data, 'hive')

    self.data = json.dumps(_data)

  def add_java_snippet(self, clazz, app_jar, arguments, files):
    _data = json.loads(self.data)

    _data['snippets'].append(self._make_snippet({
      u'type': u'java',
      u'status': u'running',
      u'properties': {
        u'files': files,
        u'class': clazz,
        u'app_jar': app_jar,
        u'arguments': arguments,
        u'archives': [],
      }
    }))
    self._add_session(_data, 'java')

    self.data = json.dumps(_data)

  def add_sqoop_snippet(self, statement, arguments, files):
    _data = json.loads(self.data)

    _data['snippets'].append(self._make_snippet({
      u'type': u'sqoop1',
      u'status': u'running',
      u'properties': {
        u'files': files,
        u'arguments': arguments,
        u'archives': [],
        u'statement': statement
      }
    }))
    self._add_session(_data, 'java')

    self.data = json.dumps(_data)

  def add_spark_snippet(self, clazz, jars, arguments, files):
    _data = json.loads(self.data)

    _data['snippets'].append(self._make_snippet({
      u'type': u'spark',
      u'status': u'running',
      u'properties': {
        u'files': files,
        u'class': clazz,
        u'app_jar': jars,
        u'arguments': arguments,
        u'archives': [],
        u'spark_opts': ''
      }
    }))
    self._add_session(_data, 'spark')

    self.data = json.dumps(_data)

  def add_shell_snippet(self, shell_command, arguments=None, archives=None, files=None, env_var=None, last_executed=None,
                        capture_output=True):
    _data = json.loads(self.data)

    if arguments is None:
      arguments = []
    if archives is None:
      archives = []
    if files is None:
      files = []
    if env_var is None:
      env_var = []

    _data['snippets'].append(self._make_snippet({
      u'type': u'shell',
      u'status': u'running',
      u'properties': {
        u'files': files,
        u'shell_command': shell_command,
        u'arguments': arguments,
        u'archives': archives,
        u'env_var': env_var,
        u'command_path': shell_command,
        u'capture_output': capture_output
      },
      u'lastExecuted': last_executed
    }))
    self._add_session(_data, 'shell')

    self.data = json.dumps(_data)
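
  # Normalizes a partial snippet spec, filling in the defaults: a generated id,
  # 'ready' status, and empty result and variables.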
  def _make_snippet(self, _snippet):
    return {
      'status': _snippet.get('status', 'ready'),
      'id': str(uuid.uuid4()),
      'statement_raw': _snippet.get('statement', ''),
      'statement': _snippet.get('statement', ''),
      'type': _snippet.get('type'),
      'properties': _snippet['properties'],
      'name': _snippet.get('name', '%(type)s snippet' % _snippet),
      'database': _snippet.get('database'),
      'result': {},
      'variables': [],
      'lastExecuted': _snippet.get('lastExecuted'),
      'capture_output': _snippet.get('capture_output', True)
    }

  def _add_session(self, data, snippet_type):
    from notebook.connectors.hiveserver2 import HS2Api  # Cyclic dependency

    if snippet_type not in [_s['type'] for _s in data['sessions']]:
      data['sessions'].append({
        'type': snippet_type,
        'properties': HS2Api.get_properties(snippet_type),
        'id': None
      })

  def execute(self, request, batch=False):
    from notebook.api import _execute_notebook  # Cyclic dependency

    notebook_data = self.get_data()
    snippet = notebook_data['snippets'][0]
    snippet['wasBatchExecuted'] = batch

    return _execute_notebook(request, notebook_data, snippet)
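
  # A minimal usage sketch (the database and statement are illustrative):
  #   notebook = Notebook()
  #   notebook.add_hive_snippet('default', 'SELECT 1')
  #   handle = notebook.execute_and_wait(request, timeout_sec=60.0, include_results=True)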
  def execute_and_wait(self, request, timeout_sec=30.0, sleep_interval=1, include_results=False):
    """
    Run the query, then poll its status until it finishes or the timeout expires.
    """
    handle = self.execute(request, batch=False)
    if handle['status'] != 0:
      raise QueryError(message='SQL statement failed.', handle=handle)

    operation_id = handle['history_uuid']
    curr = time.time()
    end = curr + timeout_sec

    handle = self.check_status(request, operation_id=operation_id)

    while curr <= end:
      if handle['status'] == 0 and handle['query_status']['status'] not in ('waiting', 'running'):
        if include_results and handle['query_status']['status'] == 'available':
          handle.update(
            self.fetch_result_data(request, operation_id=operation_id)
          )
          # TODO: close
        return handle

      handle = self.check_status(request, operation_id=operation_id)
      time.sleep(sleep_interval)
      curr = time.time()

    # TODO
    # msg = "The query timed out after %(timeout)d seconds, canceled query." % {'timeout': timeout_sec}
    # LOG.warning(msg)
    # try:
    #   self.cancel_operation(handle)
    #   # get_api(request, snippet).cancel(notebook, snippet)
    # except Exception as e:
    #   msg = "Failed to cancel query."
    #   LOG.warning(msg)
    #   self.close_operation(handle)
    #   raise QueryServerException(e, message=msg)

    raise OperationTimeout()

  def check_status(self, request, operation_id):
    from notebook.api import _check_status

    return _check_status(request, operation_id=operation_id)

  def fetch_result_data(self, request, operation_id):
    from notebook.api import _fetch_result_data

    return _fetch_result_data(request, operation_id=operation_id, rows=100, start_over=False, nulls_only=True)
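

# Resolves a connector type to its configured interpreter; hbase, kafka, solr
# and custom fall back to built-in definitions when not configured explicitly.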
def get_interpreter(connector_type, user=None):
  interpreter = [
    interpreter for interpreter in get_ordered_interpreters(user) if connector_type == interpreter['type']
  ]

  if not interpreter:
    if connector_type == 'hbase':  # TODO move to connectors
      interpreter = [{
        'name': 'hbase',
        'type': 'hbase',
        'interface': 'hbase',
        'options': {},
        'is_sql': False
      }]
    elif connector_type == 'kafka':
      interpreter = [{
        'name': 'kafka',
        'type': 'kafka',
        'interface': 'kafka',
        'options': {},
        'is_sql': False
      }]
    elif connector_type == 'solr':
      interpreter = [{
        'name': 'solr',
        'type': 'solr',
        'interface': 'solr',
        'options': {},
        'is_sql': False
      }]
    elif connector_type == 'custom':
      interpreter = [{
        'name': 'custom',
        'type': 'custom',
        'interface': '',
        'options': {},
        'is_sql': False
      }]
    else:
      raise PopupException(_('Snippet type %s is not configured.') % connector_type)
  elif len(interpreter) > 1:
    raise PopupException(_('Snippet type %s matches more than one interpreter: %s') % (connector_type, len(interpreter)))

  return interpreter[0]


def patch_snippet_for_connector(snippet, user=None):
  """
  Connector backward compatibility switcher.
  # TODO Connector unification
  """
  if is_compute(snippet):
    snippet['connector'] = find_compute(cluster=snippet, user=user)
    if snippet['connector'] and snippet['connector'].get('dialect'):
      snippet['dialect'] = snippet['connector']['dialect']
    return

  if snippet.get('connector') and snippet['connector'].get('type'):
    if snippet['connector'].get('dialect') != 'hplsql':  # Workaround for hplsql describe not working
      snippet['type'] = snippet['connector']['type']  # To rename to 'id'

  if snippet.get('connector') and snippet['connector'].get('dialect'):
    snippet['dialect'] = snippet['connector']['dialect']
  else:
    snippet['dialect'] = snippet['type']
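

# Factory: picks the connector Api implementation for a snippet, keyed on the
# resolved interpreter's 'interface' (hiveserver2, sqlalchemy, jdbc, livy...).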
def get_api(request, snippet):
  from notebook.connectors.oozie_batch import OozieApi

  if snippet.get('wasBatchExecuted') and not TASK_SERVER.ENABLED.get():
    return OozieApi(user=request.user, request=request)

  if snippet.get('type') == 'report':
    snippet['type'] = 'impala'

  patch_snippet_for_connector(snippet, request.user)

  connector_name = snippet['type']
  interpreter = None

  if has_connectors() and snippet.get('type') == 'hello' and is_admin(request.user):
    LOG.debug('Using the interpreter from snippet')
    interpreter = snippet.get('interpreter')
  elif is_cdw_compute_enabled():
    LOG.debug("Finding the compute from db using snippet: %s" % snippet)
    interpreter = find_compute(cluster=snippet, user=request.user)

  if interpreter is None:
    LOG.debug("Picking up the connectors from the configs using connector_name: %s" % connector_name)
    interpreter = get_interpreter(connector_type=connector_name, user=request.user)

  interface = interpreter['interface']

  # Reconstruct the 'custom' interpreter from the snippet itself.
  if snippet.get('type') == 'custom':
    interface = snippet.get('interface')
    interpreter['options'] = snippet.get('options')

  LOG.debug('Selected interpreter %s interface=%s compute=%s' % (
    interpreter['type'],
    interface,
    interpreter.get('compute') and interpreter['compute']['name']
  ))

  if interface == 'hiveserver2' or interface == 'hms':
    from notebook.connectors.hiveserver2 import HS2Api
    return HS2Api(user=request.user, request=request, interpreter=interpreter)
  elif interface == 'oozie':
    return OozieApi(user=request.user, request=request)
  elif interface == 'livy':
    from notebook.connectors.spark_shell import SparkApi
    return SparkApi(request.user, interpreter=interpreter)
  elif interface == 'livy-batch':
    from notebook.connectors.spark_batch import SparkBatchApi
    return SparkBatchApi(request.user, interpreter=interpreter)
  elif interface == 'text' or interface == 'markdown':
    from notebook.connectors.text import TextApi
    return TextApi(request.user)
  elif interface == 'rdbms':
    from notebook.connectors.rdbms import RdbmsApi
    return RdbmsApi(request.user, interpreter=snippet['type'], query_server=snippet.get('query_server'))
  elif interface == 'jdbc':
    if interpreter['options'] and interpreter['options'].get('url', '').find('teradata') >= 0:
      from notebook.connectors.jdbc_teradata import JdbcApiTeradata
      return JdbcApiTeradata(request.user, interpreter=interpreter)
    elif interpreter['options'] and interpreter['options'].get('url', '').find('awsathena') >= 0:
      from notebook.connectors.jdbc_athena import JdbcApiAthena
      return JdbcApiAthena(request.user, interpreter=interpreter)
    elif interpreter['options'] and interpreter['options'].get('url', '').find('presto') >= 0:
      from notebook.connectors.jdbc_presto import JdbcApiPresto
      return JdbcApiPresto(request.user, interpreter=interpreter)
    elif interpreter['options'] and interpreter['options'].get('url', '').find('clickhouse') >= 0:
      from notebook.connectors.jdbc_clickhouse import JdbcApiClickhouse
      return JdbcApiClickhouse(request.user, interpreter=interpreter)
    elif interpreter['options'] and interpreter['options'].get('url', '').find('vertica') >= 0:
      from notebook.connectors.jdbc_vertica import JdbcApiVertica
      return JdbcApiVertica(request.user, interpreter=interpreter)
    elif interpreter['options'] and interpreter['options'].get('driver', '').find('kyuubi') >= 0:
      from notebook.connectors.jdbc_kyuubi import JdbcApiKyuubi
      return JdbcApiKyuubi(request.user, interpreter=interpreter)
    else:
      from notebook.connectors.jdbc import JdbcApi
      return JdbcApi(request.user, interpreter=interpreter)
  elif interface == 'sqlflow':
    from notebook.connectors.sqlflow import SqlFlowApi
    return SqlFlowApi(request.user, interpreter=interpreter)
  elif interface == 'teradata':
    from notebook.connectors.jdbc_teradata import JdbcApiTeradata
    return JdbcApiTeradata(request.user, interpreter=interpreter)
  elif interface == 'athena':
    from notebook.connectors.jdbc_athena import JdbcApiAthena
    return JdbcApiAthena(request.user, interpreter=interpreter)
  elif interface == 'presto':
    from notebook.connectors.jdbc_presto import JdbcApiPresto
    return JdbcApiPresto(request.user, interpreter=interpreter)
  elif interface == 'sqlalchemy':
    from notebook.connectors.sql_alchemy import SqlAlchemyApi
    return SqlAlchemyApi(request.user, interpreter=interpreter)
  elif interface == 'solr':
    from notebook.connectors.solr import SolrApi
    return SolrApi(request.user, interpreter=interpreter)
  elif interface == 'hbase':
    from notebook.connectors.hbase import HBaseApi
    return HBaseApi(request.user)
  elif interface == 'ksql':
    from notebook.connectors.ksql import KSqlApi
    return KSqlApi(request.user, interpreter=interpreter)
  elif interface == 'flink':
    from notebook.connectors.flink_sql import FlinkSqlApi
    return FlinkSqlApi(request.user, interpreter=interpreter)
  elif interface == 'trino':
    from notebook.connectors.trino import TrinoApi
    return TrinoApi(request.user, interpreter=interpreter)
  elif interface == 'kafka':
    from notebook.connectors.kafka import KafkaApi
    return KafkaApi(request.user)
  elif interface == 'pig':
    return OozieApi(user=request.user, request=request)  # Backward compatibility until Hue 4
  else:
    raise PopupException(_('Notebook connector interface not recognized: %s') % interface)


def _get_snippet_session(notebook, snippet):
  session = [session for session in notebook['sessions'] if session['type'] == snippet['type']]
  if not session:
    raise SessionExpired()
  else:
    return session[0]


# Base API
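# Base class the connectors extend; the methods below are safe defaults
# (no-ops or empty results) for subclasses to override.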
class Api(object):

  def __init__(self, user, interpreter=None, request=None, query_server=None):
    self.user = user
    self.interpreter = interpreter
    self.request = request
    self.query_server = query_server

  def create_session(self, lang, properties=None):
    return {
      'type': lang,
      'id': None,
      'properties': properties if properties is not None else []
    }

  def close_session(self, session):
    pass

  def fetch_result(self, notebook, snippet, rows, start_over):
    pass

  def can_start_over(self, notebook, snippet):
    return False

  def fetch_result_size(self, notebook, snippet):
    raise OperationNotSupported()

  def download(self, notebook, snippet, file_format='csv'):
    from beeswax import conf, data_export  # TODO: Move to notebook?

    result_wrapper = ExecutionWrapper(self, notebook, snippet)

    max_rows = conf.DOWNLOAD_ROW_LIMIT.get()
    max_bytes = conf.DOWNLOAD_BYTES_LIMIT.get()

    content_generator = data_export.DataAdapter(result_wrapper, max_rows=max_rows, max_bytes=max_bytes)
    return export_csvxls.create_generator(content_generator, file_format)

  def get_log(self, notebook, snippet, startFrom=None, size=None):
    return 'No logs'

  def autocomplete(self, snippet, database=None, table=None, column=None, nested=None, operation=None):
    return {}

  def progress(self, notebook, snippet, logs=None):
    return 50

  def get_jobs(self, notebook, snippet, logs):
    return []

  def get_sample_data(self, snippet, database=None, table=None, column=None, is_async=False, operation=None):
    raise NotImplementedError()

  def explain(self, notebook, snippet):
    return {
      'status': 0,
      'explanation': '',
      'statement': '',
    }

  def export_data_as_hdfs_file(self, snippet, target_file, overwrite):
    raise NotImplementedError()

  def export_data_as_table(self, notebook, snippet, destination, is_temporary=False, location=None):
    raise NotImplementedError()

  def export_large_data_to_hdfs(self, notebook, snippet, destination):
    raise NotImplementedError()

  def statement_risk(self, interface, notebook, snippet):
    response = self._get_current_statement(notebook, snippet)
    query = response['statement']

    client = get_optimizer_api(self.user, interface)
    patch_snippet_for_connector(snippet, self.user)

    return client.query_risk(query=query, source_platform=snippet['dialect'], db_name=snippet.get('database') or 'default')

  def statement_compatibility(self, interface, notebook, snippet, source_platform, target_platform):
    response = self._get_current_statement(notebook, snippet)
    query = response['statement']

    client = get_optimizer_api(self.user, interface)

    return client.query_compatibility(source_platform, target_platform, query)

  def statement_similarity(self, interface, notebook, snippet, source_platform):
    response = self._get_current_statement(notebook, snippet)
    query = response['statement']

    client = get_optimizer_api(self.user, interface)

    return client.similar_queries(source_platform, query)

  def describe(self, notebook, snippet, database=None, table=None, column=None):
    if column:
      response = self.describe_column(notebook, snippet, database=database, table=table, column=column)
    elif table:
      response = {
        'status': 0,
        'name': table or '',
        'partition_keys': [],
        'cols': [],
        'path_location': '',
        'hdfs_link': '',
        'comment': '',
        'is_view': False,
        'properties': [],
        'details': {'properties': {'table_type': ''}, 'stats': {}},
        'stats': []
      }
      describe_table = self.describe_table(notebook, snippet, database, table)
      response.update(describe_table)
    else:
      response = {
        'status': 0,
        'owner_name': '',
        'owner_type': '',
        'parameters': '',
        'hdfs_link': '',
        'message': ''
      }
      describe_database = self.describe_database(notebook, snippet, database)
      response.update(describe_database)

    return response

  def describe_column(self, notebook, snippet, database=None, table=None, column=None):
    return []

  def describe_table(self, notebook, snippet, database=None, table=None):
    response = {}

    autocomplete = self.autocomplete(snippet, database=database, table=table)
    response['cols'] = autocomplete['extended_columns'] if autocomplete and autocomplete.get('extended_columns') else []

    return response

  def describe_database(self, notebook, snippet, database=None):
    return {}

  def close_statement(self, notebook, snippet):
    pass

  def _get_current_statement(self, notebook, snippet):
    should_close, resp = get_current_statement(snippet)
    if should_close:
      try:
        self.close_statement(notebook, snippet)  # Always close the previous statement of a multi-query execution
      except Exception:
        LOG.warning('Could not close previous multiquery query')

    return resp

  def get_log_is_full_log(self, notebook, snippet):
    return True


def _get_snippet_name(notebook, unique=False, table_format=False):
  name = (('%(name)s' + ('-%(id)s' if unique else '') if notebook.get('name') else '%(type)s-%(id)s') % notebook)
  if table_format:
    name = re.sub(r'[-|\s:]', '_', name)
  return name
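

# Drives a query to completion for streaming export (see Api.download):
# executes it if needed, polls until results are available, then pages rows
# through the connector Api.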
class ExecutionWrapper(object):

  def __init__(self, api, notebook, snippet, callback=None):
    self.api = api
    self.notebook = notebook
    self.snippet = snippet
    self.callback = callback
    self.should_close = False

  def fetch(self, handle, start_over=None, rows=None):
    if start_over:
      if not self.snippet['result'].get('handle') \
          or not self.snippet['result']['handle'].get('guid') \
          or not self.api.can_start_over(self.notebook, self.snippet):
        start_over = False
        handle = self.api.execute(self.notebook, self.snippet)
        self.snippet['result']['handle'] = handle

        if self.callback and hasattr(self.callback, 'on_execute'):
          self.callback.on_execute(handle)

        self.should_close = True
        self._until_available()

    if self.snippet['result']['handle'].get('sync', False):
      result = self.snippet['result']['handle']['result']
    else:
      result = self.api.fetch_result(self.notebook, self.snippet, rows, start_over)

    return ResultWrapper(result.get('meta'), result.get('data'), result.get('has_more'))

  def _until_available(self):
    if self.snippet['result']['handle'].get('sync', False):
      return  # Request is already completed

    count = 0
    sleep_seconds = 1
    check_status_count = 0
    get_log_is_full_log = self.api.get_log_is_full_log(self.notebook, self.snippet)

    while True:
      response = self.api.check_status(self.notebook, self.snippet)
      if self.callback and hasattr(self.callback, 'on_status'):
        self.callback.on_status(response['status'])
      if self.callback and hasattr(self.callback, 'on_log'):
        log = self.api.get_log(self.notebook, self.snippet, startFrom=count)
        if get_log_is_full_log:
          log = log[count:]

        self.callback.on_log(log)
        count += len(log)

      if response['status'] not in ['waiting', 'running', 'submitted']:
        break

      # Back off polling while the query keeps running (check the larger threshold first)
      check_status_count += 1
      if check_status_count > 10:
        sleep_seconds = 10
      elif check_status_count > 5:
        sleep_seconds = 5
      time.sleep(sleep_seconds)

  def close(self, handle):
    if self.should_close:
      self.should_close = False
      self.api.close_statement(self.notebook, self.snippet)


class ResultWrapper(object):

  def __init__(self, cols, rows, has_more):
    self._cols = cols
    self._rows = rows
    self.has_more = has_more

  def full_cols(self):
    return self._cols

  def rows(self):
    return self._rows