#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import base64
import copy
import logging
import re
import StringIO
import struct
import time

from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _

from desktop.conf import USE_DEFAULT_CONFIGURATION
from desktop.lib.conf import BoundConfig
from desktop.lib.exceptions import StructuredException
from desktop.lib.exceptions_renderable import PopupException
from desktop.lib.i18n import force_unicode
from desktop.lib.rest.http_client import RestException
from desktop.models import DefaultConfiguration

from metadata.optimizer_client import OptimizerApi
from notebook.connectors.base import Api, QueryError, QueryExpired, OperationTimeout, OperationNotSupported


LOG = logging.getLogger(__name__)

try:
  from beeswax import conf as beeswax_conf, data_export
  from beeswax.api import _autocomplete, _get_sample_data
  from beeswax.conf import CONFIG_WHITELIST as hive_settings, DOWNLOAD_CELL_LIMIT
  from beeswax.data_export import upload
  from beeswax.design import hql_query, strip_trailing_semicolon, split_statements
  from beeswax.models import QUERY_TYPES, HiveServerQueryHandle, HiveServerQueryHistory, QueryHistory, Session
  from beeswax.server import dbms
  from beeswax.server.dbms import get_query_server_config, QueryServerException
  from beeswax.views import parse_out_jobs
except ImportError, e:
  LOG.warn('Hive and HiveServer2 interfaces are not enabled')
  hive_settings = None

try:
  from impala import api  # Force checking if Impala is enabled
  from impala.conf import CONFIG_WHITELIST as impala_settings, SSL as impala_ssl_conf
  from impala.server import get_api as get_impalad_api, ImpalaDaemonApiException
except ImportError, e:
  LOG.warn("Impala app is not enabled")
  impala_settings = None

try:
  from jobbrowser.views import get_job
except (AttributeError, ImportError), e:
  LOG.warn("Job Browser app is not enabled")


DEFAULT_HIVE_ENGINE = 'mr'


def query_error_handler(func):
  def decorator(*args, **kwargs):
    try:
      return func(*args, **kwargs)
    except StructuredException, e:
      message = force_unicode(str(e))
      if 'timed out' in message:
        raise OperationTimeout(e)
      else:
        raise QueryError(message)
    except QueryServerException, e:
      message = force_unicode(str(e))
      if 'Invalid query handle' in message or 'Invalid OperationHandle' in message:
        raise QueryExpired(e)
      else:
        raise QueryError(message)
  return decorator

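# Illustrative usage: query_error_handler wraps the HS2Api methods below so
# low-level server errors surface as notebook-level exceptions, e.g.
#
#   @query_error_handler
#   def check_status(self, notebook, snippet):
#     ...
#
# A StructuredException whose message contains 'timed out' is re-raised as
# OperationTimeout, a QueryServerException mentioning an invalid handle becomes
# QueryExpired, and anything else becomes a generic QueryError.
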
def is_hive_enabled():
  return hive_settings is not None and type(hive_settings) == BoundConfig


def is_impala_enabled():
  return impala_settings is not None and type(impala_settings) == BoundConfig


class HiveConfiguration(object):

  APP_NAME = 'hive'

  PROPERTIES = [
    {
      "multiple": True,
      "defaultValue": [],
      "value": [],
      "nice_name": _("Files"),
      "key": "files",
      "help_text": _("Add one or more files, jars, or archives to the list of resources."),
      "type": "hdfs-files"
    }, {
      "multiple": True,
      "defaultValue": [],
      "value": [],
      "nice_name": _("Functions"),
      "key": "functions",
      "help_text": _("Add one or more registered UDFs (requires function name and fully-qualified class name)."),
      "type": "functions"
    }, {
      "multiple": True,
      "defaultValue": [],
      "value": [],
      "nice_name": _("Settings"),
      "key": "settings",
      "help_text": _("Hive and Hadoop configuration properties."),
      "type": "settings",
      "options": [config.lower() for config in hive_settings.get()] if is_hive_enabled() and hasattr(hive_settings, 'get') else []
    }
  ]


class ImpalaConfiguration(object):

  APP_NAME = 'impala'

  PROPERTIES = [
    {
      "multiple": True,
      "defaultValue": [],
      "value": [],
      "nice_name": _("Settings"),
      "key": "settings",
      "help_text": _("Impala configuration properties."),
      "type": "settings",
      "options": [config.lower() for config in impala_settings.get()] if is_impala_enabled() else []
    }
  ]


class HS2Api(Api):

  @staticmethod
  def get_properties(lang='hive'):
    return ImpalaConfiguration.PROPERTIES if lang == 'impala' else HiveConfiguration.PROPERTIES

  @query_error_handler
  def create_session(self, lang='hive', properties=None):
    application = 'beeswax' if lang == 'hive' else lang

    session = Session.objects.get_session(self.user, application=application)

    if session is None:
      session = dbms.get(self.user, query_server=get_query_server_config(name=lang)).open_session(self.user)

    response = {
      'type': lang,
      'id': session.id
    }

    if not properties:
      config = None
      if USE_DEFAULT_CONFIGURATION.get():
        config = DefaultConfiguration.objects.get_configuration_for_user(app=lang, user=self.user)

      if config is not None:
        properties = config.properties_list
      else:
        properties = self.get_properties(lang)

    response['properties'] = properties

    if lang == 'impala':
      http_addr = self._get_impala_server_url(session)
      response['http_addr'] = http_addr

    return response

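  # Illustrative response shape for create_session('hive') (hypothetical id):
  #
  #   {'type': 'hive', 'id': 1234, 'properties': [...]}
  #
  # For lang == 'impala' the response also carries 'http_addr', the impalad web
  # server address later used by _get_impala_result_size().
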
  @query_error_handler
  def close_session(self, session):
    app_name = session.get('type')
    session_id = session.get('id')

    query_server = get_query_server_config(name=app_name)

    response = {'status': -1, 'message': ''}

    try:
      filters = {'id': session_id, 'application': query_server['server_name']}
      if not self.user.is_superuser:
        filters['owner'] = self.user
      session = Session.objects.get(**filters)
    except Session.DoesNotExist:
      response['message'] = _('Session does not exist or you do not have permissions to close the session.')

    if session:
      session = dbms.get(self.user, query_server).close_session(session)
      response['status'] = 0
      response['message'] = _('Session successfully closed.')
      response['session'] = {'id': session_id, 'application': session.application, 'status': session.status_code}

    return response

  @query_error_handler
  def execute(self, notebook, snippet):
    db = self._get_db(snippet)

    statement = self._get_current_statement(db, snippet)
    session = self._get_session(notebook, snippet['type'])
    query = self._prepare_hql_query(snippet, statement['statement'], session)

    try:
      if statement.get('statement_id') == 0:
        db.use(query.database)
      handle = db.client.query(query, withMultipleSession=True)
    except QueryServerException, ex:
      raise QueryError(ex.message, handle=statement)

    # All good
    server_id, server_guid = handle.get()
    response = {
      'secret': server_id,
      'guid': server_guid,
      'operation_type': handle.operation_type,
      'has_result_set': handle.has_result_set,
      'modified_row_count': handle.modified_row_count,
      'log_context': handle.log_context,
      'session_guid': handle.session_guid
    }
    response.update(statement)

    return response

  @query_error_handler
  def check_status(self, notebook, snippet):
    response = {}
    db = self._get_db(snippet)

    handle = self._get_handle(snippet)
    operation = db.get_operation_status(handle)
    status = HiveServerQueryHistory.STATE_MAP[operation.operationState]

    if status.index in (QueryHistory.STATE.failed.index, QueryHistory.STATE.expired.index):
      if operation.errorMessage and 'transition from CANCELED to ERROR' in operation.errorMessage:  # Hive case on canceled query
        raise QueryExpired()
      else:
        raise QueryError(operation.errorMessage)

    response['status'] = 'running' if status.index in (QueryHistory.STATE.running.index, QueryHistory.STATE.submitted.index) else 'available'

    return response

  @query_error_handler
  def fetch_result(self, notebook, snippet, rows, start_over):
    db = self._get_db(snippet)

    handle = self._get_handle(snippet)
    results = db.fetch(handle, start_over=start_over, rows=rows)

    # No escaping...
    return {
      'has_more': results.has_more,
      'data': results.rows(),
      'meta': [{
        'name': column.name,
        'type': column.type,
        'comment': column.comment
      } for column in results.data_table.cols()],
      'type': 'table'
    }

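  # Illustrative fetch_result response (hypothetical rows and columns):
  #
  #   {'has_more': True,
  #    'data': [[1, 'a'], [2, 'b']],
  #    'meta': [{'name': 'id', 'type': 'INT_TYPE', 'comment': None},
  #             {'name': 'letter', 'type': 'STRING_TYPE', 'comment': None}],
  #    'type': 'table'}
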
  @query_error_handler
  def fetch_result_size(self, notebook, snippet):
    resp = {
      'rows': None,
      'size': None,
      'message': ''
    }

    if snippet.get('status') != 'available':
      raise QueryError(_('Result status is not available'))

    if snippet['type'] not in ('hive', 'impala'):
      raise OperationNotSupported(_('Cannot fetch result metadata for snippet type: %s') % snippet['type'])

    if snippet['type'] == 'hive':
      resp['rows'], resp['size'], resp['message'] = self._get_hive_result_size(notebook, snippet)
    else:  # Impala
      resp['rows'], resp['size'], resp['message'] = self._get_impala_result_size(notebook, snippet)

    return resp

  @query_error_handler
  def cancel(self, notebook, snippet):
    db = self._get_db(snippet)

    handle = self._get_handle(snippet)
    db.cancel_operation(handle)
    return {'status': 0}

  @query_error_handler
  def get_log(self, notebook, snippet, startFrom=None, size=None):
    db = self._get_db(snippet)

    handle = self._get_handle(snippet)
    return db.get_log(handle, start_over=startFrom == 0)

  @query_error_handler
  def close_statement(self, snippet):
    if snippet['type'] == 'impala':
      from impala import conf as impala_conf

    if (snippet['type'] == 'hive' and beeswax_conf.CLOSE_QUERIES.get()) or (snippet['type'] == 'impala' and impala_conf.CLOSE_QUERIES.get()):
      db = self._get_db(snippet)

      handle = self._get_handle(snippet)
      db.close_operation(handle)
      return {'status': 0}
    else:
      return {'status': -1}  # skipped

  @query_error_handler
  def download(self, notebook, snippet, format):
    try:
      db = self._get_db(snippet)
      handle = self._get_handle(snippet)
      # Test handle to verify if still valid
      db.fetch(handle, start_over=True, rows=1)
      return data_export.download(handle, format, db, id=snippet['id'])
    except Exception, e:
      title = 'The query result cannot be downloaded.'
      LOG.exception(title)

      if hasattr(e, 'message') and e.message:
        if 'generic failure: Unable to find a callback: 32775' in e.message:
          message = e.message + " " + _("Increase the sasl_max_buffer value in hue.ini")
        else:
          message = e.message
      else:
        message = e
      raise PopupException(_(title), detail=message)

  @query_error_handler
  def progress(self, snippet, logs):
    if snippet['type'] == 'hive':
      match = re.search('Total jobs = (\d+)', logs, re.MULTILINE)
      total = int(match.group(1)) if match else 1

      started = logs.count('Starting Job')
      ended = logs.count('Ended Job')

      progress = int((started + ended) * 100 / (total * 2))
      return max(progress, 5)  # Return 5% progress as a minimum
    elif snippet['type'] == 'impala':
      match = re.findall('(\d+)% Complete', logs, re.MULTILINE)
      # Retrieve the last reported progress percentage if it exists
      return int(match[-1]) if match and isinstance(match, list) else 0
    else:
      return 50

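  # Worked example for the Hive branch above (hypothetical log excerpt): with
  # logs containing "Total jobs = 2", one 'Starting Job' line and one
  # 'Ended Job' line,
  #
  #   progress = int((1 + 1) * 100 / (2 * 2))  # = 50
  #
  # so 50% is reported; queries with no job lines yet report the 5% floor.
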
  @query_error_handler
  def get_jobs(self, notebook, snippet, logs):
    jobs = []

    if snippet['type'] == 'hive':
      engine = self._get_hive_execution_engine(notebook, snippet)
      jobs_with_state = parse_out_jobs(logs, engine=engine, with_state=True)

      jobs = [{
        'name': job.get('job_id', ''),
        'url': reverse('jobbrowser.views.single_job', kwargs={'job': job.get('job_id', '')}),
        'started': job.get('started', False),
        'finished': job.get('finished', False)
      } for job in jobs_with_state]

    return jobs

  @query_error_handler
  def autocomplete(self, snippet, database=None, table=None, column=None, nested=None):
    db = self._get_db(snippet)
    return _autocomplete(db, database, table, column, nested)

  @query_error_handler
  def get_sample_data(self, snippet, database=None, table=None, column=None):
    db = self._get_db(snippet)
    return _get_sample_data(db, database, table, column)

  @query_error_handler
  def explain(self, notebook, snippet):
    db = self._get_db(snippet)
    response = self._get_current_statement(db, snippet)
    session = self._get_session(notebook, snippet['type'])
    query = self._prepare_hql_query(snippet, response.pop('statement'), session)

    try:
      explanation = db.explain(query)
    except QueryServerException, ex:
      raise QueryError(ex.message)

    return {
      'status': 0,
      'explanation': explanation.textual,
      'statement': query.get_query_statement(0),
    }

  @query_error_handler
  def export_data_as_hdfs_file(self, snippet, target_file, overwrite):
    db = self._get_db(snippet)

    handle = self._get_handle(snippet)
    max_cells = DOWNLOAD_CELL_LIMIT.get()

    upload(target_file, handle, self.request.user, db, self.request.fs, max_cells=max_cells)

    return '/filebrowser/view=%s' % target_file

  def export_data_as_table(self, notebook, snippet, destination, is_temporary=False, location=None):
    db = self._get_db(snippet)

    response = self._get_current_statement(db, snippet)
    session = self._get_session(notebook, snippet['type'])
    query = self._prepare_hql_query(snippet, response.pop('statement'), session)

    if 'select' not in query.hql_query.strip().lower():
      raise PopupException(_('Only SELECT statements can be saved. Provided statement: %(query)s') % {'query': query.hql_query})

    database = snippet.get('database') or 'default'
    table = destination

    if '.' in table:
      database, table = table.split('.', 1)

    db.use(query.database)

    hql = 'CREATE %sTABLE `%s`.`%s` %sAS %s' % ('TEMPORARY ' if is_temporary else '', database, table, "LOCATION '%s' " % location if location else '', query.hql_query)
    success_url = reverse('metastore:describe_table', kwargs={'database': database, 'table': table})

    return hql, success_url

  def export_large_data_to_hdfs(self, notebook, snippet, destination):
    db = self._get_db(snippet)

    response = self._get_current_statement(db, snippet)
    session = self._get_session(notebook, snippet['type'])
    query = self._prepare_hql_query(snippet, response.pop('statement'), session)

    if 'select' not in query.hql_query.strip().lower():
      raise PopupException(_('Only SELECT statements can be saved. Provided statement: %(query)s') % {'query': query.hql_query})

    db.use(query.database)

    hql = "INSERT OVERWRITE DIRECTORY '%s' %s" % (destination, query.hql_query)
    success_url = '/filebrowser/view=%s' % destination

    return hql, success_url

  def statement_risk(self, notebook, snippet):
    db = self._get_db(snippet)

    response = self._get_current_statement(db, snippet)
    query = response['statement']

    api = OptimizerApi()

    data = api.query_risk(query=query)
    data = data.get(snippet['type'] + 'Risk', {})

    return {
      'risk': data.get('risk'),
      'riskAnalysis': data.get('riskAnalysis'),
      'riskRecommendation': data.get('riskRecommendation')
    }

  def statement_compatibility(self, notebook, snippet, source_platform, target_platform):
    db = self._get_db(snippet)

    response = self._get_current_statement(db, snippet)
    query = response['statement']

    api = OptimizerApi()

    return api.query_compatibility(source_platform, target_platform, query)

  def upgrade_properties(self, lang='hive', properties=None):
    upgraded_properties = copy.deepcopy(self.get_properties(lang))

    # Check that current properties is a list of dictionary objects with 'key' and 'value' keys
    if not isinstance(properties, list) or \
        not all(isinstance(prop, dict) for prop in properties) or \
        not all('key' in prop for prop in properties) or not all('value' in prop for prop in properties):
      LOG.warn('Current properties are not formatted correctly, will replace with defaults.')
      return upgraded_properties

    valid_props_dict = dict((prop["key"], prop) for prop in upgraded_properties)
    curr_props_dict = dict((prop['key'], prop) for prop in properties)

    # Upgrade based on valid properties as needed
    if set(valid_props_dict.keys()) != set(curr_props_dict.keys()):
      settings = next((prop for prop in upgraded_properties if prop['key'] == 'settings'), None)
      if settings is not None and isinstance(properties, list):
        settings['value'] = properties
    else:  # No upgrade needed so return existing properties
      upgraded_properties = properties

    return upgraded_properties

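  # Upgrade sketch (hypothetical input): an old-style flat settings list such as
  #
  #   [{'key': 'hive.execution.engine', 'value': 'tez'}]
  #
  # has keys that do not match the current defaults ('files', 'functions',
  # 'settings'), so it is folded into the 'value' of the default 'settings'
  # property instead of being returned unchanged.
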
  def _get_session(self, notebook, type='hive'):
    session = next((session for session in notebook['sessions'] if session['type'] == type), None)
    return session

  def _get_hive_execution_engine(self, notebook, snippet):
    # Get hive.execution.engine from snippet properties, if none, then get from session
    properties = snippet['properties']
    settings = properties.get('settings', [])

    if not settings:
      session = self._get_session(notebook, 'hive')
      if not session:
        LOG.warn('Cannot get jobs, failed to find active HS2 session for user: %s' % self.user.username)
      else:
        properties = session['properties']
        settings = next((prop['value'] for prop in properties if prop['key'] == 'settings'), None)

    if settings:
      engine = next((setting['value'] for setting in settings if setting['key'] == 'hive.execution.engine'), DEFAULT_HIVE_ENGINE)
    else:
      engine = DEFAULT_HIVE_ENGINE

    return engine

  def _get_statements(self, hql_query):
    hql_query = strip_trailing_semicolon(hql_query)
    hql_query_sio = StringIO.StringIO(hql_query)

    statements = []
    for (start_row, start_col), (end_row, end_col), statement in split_statements(hql_query_sio.read()):
      statements.append({
        'start': {
          'row': start_row,
          'column': start_col
        },
        'end': {
          'row': end_row,
          'column': end_col
        },
        'statement': strip_trailing_semicolon(statement.strip())
      })
    return statements

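  # For illustration (hypothetical input): 'USE db1;\nSELECT 1;' is split into
  # two entries, each with start/end positions from split_statements() and the
  # trailing semicolon stripped, roughly:
  #
  #   [{'start': {'row': 0, 'column': 0}, 'end': {'row': 0, 'column': 8}, 'statement': 'USE db1'},
  #    {'start': {'row': 1, 'column': 0}, 'end': {'row': 1, 'column': 9}, 'statement': 'SELECT 1'}]
  #
  # Exact coordinates depend on beeswax.design.split_statements.
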
  def _get_current_statement(self, db, snippet):
    # Multiquery, if not first statement or arrived to the last query
    statement_id = snippet['result']['handle'].get('statement_id', 0)
    statements_count = snippet['result']['handle'].get('statements_count', 1)

    if snippet['result']['handle'].get('has_more_statements'):
      try:
        handle = self._get_handle(snippet)
        db.close_operation(handle)  # Close all the time past multi queries
      except:
        LOG.warn('Could not close previous multiquery query')
      statement_id += 1
    else:
      statement_id = 0

    statements = self._get_statements(snippet['statement'])

    resp = {
      'statement_id': statement_id,
      'has_more_statements': statement_id < len(statements) - 1,
      'statements_count': len(statements)
    }

    if statements_count != len(statements):
      statement_id = 0

    resp.update(statements[statement_id])

    return resp

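  # Paging sketch (hypothetical handle state): for a three-statement snippet the
  # handle advances across execute() calls as
  #
  #   {'statement_id': 0, 'has_more_statements': True, 'statements_count': 3}
  #   {'statement_id': 1, 'has_more_statements': True, 'statements_count': 3}
  #   {'statement_id': 2, 'has_more_statements': False, 'statements_count': 3}
  #
  # and editing the snippet so the statement count changes resets statement_id to 0.
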
  def _prepare_hql_query(self, snippet, statement, session):
    settings = snippet['properties'].get('settings', None)
    file_resources = snippet['properties'].get('files', None)
    functions = snippet['properties'].get('functions', None)
    properties = session['properties'] if session else []

    # Get properties from session if not defined in snippet
    if not settings:
      settings = next((prop['value'] for prop in properties if prop['key'] == 'settings'), None)

    if not file_resources:
      file_resources = next((prop['value'] for prop in properties if prop['key'] == 'files'), None)

    if not functions:
      functions = next((prop['value'] for prop in properties if prop['key'] == 'functions'), None)

    database = snippet.get('database') or 'default'

    return hql_query(
      statement,
      query_type=QUERY_TYPES[0],
      settings=settings,
      file_resources=file_resources,
      functions=functions,
      database=database
    )

  def get_select_star_query(self, snippet, database, table):
    db = self._get_db(snippet)
    table = db.get_table(database, table)
    return db.get_select_star_query(database, table, limit=1000)

  def _get_handle(self, snippet):
    try:
      snippet['result']['handle']['secret'], snippet['result']['handle']['guid'] = HiveServerQueryHandle.get_decoded(snippet['result']['handle']['secret'], snippet['result']['handle']['guid'])
    except KeyError:
      raise Exception('Operation has no valid handle attached')

    for key in snippet['result']['handle'].keys():
      if key not in ('log_context', 'secret', 'has_result_set', 'operation_type', 'modified_row_count', 'guid'):
        snippet['result']['handle'].pop(key)

    return HiveServerQueryHandle(**snippet['result']['handle'])

  def _get_db(self, snippet):
    if snippet['type'] == 'hive':
      name = 'beeswax'
    elif snippet['type'] == 'impala':
      name = 'impala'
    else:
      name = 'sparksql'

    return dbms.get(self.user, query_server=get_query_server_config(name=name))

  def _parse_job_counters(self, job_id):
    # Attempt to fetch total records from the job's Hive counter
    total_records, total_size = None, None
    job = get_job(self.request, job_id=job_id)

    if not job or not job.counters:
      raise PopupException(_('Failed to get job details or job does not contain counters data.'))

    counter_groups = job.counters.get('counterGroup')  # Returns list of counter groups with 'counterGroupName' and 'counter'
    if counter_groups:
      # Extract totalCounterValue from HIVE counter group
      hive_counters = next((group for group in counter_groups if group.get('counterGroupName', '').upper() == 'HIVE'), None)
      if hive_counters:
        total_records = next((counter.get('totalCounterValue') for counter in hive_counters['counter'] if counter['name'] == 'RECORDS_OUT_0'), None)
      else:
        LOG.info("No HIVE counter group found for job: %s" % job_id)

      # Extract totalCounterValue from FileSystemCounter counter group
      fs_counters = next((group for group in counter_groups if group.get('counterGroupName') == 'org.apache.hadoop.mapreduce.FileSystemCounter'), None)
      if fs_counters:
        total_size = next((counter.get('totalCounterValue') for counter in fs_counters['counter'] if counter['name'] == 'HDFS_BYTES_WRITTEN'), None)
      else:
        LOG.info("No FileSystemCounter counter group found for job: %s" % job_id)

    return total_records, total_size

  def _get_hive_result_size(self, notebook, snippet):
    total_records, total_size, msg = None, None, None
    engine = self._get_hive_execution_engine(notebook, snippet).lower()
    logs = self.get_log(notebook, snippet, startFrom=0)

    if engine == 'mr':
      jobs = self.get_jobs(notebook, snippet, logs)
      if jobs:
        last_job_id = jobs[-1].get('name')
        LOG.info("Hive query executed %d jobs, last job is: %s" % (len(jobs), last_job_id))
        total_records, total_size = self._parse_job_counters(job_id=last_job_id)
      else:
        msg = _('Hive query did not execute any jobs.')
    elif engine == 'spark':
      total_records_re = "RECORDS_OUT_0: (?P<total_records>\d+)"
      total_size_re = "Spark Job\[[a-z0-9-]+\] Metrics[A-Za-z0-9:\s]+ResultSize: (?P<total_size>\d+)"
      total_records_match = re.search(total_records_re, logs, re.MULTILINE)
      total_size_match = re.search(total_size_re, logs, re.MULTILINE)

      if total_records_match:
        total_records = int(total_records_match.group('total_records'))
      if total_size_match:
        total_size = int(total_size_match.group('total_size'))

    return total_records, total_size, msg

  def _get_impala_result_size(self, notebook, snippet):
    total_records_match = None
    total_records, total_size, msg = None, None, None

    query_id = self._get_impala_query_id(snippet)
    session = Session.objects.get_session(self.user, application='impala')
    protocol = 'https' if impala_ssl_conf.ENABLED.get() else 'http'
    server_url = '%s://%s' % (protocol, self._get_impala_server_url(session))

    if query_id:
      LOG.info("Attempting to get Impala query profile at server_url %s for query ID: %s" % (server_url, query_id))

      retries = 0
      max_retries = 10
      total_records_match = None
      while not total_records_match and retries < max_retries:
        time.sleep(1.0)
        fragment = self._get_impala_query_profile(server_url, query_id=query_id)
        total_records_re = "Coordinator Fragment F\d\d.+?RowsReturned: \d+(?:.\d+[KMB])? \((?P<total_records>\d+)\).*?(Averaged Fragment F\d\d)"
        total_records_match = re.search(total_records_re, fragment, re.MULTILINE | re.DOTALL)
        retries += 1

    if total_records_match:
      total_records = int(total_records_match.group('total_records'))

    return total_records, total_size, msg

  def _get_impala_query_id(self, snippet):
    guid = None
    if 'result' in snippet and 'handle' in snippet['result'] and 'guid' in snippet['result']['handle']:
      try:
        decoded_guid = base64.decodestring(snippet['result']['handle']['guid'])
        guid = "%x:%x" % struct.unpack(b"QQ", decoded_guid)
      except Exception, e:
        LOG.warn('Failed to decode operation handle guid: %s' % e)
    else:
      LOG.warn('Snippet does not contain a valid result handle, cannot extract Impala query ID.')
    return guid

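  # Decoding sketch for the guid above (hypothetical handle): the base64-decoded
  # guid is 16 bytes, unpacked as two native-order 64-bit unsigned integers and
  # rendered as the lo:hi hex pair that matches the query ID format shown in the
  # Impala web UI:
  #
  #   lo, hi = struct.unpack(b"QQ", decoded_guid)
  #   query_id = "%x:%x" % (lo, hi)
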
  def _get_impala_server_url(self, session):
    impala_settings = session.get_formatted_properties()
    http_addr = next((setting['value'] for setting in impala_settings if setting['key'].lower() == 'http_addr'), None)
    return http_addr

  def _get_impala_query_profile(self, server_url, query_id):
    api = get_impalad_api(user=self.user, url=server_url)

    try:
      query_profile = api.get_query_profile(query_id)
      profile = query_profile.get('profile')
    except (RestException, ImpalaDaemonApiException), e:
      raise PopupException(_("Failed to get query profile from Impala Daemon server: %s") % e)

    if not profile:
      raise PopupException(_("Could not find profile in query profile response from Impala Daemon Server."))

    return profile