#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import base64
import copy
import logging
import re
import StringIO
import struct

from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _

from desktop.conf import USE_DEFAULT_CONFIGURATION
from desktop.lib.conf import BoundConfig
from desktop.lib.exceptions import StructuredException
from desktop.lib.exceptions_renderable import PopupException
from desktop.lib.i18n import force_unicode
from desktop.lib.rest.http_client import RestException
from desktop.models import DefaultConfiguration

from metadata.optimizer_client import OptimizerApi
from notebook.connectors.base import Api, QueryError, QueryExpired, OperationTimeout, OperationNotSupported


LOG = logging.getLogger(__name__)

try:
  from beeswax import conf as beeswax_conf, data_export
  from beeswax.api import _autocomplete, _get_sample_data
  from beeswax.conf import CONFIG_WHITELIST as hive_settings, DOWNLOAD_CELL_LIMIT
  from beeswax.data_export import upload
  from beeswax.design import hql_query, strip_trailing_semicolon, split_statements
  from beeswax.models import QUERY_TYPES, HiveServerQueryHandle, HiveServerQueryHistory, QueryHistory, Session
  from beeswax.server import dbms
  from beeswax.server.dbms import get_query_server_config, QueryServerException
  from beeswax.views import parse_out_jobs
except ImportError, e:
  LOG.warn('Hive and HiveServer2 interfaces are not enabled')
  hive_settings = None

try:
  from impala import api  # Force checking if Impala is enabled
  from impala.conf import CONFIG_WHITELIST as impala_settings, SSL as impala_ssl_conf
  from impala.server import get_api as get_impalad_api, ImpalaDaemonApiException
except ImportError, e:
  LOG.warn("Impala app is not enabled")
  impala_settings = None

try:
  from jobbrowser.views import get_job
except (AttributeError, ImportError), e:
  LOG.warn("Job Browser app is not enabled")


DEFAULT_HIVE_ENGINE = 'mr'

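# Decorator that normalizes server-side failures for the notebook API:
# timeouts surface as OperationTimeout, invalid or expired operation handles
# as QueryExpired, and everything else as a generic QueryError.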
def query_error_handler(func):
  def decorator(*args, **kwargs):
    try:
      return func(*args, **kwargs)
    except StructuredException, e:
      message = force_unicode(str(e))
      if 'timed out' in message:
        raise OperationTimeout(e)
      else:
        raise QueryError(message)
    except QueryServerException, e:
      message = force_unicode(str(e))
      if 'Invalid query handle' in message or 'Invalid OperationHandle' in message:
        raise QueryExpired(e)
      else:
        raise QueryError(message)
  return decorator

def is_hive_enabled():
  return hive_settings is not None and type(hive_settings) == BoundConfig


def is_impala_enabled():
  return impala_settings is not None and type(impala_settings) == BoundConfig

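# Property definitions that drive the session settings UI in the notebook;
# the 'settings' options are populated from each app's configuration
# whitelist when that app is enabled.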
class HiveConfiguration(object):

  APP_NAME = 'hive'

  PROPERTIES = [
    {
      "multiple": True,
      "defaultValue": [],
      "value": [],
      "nice_name": _("Files"),
      "key": "files",
      "help_text": _("Add one or more files, jars, or archives to the list of resources."),
      "type": "hdfs-files"
    }, {
      "multiple": True,
      "defaultValue": [],
      "value": [],
      "nice_name": _("Functions"),
      "key": "functions",
      "help_text": _("Add one or more registered UDFs (requires function name and fully-qualified class name)."),
      "type": "functions"
    }, {
      "multiple": True,
      "defaultValue": [],
      "value": [],
      "nice_name": _("Settings"),
      "key": "settings",
      "help_text": _("Hive and Hadoop configuration properties."),
      "type": "settings",
      "options": [config.lower() for config in hive_settings.get()] if is_hive_enabled() and hasattr(hive_settings, 'get') else []
    }
  ]


class ImpalaConfiguration(object):

  APP_NAME = 'impala'

  PROPERTIES = [
    {
      "multiple": True,
      "defaultValue": [],
      "value": [],
      "nice_name": _("Settings"),
      "key": "settings",
      "help_text": _("Impala configuration properties."),
      "type": "settings",
      "options": [config.lower() for config in impala_settings.get()] if is_impala_enabled() else []
    }
  ]

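# Notebook connector for HiveServer2-compatible engines: Hive, Impala and,
# as a fallback, Spark SQL (see _get_db()).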
class HS2Api(Api):

  @staticmethod
  def get_properties(lang='hive'):
    return ImpalaConfiguration.PROPERTIES if lang == 'impala' else HiveConfiguration.PROPERTIES

  @query_error_handler
  def create_session(self, lang='hive', properties=None):
    application = 'beeswax' if lang == 'hive' else lang

    session = Session.objects.get_session(self.user, application=application)

    if session is None:
      session = dbms.get(self.user, query_server=get_query_server_config(name=lang)).open_session(self.user)

    response = {
      'type': lang,
      'id': session.id
    }

    if not properties:
      config = None
      if USE_DEFAULT_CONFIGURATION.get():
        config = DefaultConfiguration.objects.get_configuration_for_user(app=lang, user=self.user)

      if config is not None:
        properties = config.properties_list
      else:
        properties = self.get_properties(lang)

    response['properties'] = properties

    if lang == 'impala':
      http_addr = self._get_impala_server_url(session)
      response['http_addr'] = http_addr

    return response

  @query_error_handler
  def close_session(self, session):
    app_name = session.get('type')
    session_id = session.get('id')

    query_server = get_query_server_config(name=app_name)

    response = {'status': -1, 'message': ''}

    try:
      filters = {'id': session_id, 'application': query_server['server_name']}
      if not self.user.is_superuser:
        filters['owner'] = self.user
      session = Session.objects.get(**filters)
    except Session.DoesNotExist:
      response['message'] = _('Session does not exist or you do not have permissions to close the session.')

    if session:
      session = dbms.get(self.user, query_server).close_session(session)
      response['status'] = 0
      response['message'] = _('Session successfully closed.')
      response['session'] = {'id': session_id, 'application': session.application, 'status': session.status_code}

    return response

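  # Runs the snippet's current statement and returns a JSON-serializable
  # handle (secret/guid pair plus operation metadata) that is stored on the
  # snippet and decoded back into a server handle by _get_handle().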
  @query_error_handler
  def execute(self, notebook, snippet):
    db = self._get_db(snippet)

    statement = self._get_current_statement(db, snippet)
    session = self._get_session(notebook, snippet['type'])
    query = self._prepare_hql_query(snippet, statement['statement'], session)

    try:
      if statement.get('statement_id') == 0:
        db.use(query.database)
      handle = db.client.query(query, withMultipleSession=True)
    except QueryServerException, ex:
      raise QueryError(ex.message, handle=statement)

    # All good
    server_id, server_guid = handle.get()
    response = {
      'secret': server_id,
      'guid': server_guid,
      'operation_type': handle.operation_type,
      'has_result_set': handle.has_result_set,
      'modified_row_count': handle.modified_row_count,
      'log_context': handle.log_context,
      'session_guid': handle.session_guid
    }
    response.update(statement)

    return response

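  # Polls the operation state and maps it onto the notebook statuses
  # 'running' and 'available'; failed or expired operations raise the
  # corresponding exception instead.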
  @query_error_handler
  def check_status(self, notebook, snippet):
    response = {}
    db = self._get_db(snippet)

    handle = self._get_handle(snippet)
    operation = db.get_operation_status(handle)
    status = HiveServerQueryHistory.STATE_MAP[operation.operationState]

    if status.index in (QueryHistory.STATE.failed.index, QueryHistory.STATE.expired.index):
      if operation.errorMessage and 'transition from CANCELED to ERROR' in operation.errorMessage:  # Hive case on canceled query
        raise QueryExpired()
      else:
        raise QueryError(operation.errorMessage)

    response['status'] = 'running' if status.index in (QueryHistory.STATE.running.index, QueryHistory.STATE.submitted.index) else 'available'

    return response

  @query_error_handler
  def fetch_result(self, notebook, snippet, rows, start_over):
    db = self._get_db(snippet)

    handle = self._get_handle(snippet)
    results = db.fetch(handle, start_over=start_over, rows=rows)

    # No escaping...
    return {
      'has_more': results.has_more,
      'data': results.rows(),
      'meta': [{
        'name': column.name,
        'type': column.type,
        'comment': column.comment
      } for column in results.data_table.cols()],
      'type': 'table'
    }

  @query_error_handler
  def fetch_result_size(self, notebook, snippet):
    resp = {
      'rows': None,
      'size': None,
      'message': ''
    }

    if snippet.get('status') != 'available':
      raise QueryError(_('Result status is not available'))

    if snippet['type'] not in ('hive', 'impala'):
      raise OperationNotSupported(_('Cannot fetch result metadata for snippet type: %s') % snippet['type'])

    if snippet['type'] == 'hive':
      resp['rows'], resp['size'], resp['message'] = self._get_hive_result_size(notebook, snippet)
    else:  # Impala
      resp['rows'], resp['size'], resp['message'] = self._get_impala_result_size(notebook, snippet)

    return resp

  @query_error_handler
  def cancel(self, notebook, snippet):
    db = self._get_db(snippet)

    handle = self._get_handle(snippet)
    db.cancel_operation(handle)
    return {'status': 0}

  @query_error_handler
  def get_log(self, notebook, snippet, startFrom=None, size=None):
    db = self._get_db(snippet)

    handle = self._get_handle(snippet)
    return db.get_log(handle, start_over=startFrom == 0)

  @query_error_handler
  def close_statement(self, snippet):
    if snippet['type'] == 'impala':
      from impala import conf as impala_conf

    if (snippet['type'] == 'hive' and beeswax_conf.CLOSE_QUERIES.get()) or (snippet['type'] == 'impala' and impala_conf.CLOSE_QUERIES.get()):
      db = self._get_db(snippet)

      handle = self._get_handle(snippet)
      db.close_operation(handle)
      return {'status': 0}
    else:
      return {'status': -1}  # skipped

  @query_error_handler
  def download(self, notebook, snippet, format):
    try:
      db = self._get_db(snippet)
      handle = self._get_handle(snippet)
      # Test handle to verify if still valid
      db.fetch(handle, start_over=True, rows=1)
      return data_export.download(handle, format, db, id=snippet['id'])
    except Exception, e:
      title = 'The query result cannot be downloaded.'
      LOG.exception(title)

      if hasattr(e, 'message') and e.message:
        message = e.message
      else:
        message = e
      raise PopupException(_(title), detail=message)

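  # Heuristic progress estimate parsed from the server logs: Hive progress
  # comes from the ratio of started/ended jobs to the announced job total,
  # Impala from the last reported completion percentage, and any other
  # engine reports a flat 50%.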
  @query_error_handler
  def progress(self, snippet, logs):
    if snippet['type'] == 'hive':
      match = re.search('Total jobs = (\d+)', logs, re.MULTILINE)
      total = int(match.group(1)) if match else 1

      started = logs.count('Starting Job')
      ended = logs.count('Ended Job')

      progress = int((started + ended) * 100 / (total * 2))
      return max(progress, 5)  # Return 5% progress as a minimum
    elif snippet['type'] == 'impala':
      match = re.findall('(\d+)% Complete', logs, re.MULTILINE)
      # Retrieve the last reported progress percentage if it exists
      return int(match[-1]) if match and isinstance(match, list) else 0
    else:
      return 50

  @query_error_handler
  def get_jobs(self, notebook, snippet, logs):
    jobs = []

    if snippet['type'] == 'hive':
      engine = self._get_hive_execution_engine(notebook, snippet)
      jobs_with_state = parse_out_jobs(logs, engine=engine, with_state=True)

      jobs = [{
        'name': job.get('job_id', ''),
        'url': reverse('jobbrowser.views.single_job', kwargs={'job': job.get('job_id', '')}),
        'started': job.get('started', False),
        'finished': job.get('finished', False)
      } for job in jobs_with_state]

    return jobs

  @query_error_handler
  def autocomplete(self, snippet, database=None, table=None, column=None, nested=None):
    db = self._get_db(snippet)
    return _autocomplete(db, database, table, column, nested)

  @query_error_handler
  def get_sample_data(self, snippet, database=None, table=None, column=None):
    db = self._get_db(snippet)
    return _get_sample_data(db, database, table, column)

  @query_error_handler
  def explain(self, notebook, snippet):
    db = self._get_db(snippet)
    response = self._get_current_statement(db, snippet)
    session = self._get_session(notebook, snippet['type'])

    query = self._prepare_hql_query(snippet, response.pop('statement'), session)

    try:
      explanation = db.explain(query)
    except QueryServerException, ex:
      raise QueryError(ex.message)

    return {
      'status': 0,
      'explanation': explanation.textual,
      'statement': query.get_query_statement(0),
    }

  @query_error_handler
  def export_data_as_hdfs_file(self, snippet, target_file, overwrite):
    db = self._get_db(snippet)

    handle = self._get_handle(snippet)
    max_cells = DOWNLOAD_CELL_LIMIT.get()

    upload(target_file, handle, self.request.user, db, self.request.fs, max_cells=max_cells)

    return '/filebrowser/view=%s' % target_file

  def export_data_as_table(self, notebook, snippet, destination, is_temporary=False, location=None):
    db = self._get_db(snippet)

    response = self._get_current_statement(db, snippet)
    session = self._get_session(notebook, snippet['type'])
    query = self._prepare_hql_query(snippet, response.pop('statement'), session)

    if 'select' not in query.hql_query.strip().lower():
      raise PopupException(_('Only SELECT statements can be saved. Provided statement: %(query)s') % {'query': query.hql_query})

    database = snippet.get('database') or 'default'
    table = destination

    if '.' in table:
      database, table = table.split('.', 1)

    db.use(query.database)

    hql = 'CREATE %sTABLE `%s`.`%s` %sAS %s' % ('TEMPORARY ' if is_temporary else '', database, table, "LOCATION '%s' " % location if location else '', query.hql_query)
    success_url = reverse('metastore:describe_table', kwargs={'database': database, 'table': table})

    return hql, success_url

  def export_large_data_to_hdfs(self, notebook, snippet, destination):
    db = self._get_db(snippet)

    response = self._get_current_statement(db, snippet)
    session = self._get_session(notebook, snippet['type'])
    query = self._prepare_hql_query(snippet, response.pop('statement'), session)

    if 'select' not in query.hql_query.strip().lower():
      raise PopupException(_('Only SELECT statements can be saved. Provided statement: %(query)s') % {'query': query.hql_query})

    db.use(query.database)

    hql = "INSERT OVERWRITE DIRECTORY '%s' %s" % (destination, query.hql_query)
    success_url = '/filebrowser/view=%s' % destination

    return hql, success_url

  def statement_risk(self, notebook, snippet):
    db = self._get_db(snippet)
    response = self._get_current_statement(db, snippet)
    query = response['statement']

    api = OptimizerApi()

    data = api.query_risk(query=query)
    data = data.get(snippet['type'] + 'Risk', {})

    return {
      'risk': data.get('risk'),
      'riskAnalysis': data.get('riskAnalysis'),
      'riskRecommendation': data.get('riskRecommendation')
    }

  def statement_compatibility(self, notebook, snippet, source_platform, target_platform):
    db = self._get_db(snippet)
    response = self._get_current_statement(db, snippet)
    query = response['statement']

    api = OptimizerApi()

    return api.query_compatibility(source_platform, target_platform, query)

  def upgrade_properties(self, lang='hive', properties=None):
    upgraded_properties = copy.deepcopy(self.get_properties(lang))

    # Check that current properties is a list of dictionary objects with 'key' and 'value' keys
    if not isinstance(properties, list) or \
        not all(isinstance(prop, dict) for prop in properties) or \
        not all('key' in prop for prop in properties) or not all('value' in prop for prop in properties):
      LOG.warn('Current properties are not formatted correctly, will replace with defaults.')
      return upgraded_properties

    valid_props_dict = dict((prop["key"], prop) for prop in upgraded_properties)
    curr_props_dict = dict((prop['key'], prop) for prop in properties)

    # Upgrade based on valid properties as needed
    if set(valid_props_dict.keys()) != set(curr_props_dict.keys()):
      settings = next((prop for prop in upgraded_properties if prop['key'] == 'settings'), None)
      if settings is not None and isinstance(properties, list):
        settings['value'] = properties
    else:  # No upgrade needed so return existing properties
      upgraded_properties = properties

    return upgraded_properties

  def _get_session(self, notebook, type='hive'):
    session = next((session for session in notebook['sessions'] if session['type'] == type), None)
    return session

  def _get_hive_execution_engine(self, notebook, snippet):
    # Get hive.execution.engine from snippet properties, if none, then get from session
    properties = snippet['properties']
    settings = properties.get('settings', [])

    if not settings:
      session = self._get_session(notebook, 'hive')
      if not session:
        LOG.warn('Cannot get jobs, failed to find active HS2 session for user: %s' % self.user.username)
      else:
        properties = session['properties']
        settings = next((prop['value'] for prop in properties if prop['key'] == 'settings'), None)

    if settings:
      engine = next((setting['value'] for setting in settings if setting['key'] == 'hive.execution.engine'), DEFAULT_HIVE_ENGINE)
    else:
      engine = DEFAULT_HIVE_ENGINE

    return engine

  def _get_statements(self, hql_query):
    hql_query = strip_trailing_semicolon(hql_query)
    hql_query_sio = StringIO.StringIO(hql_query)

    statements = []
    for (start_row, start_col), (end_row, end_col), statement in split_statements(hql_query_sio.read()):
      statements.append({
        'start': {
          'row': start_row,
          'column': start_col
        },
        'end': {
          'row': end_row,
          'column': end_col
        },
        'statement': strip_trailing_semicolon(statement.strip())
      })
    return statements

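  # Tracks the position inside a multi-statement snippet: closes the previous
  # operation if more statements are pending, advances statement_id, and
  # returns the statement to execute next together with its offsets and the
  # total statement count.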
  def _get_current_statement(self, db, snippet):
    # Multiquery, if not first statement or arrived to the last query
    statement_id = snippet['result']['handle'].get('statement_id', 0)
    statements_count = snippet['result']['handle'].get('statements_count', 1)

    if snippet['result']['handle'].get('has_more_statements'):
      try:
        handle = self._get_handle(snippet)
        db.close_operation(handle)  # Close all the time past multi queries
      except:
        LOG.warn('Could not close previous multiquery query')
      statement_id += 1
    else:
      statement_id = 0

    statements = self._get_statements(snippet['statement'])

    resp = {
      'statement_id': statement_id,
      'has_more_statements': statement_id < len(statements) - 1,
      'statements_count': len(statements)
    }

    if statements_count != len(statements):
      statement_id = 0

    resp.update(statements[statement_id])

    return resp

  def _prepare_hql_query(self, snippet, statement, session):
    settings = snippet['properties'].get('settings', None)
    file_resources = snippet['properties'].get('files', None)
    functions = snippet['properties'].get('functions', None)
    properties = session['properties'] if session else []

    # Get properties from session if not defined in snippet
    if not settings:
      settings = next((prop['value'] for prop in properties if prop['key'] == 'settings'), None)

    if not file_resources:
      file_resources = next((prop['value'] for prop in properties if prop['key'] == 'files'), None)

    if not functions:
      functions = next((prop['value'] for prop in properties if prop['key'] == 'functions'), None)

    database = snippet.get('database') or 'default'

    return hql_query(
      statement,
      query_type=QUERY_TYPES[0],
      settings=settings,
      file_resources=file_resources,
      functions=functions,
      database=database
    )

  def get_select_star_query(self, snippet, database, table):
    db = self._get_db(snippet)
    table = db.get_table(database, table)
    return db.get_select_star_query(database, table, limit=1000)

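  # Rebuilds a HiveServerQueryHandle from the handle stored on the snippet by
  # decoding the secret/guid pair and dropping any keys the handle
  # constructor does not accept.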
  def _get_handle(self, snippet):
    try:
      snippet['result']['handle']['secret'], snippet['result']['handle']['guid'] = HiveServerQueryHandle.get_decoded(snippet['result']['handle']['secret'], snippet['result']['handle']['guid'])
    except KeyError:
      raise Exception('Operation has no valid handle attached')

    for key in snippet['result']['handle'].keys():
      if key not in ('log_context', 'secret', 'has_result_set', 'operation_type', 'modified_row_count', 'guid'):
        snippet['result']['handle'].pop(key)

    return HiveServerQueryHandle(**snippet['result']['handle'])

  def _get_db(self, snippet):
    if snippet['type'] == 'hive':
      name = 'beeswax'
    elif snippet['type'] == 'impala':
      name = 'impala'
    else:
      name = 'sparksql'

    return dbms.get(self.user, query_server=get_query_server_config(name=name))

  def _parse_job_counters(self, job_id):
    # Attempt to fetch total records from the job's Hive counter
    total_records, total_size = None, None
    job = get_job(self.request, job_id=job_id)

    if not job or not job.counters:
      raise PopupException(_('Failed to get job details or job does not contain counters data.'))

    counter_groups = job.counters.get('counterGroup')  # Returns list of counter groups with 'counterGroupName' and 'counter'
    if counter_groups:
      # Extract totalCounterValue from HIVE counter group
      hive_counters = next((group for group in counter_groups if group.get('counterGroupName', '').upper() == 'HIVE'), None)
      if hive_counters:
        total_records = next((counter.get('totalCounterValue') for counter in hive_counters['counter'] if counter['name'] == 'RECORDS_OUT_0'), None)
      else:
        LOG.info("No HIVE counter group found for job: %s" % job_id)

      # Extract totalCounterValue from FileSystemCounter counter group
      fs_counters = next((group for group in counter_groups if group.get('counterGroupName') == 'org.apache.hadoop.mapreduce.FileSystemCounter'), None)
      if fs_counters:
        total_size = next((counter.get('totalCounterValue') for counter in fs_counters['counter'] if counter['name'] == 'HDFS_BYTES_WRITTEN'), None)
      else:
        LOG.info("No FileSystemCounter counter group found for job: %s" % job_id)

    return total_records, total_size

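  # Estimates the result size of a finished Hive query: on MapReduce from
  # the counters of the last launched job, on Spark by scraping the metrics
  # printed in the query logs.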
  def _get_hive_result_size(self, notebook, snippet):
    total_records, total_size, msg = None, None, None
    engine = self._get_hive_execution_engine(notebook, snippet).lower()
    logs = self.get_log(notebook, snippet, startFrom=0)

    if engine == 'mr':
      jobs = self.get_jobs(notebook, snippet, logs)
      if jobs:
        last_job_id = jobs[-1].get('name')
        LOG.info("Hive query executed %d jobs, last job is: %s" % (len(jobs), last_job_id))
        total_records, total_size = self._parse_job_counters(job_id=last_job_id)
      else:
        msg = _('Hive query did not execute any jobs.')
    elif engine == 'spark':
      total_records_re = "RECORDS_OUT_0: (?P<total_records>\d+)"
      total_size_re = "Spark Job\[[a-z0-9-]+\] Metrics[A-Za-z0-9:\s]+ResultSize: (?P<total_size>\d+)"
      total_records_match = re.search(total_records_re, logs, re.MULTILINE)
      total_size_match = re.search(total_size_re, logs, re.MULTILINE)

      if total_records_match:
        total_records = int(total_records_match.group('total_records'))
      if total_size_match:
        total_size = int(total_size_match.group('total_size'))

    return total_records, total_size, msg

  def _get_impala_result_size(self, notebook, snippet):
    total_records_match = None
    total_records, total_size, msg = None, None, None

    query_id = self._get_impala_query_id(snippet)
    session = Session.objects.get_session(self.user, application='impala')

    protocol = 'https' if impala_ssl_conf.ENABLED.get() else 'http'
    server_url = '%s://%s' % (protocol, self._get_impala_server_url(session))

    if query_id:
      LOG.info("Attempting to get Impala query profile at server_url %s for query ID: %s" % (server_url, query_id))

      fragment = self._get_impala_query_profile(server_url, query_id=query_id)
      total_records_re = "Coordinator Fragment F\d\d.+?RowsReturned: \d+(?:.\d+[KMB])? \((?P<total_records>\d+)\).*?Averaged Fragment F\d\d"
      total_records_match = re.search(total_records_re, fragment, re.MULTILINE | re.DOTALL)

    if total_records_match:
      total_records = int(total_records_match.group('total_records'))

    return total_records, total_size, msg

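  # Derives the Impala query ID by base64-decoding the operation handle guid
  # and formatting its two unpacked 64-bit values as colon-separated hex.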
  def _get_impala_query_id(self, snippet):
    guid = None
    if 'result' in snippet and 'handle' in snippet['result'] and 'guid' in snippet['result']['handle']:
      try:
        decoded_guid = base64.decodestring(snippet['result']['handle']['guid'])
        guid = "%x:%x" % struct.unpack(b"QQ", decoded_guid)
      except Exception, e:
        LOG.warn('Failed to decode operation handle guid: %s' % e)
    else:
      LOG.warn('Snippet does not contain a valid result handle, cannot extract Impala query ID.')
    return guid

  def _get_impala_server_url(self, session):
    impala_settings = session.get_formatted_properties()
    http_addr = next((setting['value'] for setting in impala_settings if setting['key'].lower() == 'http_addr'), None)
    return http_addr

  def _get_impala_query_profile(self, server_url, query_id):
    api = get_impalad_api(user=self.user, url=server_url)

    try:
      query_profile = api.get_query_profile(query_id)
      profile = query_profile.get('profile')
    except (RestException, ImpalaDaemonApiException), e:
      raise PopupException(_("Failed to get query profile from Impala Daemon server: %s") % e)

    if not profile:
      raise PopupException(_("Could not find profile in query profile response from Impala Daemon Server."))

    return profile