#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from builtins import next
from builtins import filter
from builtins import map
from builtins import object

import logging
import itertools
import json
import re
import sys

from operator import itemgetter

from django.utils.translation import ugettext as _

from desktop.lib import thrift_util
from desktop.conf import DEFAULT_USER
from desktop.models import Document2

from beeswax import conf
from TCLIService import TCLIService
from TCLIService.ttypes import TOpenSessionReq, TGetTablesReq, TFetchResultsReq,\
  TStatusCode, TGetResultSetMetadataReq, TGetColumnsReq, TTypeId,\
  TExecuteStatementReq, TGetOperationStatusReq, TFetchOrientation,\
  TCloseSessionReq, TGetSchemasReq, TGetLogReq, TCancelOperationReq,\
  TCloseOperationReq, TFetchResultsResp, TRowSet

from beeswax import conf as beeswax_conf
from beeswax import hive_site
from beeswax.hive_site import hiveserver2_use_ssl
from beeswax.conf import CONFIG_WHITELIST, LIST_PARTITIONS_LIMIT
from beeswax.models import Session, HiveServerQueryHandle, HiveServerQueryHistory, QueryHistory
from beeswax.server.dbms import Table, DataTable, QueryServerException


LOG = logging.getLogger(__name__)

IMPALA_RESULTSET_CACHE_SIZE = 'impala.resultset.cache.size'
DEFAULT_USER = DEFAULT_USER.get()


class HiveServerTable(Table):
  """
  We get the table details from a DESCRIBE FORMATTED.
  """

  def __init__(self, table_results, table_schema, desc_results, desc_schema):
    if beeswax_conf.THRIFT_VERSION.get() >= 7:
      if not table_results.columns:
        raise QueryServerException('No table columns')
      self.table = table_results.columns
    else:  # Deprecated. To remove in Hue 4.
      if not table_results.rows:
        raise QueryServerException('No table rows')
      self.table = table_results.rows and table_results.rows[0] or ''

    self.table_schema = table_schema
    self.desc_results = desc_results
    self.desc_schema = desc_schema
    self.is_impala_only = False  # Aka Kudu

    self.describe = HiveServerTTableSchema(self.desc_results, self.desc_schema).cols()
    self._details = None

  @property
  def name(self):
    return HiveServerTRow(self.table, self.table_schema).col('TABLE_NAME')

  @property
  def is_view(self):
    return HiveServerTRow(self.table, self.table_schema).col('TABLE_TYPE') == 'VIEW'

  @property
  def partition_keys(self):
    try:
      return [PartitionKeyCompatible(row['col_name'], row['data_type'], row['comment']) for row in self._get_partition_column()]
    except:
      LOG.exception('failed to get partition keys')
      return []

  @property
  def path_location(self):
    if self.is_impala_only:
      return None

    try:
      rows = self.describe
      rows = [row for row in rows if row['col_name'].startswith('Location:')]
      if rows:
        return rows[0]['data_type']
    except:
      LOG.exception('failed to get path location')
    return None

  @property
  def cols(self):
    rows = self.describe
    col_row_index = 1
    try:
      cols = list(map(itemgetter('col_name'), rows[col_row_index:]))
      if cols.index('') == 0:  # TEZ starts at 1 vs Hive, Impala starts at 2
        col_row_index = col_row_index + 1
        cols.pop(0)
      end_cols_index = cols.index('')
      return rows[col_row_index:][:end_cols_index] + self._get_partition_column()
    except ValueError:  # DESCRIBE on columns and nested columns does not always contain additional rows beyond cols
      return rows[col_row_index:]
    except:
      # Impala does not have it
      return rows

  def _get_partition_column(self):
    rows = self.describe
    try:
      col_row_index = list(map(itemgetter('col_name'), rows)).index('# Partition Information') + 3
      end_cols_index = list(map(itemgetter('col_name'), rows[col_row_index:])).index('')
      return rows[col_row_index:][:end_cols_index]
    except:
      # Impala does not have it
      return []

  @property
  def comment(self):
    return HiveServerTRow(self.table, self.table_schema).col('REMARKS')

  @property
  def properties(self):
    rows = self.describe
    col_row_index = 2
    try:
      end_cols_index = list(map(itemgetter('col_name'), rows[col_row_index:])).index('')
    except ValueError as e:
      end_cols_index = 5000
      LOG.warn('Could not guess end column index, so defaulting to %s: %s' % (end_cols_index, e))
    return [{
        'col_name': prop['col_name'].strip() if prop['col_name'] else prop['col_name'],
        'data_type': prop['data_type'].strip() if prop['data_type'] else prop['data_type'],
        'comment': prop['comment'].strip() if prop['comment'] else prop['comment']
      } for prop in rows[col_row_index + end_cols_index + 1:]
    ]

  @property
  def stats(self):
    try:
      rows = self.properties
      col_row_index = list(map(itemgetter('col_name'), rows)).index('Table Parameters:') + 1
      end_cols_index = list(map(itemgetter('data_type'), rows[col_row_index:])).index(None)
      return rows[col_row_index:][:end_cols_index]
    except:
      LOG.exception('Table stats could not be retrieved')
      return []

  @property
  def storage_details(self):
    rows = self.properties
    col_row_index = list(map(itemgetter('col_name'), rows)).index('Storage Desc Params:') + 1
    return rows[col_row_index:][:col_row_index + 2]

  @property
  def has_complex(self):
    has_complex = False
    complex_types = ["struct", "array", "map", "uniontype"]
    patterns = [re.compile(typ) for typ in complex_types]

    for column in self.cols:
      if isinstance(column, dict) and 'data_type' in column:
        column_type = column['data_type']
      else:  # Col object
        column_type = column.type
      if column_type and any(p.match(column_type.lower()) for p in patterns):
        has_complex = True
        break

    return has_complex

  @property
  def details(self):
    if self._details is None:
      props = dict([(stat['col_name'], stat['data_type']) for stat in self.properties if stat['col_name'] != 'Table Parameters:'])
      serde = props.get('SerDe Library:', '')

      self._details = {
        'stats': dict([(stat['data_type'], stat['comment']) for stat in self.stats]),
        'properties': {
          'owner': props.get('Owner:'),
          'create_time': props.get('CreateTime:'),
          'table_type': props.get('Table Type:', 'MANAGED_TABLE'),
          'format': 'parquet' if 'ParquetHiveSerDe' in serde else ('text' if 'LazySimpleSerDe' in serde else ('kudu' if self.is_impala_only else serde.rsplit('.', 1)[-1])),
        }
      }

    return self._details
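
# Illustrative sketch, not used by the module: HiveServerTable parses the rows of
# a DESCRIBE FORMATTED result, where rows with a blank 'col_name' act as section
# separators. With hypothetical describe rows such as:
#
#   rows = [
#     {'col_name': '# col_name', 'data_type': 'data_type', 'comment': 'comment'},  # 0
#     {'col_name': 'id', 'data_type': 'int', 'comment': ''},                       # 1
#     {'col_name': 'name', 'data_type': 'string', 'comment': ''},                  # 2
#     {'col_name': '', 'data_type': None, 'comment': None},                        # 3
#     {'col_name': '# Partition Information', 'data_type': None, 'comment': None}, # 4
#     {'col_name': '# col_name', 'data_type': 'data_type', 'comment': 'comment'},  # 5
#     {'col_name': '', 'data_type': None, 'comment': None},                        # 6
#     {'col_name': 'ds', 'data_type': 'string', 'comment': ''},                    # 7
#     {'col_name': '', 'data_type': None, 'comment': None},                        # 8
#   ]
#
# `cols` scans from index 1 up to the first blank 'col_name' (rows 1-2 here) and
# appends the partition columns, while `_get_partition_column` jumps 3 rows past
# '# Partition Information' (to row 7) to skip the repeated header.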


class HiveServerTRowSet2(object):
  def __init__(self, row_set, schema):
    self.row_set = row_set
    self.rows = row_set.rows
    self.schema = schema
    self.startRowOffset = row_set.startRowOffset

  def is_empty(self):
    return not self.row_set.columns or not HiveServerTColumnValue2(self.row_set.columns[0]).val

  def cols(self, col_names):
    cols_rows = []

    rs = HiveServerTRow2(self.row_set.columns, self.schema)
    cols = [rs.full_col(name) for name in col_names]

    for cols_row in zip(*cols):
      cols_rows.append(dict(zip(col_names, cols_row)))

    return cols_rows

  def __iter__(self):
    return self

  def __next__(self):
    if self.row_set.columns:
      return HiveServerTRow2(self.row_set.columns, self.schema)
    else:
      raise StopIteration


class HiveServerTRow2(object):
  def __init__(self, cols, schema):
    self.cols = cols
    self.schema = schema

  def col(self, colName):
    pos = self._get_col_position(colName)
    return HiveServerTColumnValue2(self.cols[pos]).val[0]  # Return only first element

  def full_col(self, colName):
    pos = self._get_col_position(colName)
    return HiveServerTColumnValue2(self.cols[pos]).val  # Return the full column and its values

  def _get_col_position(self, column_name):
    return list(filter(lambda i_col1: i_col1[1].columnName == column_name, enumerate(self.schema.columns)))[0][0]

  def fields(self):
    try:
      return [HiveServerTColumnValue2(field).val.pop(0) for field in self.cols]
    except IndexError:
      raise StopIteration


class HiveServerTColumnValue2(object):
  def __init__(self, tcolumn_value):
    self.column_value = tcolumn_value

  @property
  def val(self):
    # Could directly get index from schema but would need to cache the schema
    if self.column_value.stringVal:
      return self._get_val(self.column_value.stringVal)
    elif self.column_value.i16Val is not None:
      return self._get_val(self.column_value.i16Val)
    elif self.column_value.i32Val is not None:
      return self._get_val(self.column_value.i32Val)
    elif self.column_value.i64Val is not None:
      return self._get_val(self.column_value.i64Val)
    elif self.column_value.doubleVal is not None:
      return self._get_val(self.column_value.doubleVal)
    elif self.column_value.boolVal is not None:
      return self._get_val(self.column_value.boolVal)
    elif self.column_value.byteVal is not None:
      return self._get_val(self.column_value.byteVal)
    elif self.column_value.binaryVal is not None:
      return self._get_val(self.column_value.binaryVal)

  @classmethod
  def _get_val(cls, column):
    column.values = cls.set_nulls(column.values, column.nulls)
    column.nulls = ''  # Clear the nulls so the column is not re-marked with nulls on the next call
    return column.values

  @classmethod
  def mark_nulls(cls, values, bytestring):
    if sys.version_info[0] < 3 or isinstance(bytestring, bytes):
      mask = bytearray(bytestring)
    else:
      mask = bytearray(bytestring, 'utf-8')

    for n in mask:
      yield n & 0x01
      yield n & 0x02
      yield n & 0x04
      yield n & 0x08
      yield n & 0x10
      yield n & 0x20
      yield n & 0x40
      yield n & 0x80

  @classmethod
  def set_nulls(cls, values, bytestring):
    can_decode = True
    if sys.version_info[0] == 3 and isinstance(bytestring, bytes):
      try:
        bytestring = bytestring.decode('utf-8')
      except:
        can_decode = False

    if bytestring == '' or (can_decode and re.match('^(\x00)+$', bytestring)):  # HS2 has just \x00 or '', Impala can have \x00\x00...
      return values
    else:
      _values = [None if is_null else value for value, is_null in zip(values, cls.mark_nulls(values, bytestring))]
      if len(values) != len(_values):  # HS2 can have just \x00\x01 instead of \x00\x01\x00...
        _values.extend(values[len(_values):])
      return _values
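
# Illustrative sketch, not used by the module: the nulls field is a bitmask over
# the column values, least significant bit first. For a hypothetical column of
# four values with mask byte \x05 (binary 00000101), values 0 and 2 are NULL:
#
#   HiveServerTColumnValue2.set_nulls(['a', 'b', 'c', 'd'], '\x05')
#   # -> [None, 'b', None, 'd']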


class HiveServerDataTable(DataTable):
  def __init__(self, results, schema, operation_handle, query_server):
    self.schema = schema and schema.schema
    self.row_set = HiveServerTRowSet(results.results, schema)
    self.operation_handle = operation_handle
    if query_server['server_name'].startswith('impala'):
      self.has_more = results.hasMoreRows
    else:
      self.has_more = not self.row_set.is_empty()  # Should be results.hasMoreRows but always True in HS2
    self.startRowOffset = self.row_set.startRowOffset  # Always 0 in HS2

  @property
  def ready(self):
    return True

  def cols(self):
    if self.schema:
      return [HiveServerTColumnDesc(col) for col in self.schema.columns]
    else:
      return []

  def rows(self):
    for row in self.row_set:
      yield row.fields()


class HiveServerTTableSchema(object):
  def __init__(self, columns, schema):
    self.columns = columns
    self.schema = schema

  def cols(self):
    try:
      return HiveServerTRowSet(self.columns, self.schema).cols(('col_name', 'data_type', 'comment'))
    except:
      # Impala API is different
      cols = HiveServerTRowSet(self.columns, self.schema).cols(('name', 'type', 'comment'))
      for col in cols:
        col['col_name'] = col.pop('name')
        col['data_type'] = col.pop('type')
      return cols

  def col(self, colName):
    pos = self._get_col_position(colName)
    return HiveServerTColumnDesc(self.columns[pos]).val

  def _get_col_position(self, column_name):
    return list(filter(lambda i_col2: i_col2[1].columnName == column_name, enumerate(self.schema.columns)))[0][0]


if hasattr(beeswax_conf.THRIFT_VERSION, 'get') and beeswax_conf.THRIFT_VERSION.get() >= 7:
  HiveServerTRow = HiveServerTRow2
  HiveServerTRowSet = HiveServerTRowSet2
else:
  # Deprecated. To remove in Hue 4.
  class HiveServerTRow(object):
    def __init__(self, row, schema):
      self.row = row
      self.schema = schema

    def col(self, colName):
      pos = self._get_col_position(colName)
      return HiveServerTColumnValue(self.row.colVals[pos]).val

    def _get_col_position(self, column_name):
      return list(filter(lambda i_col: i_col[1].columnName == column_name, enumerate(self.schema.columns)))[0][0]

    def fields(self):
      return [HiveServerTColumnValue(field).val for field in self.row.colVals]

  class HiveServerTRowSet(object):
    def __init__(self, row_set, schema):
      self.row_set = row_set
      self.rows = row_set.rows
      self.schema = schema
      self.startRowOffset = row_set.startRowOffset

    def is_empty(self):
      return len(self.rows) == 0

    def cols(self, col_names):
      cols_rows = []
      for row in self.rows:
        row = HiveServerTRow(row, self.schema)
        cols = {}
        for col_name in col_names:
          cols[col_name] = row.col(col_name)
        cols_rows.append(cols)
      return cols_rows

    def __iter__(self):
      return self

    def __next__(self):
      if self.rows:
        return HiveServerTRow(self.rows.pop(0), self.schema)
      else:
        raise StopIteration


class HiveServerTColumnValue(object):
  def __init__(self, tcolumn_value):
    self.column_value = tcolumn_value

  @property
  def val(self):
    if self.column_value.boolVal is not None:
      return self.column_value.boolVal.value
    elif self.column_value.byteVal is not None:
      return self.column_value.byteVal.value
    elif self.column_value.i16Val is not None:
      return self.column_value.i16Val.value
    elif self.column_value.i32Val is not None:
      return self.column_value.i32Val.value
    elif self.column_value.i64Val is not None:
      return self.column_value.i64Val.value
    elif self.column_value.doubleVal is not None:
      return self.column_value.doubleVal.value
    elif self.column_value.stringVal is not None:
      return self.column_value.stringVal.value


class HiveServerTColumnDesc(object):
  def __init__(self, column):
    self.column = column

  @property
  def name(self):
    return self.column.columnName

  @property
  def comment(self):
    return self.column.comment

  @property
  def type(self):
    return self.get_type(self.column.typeDesc)

  @classmethod
  def get_type(cls, typeDesc):
    for ttype in typeDesc.types:
      if ttype.primitiveEntry is not None:
        return TTypeId._VALUES_TO_NAMES[ttype.primitiveEntry.type]
      elif ttype.mapEntry is not None:
        return ttype.mapEntry
      elif ttype.unionEntry is not None:
        return ttype.unionEntry
      elif ttype.arrayEntry is not None:
        return ttype.arrayEntry
      elif ttype.structEntry is not None:
        return ttype.structEntry
      elif ttype.userDefinedTypeEntry is not None:
        return ttype.userDefinedTypeEntry


class HiveServerClient(object):
  HS2_MECHANISMS = {
    'KERBEROS': 'GSSAPI',
    'NONE': 'PLAIN',
    'NOSASL': 'NOSASL',
    'LDAP': 'PLAIN',
    'PAM': 'PLAIN',
    'CUSTOM': 'PLAIN',
  }

  DEFAULT_TABLE_TYPES = [
    'TABLE',
    'VIEW',
    'EXTERNAL_TABLE',
  ]

  def __init__(self, query_server, user):
    self.query_server = query_server
    self.user = user
    self.coordinator_host = ''

    use_sasl, mechanism, kerberos_principal_short_name, impersonation_enabled, auth_username, auth_password = self.get_security()
    LOG.info(
        '%s: server_host=%s, use_sasl=%s, mechanism=%s, kerberos_principal_short_name=%s, impersonation_enabled=%s, auth_username=%s' % (
        self.query_server['server_name'], self.query_server['server_host'], use_sasl, mechanism, kerberos_principal_short_name, impersonation_enabled, auth_username)
    )

    self.use_sasl = use_sasl
    self.kerberos_principal_short_name = kerberos_principal_short_name
    self.impersonation_enabled = impersonation_enabled

    if self.query_server['server_name'].startswith('impala'):
      from impala import conf as impala_conf

      ssl_enabled = impala_conf.SSL.ENABLED.get()
      ca_certs = impala_conf.SSL.CACERTS.get()
      keyfile = impala_conf.SSL.KEY.get()
      certfile = impala_conf.SSL.CERT.get()
      validate = impala_conf.SSL.VALIDATE.get()
      timeout = impala_conf.SERVER_CONN_TIMEOUT.get()
    else:
      ssl_enabled = hiveserver2_use_ssl()
      ca_certs = beeswax_conf.SSL.CACERTS.get()
      keyfile = beeswax_conf.SSL.KEY.get()
      certfile = beeswax_conf.SSL.CERT.get()
      validate = beeswax_conf.SSL.VALIDATE.get()
      timeout = beeswax_conf.SERVER_CONN_TIMEOUT.get()

    if auth_username:
      username = auth_username
      password = auth_password
    else:
      username = user.username
      password = None

    thrift_class = TCLIService
    if self.query_server['server_name'].startswith('impala'):
      from ImpalaService import ImpalaHiveServer2Service
      thrift_class = ImpalaHiveServer2Service

    LOG.debug('Using %s for host_name %s' % (thrift_class, query_server['server_host']))

    self._client = thrift_util.get_client(
        thrift_class.Client,
        query_server['server_host'],
        query_server['server_port'],
        service_name=query_server['server_name'],
        kerberos_principal=kerberos_principal_short_name,
        use_sasl=use_sasl,
        mechanism=mechanism,
        username=username,
        password=password,
        timeout_seconds=timeout,
        use_ssl=ssl_enabled,
        ca_certs=ca_certs,
        keyfile=keyfile,
        certfile=certfile,
        validate=validate,
        transport_mode=query_server.get('transport_mode', 'socket'),
        http_url=query_server.get('http_url', ''),
        coordinator_host=self.coordinator_host
    )

  def get_security(self):
    principal = self.query_server['principal']
    impersonation_enabled = False
    auth_username = self.query_server['auth_username']  # Pass-through LDAP/PAM authentication
    auth_password = self.query_server['auth_password']

    if principal:
      kerberos_principal_short_name = principal.split('/', 1)[0]
    else:
      kerberos_principal_short_name = None

    use_sasl = self.query_server['use_sasl']
    if self.query_server['server_name'].startswith('impala'):
      if auth_password:  # Force LDAP/PAM.. auth if auth_password is provided
        mechanism = HiveServerClient.HS2_MECHANISMS['NONE']
      else:
        mechanism = HiveServerClient.HS2_MECHANISMS['KERBEROS']
      impersonation_enabled = self.query_server['impersonation_enabled']
    else:
      hive_mechanism = hive_site.get_hiveserver2_authentication()
      if hive_mechanism not in HiveServerClient.HS2_MECHANISMS:
        raise Exception(_('%s server authentication not supported. Valid are %s.') % (hive_mechanism, list(HiveServerClient.HS2_MECHANISMS.keys())))
      mechanism = HiveServerClient.HS2_MECHANISMS[hive_mechanism]
      impersonation_enabled = hive_site.hiveserver2_impersonation_enabled()

    return use_sasl, mechanism, kerberos_principal_short_name, impersonation_enabled, auth_username, auth_password
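
  # Illustrative note, not executed: for a non-Impala server with hive-site.xml
  # authentication set to LDAP, get_security() resolves the SASL mechanism via
  # the class map, e.g. HiveServerClient.HS2_MECHANISMS['LDAP'] == 'PLAIN', and
  # a Kerberos principal like 'hive/host@REALM' shortens to 'hive'.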

  def open_session(self, user):
    self.user = user
    kwargs = {
      'client_protocol': beeswax_conf.THRIFT_VERSION.get() - 1,
      'username': user.username,  # If SASL or LDAP, the username is picked up from the authentication mechanism since it depends on it
      'configuration': {},
    }

    if self.impersonation_enabled:
      kwargs.update({'username': DEFAULT_USER})
      if self.query_server['server_name'].startswith('impala'):  # Only when Impala accepts it
        kwargs['configuration'].update({'impala.doas.user': user.username})

    if self.query_server['server_name'] == 'beeswax':  # All the time
      kwargs['configuration'].update({'hive.server2.proxy.user': user.username})

    if self.query_server['server_name'] == 'llap':  # All the time
      kwargs['configuration'].update({'hive.server2.proxy.user': user.username})

    if self.query_server['server_name'] == 'sparksql':  # All the time
      kwargs['configuration'].update({'hive.server2.proxy.user': user.username})

    if self.query_server['server_name'].startswith('impala') and self.query_server['SESSION_TIMEOUT_S'] > 0:
      kwargs['configuration'].update({'idle_session_timeout': str(self.query_server['SESSION_TIMEOUT_S'])})

    LOG.info('Opening %s thrift session for user %s' % (self.query_server['server_name'], user.username))

    req = TOpenSessionReq(**kwargs)
    res = self._client.OpenSession(req)
    self.coordinator_host = self._client.get_coordinator_host()
    if self.coordinator_host:
      res.configuration['coordinator_host'] = self.coordinator_host

    if res.status is not None and res.status.statusCode not in (TStatusCode.SUCCESS_STATUS,):
      if hasattr(res.status, 'errorMessage') and res.status.errorMessage:
        message = res.status.errorMessage
      else:
        message = ''
      raise QueryServerException(Exception('Bad status for request %s:\n%s' % (req, res)), message=message)

    sessionId = res.sessionHandle.sessionId
    LOG.info('Session %s opened' % repr(sessionId.guid))

    encoded_status, encoded_guid = HiveServerQueryHandle(secret=sessionId.secret, guid=sessionId.guid).get()
    properties = json.dumps(res.configuration)

    session = Session.objects.create(
        owner=user,
        application=self.query_server['server_name'],
        status_code=res.status.statusCode,
        secret=encoded_status,
        guid=encoded_guid,
        server_protocol_version=res.serverProtocolVersion,
        properties=properties
    )

    # HS2 does not return properties in TOpenSessionResp
    # TEZ returns properties, but we need the configuration to detect engine
    properties = session.get_properties()
    if not properties or self.query_server['server_name'] == 'beeswax':
      configuration = self.get_configuration()
      properties.update(configuration)
      session.properties = json.dumps(properties)
      session.save()

    return session

  def call(self, fn, req, status=TStatusCode.SUCCESS_STATUS, with_multiple_session=False):  # Note: with_multiple_session is currently ignored
    (res, session) = self.call_return_result_and_session(fn, req, status, with_multiple_session)
    return res

  def call_return_result_and_session(self, fn, req, status=TStatusCode.SUCCESS_STATUS, with_multiple_session=False):
    n_sessions = conf.MAX_NUMBER_OF_SESSIONS.get()
    # When only a single session is allowed, skip the multiple session logic
    with_multiple_session = n_sessions > 1

    session = None
    if not with_multiple_session:
      # Default behaviour: get one session
      session = Session.objects.get_session(self.user, self.query_server['server_name'])
    else:
      session = self._get_tez_session(n_sessions)

    if session is None:
      session = self.open_session(self.user)

    if hasattr(req, 'sessionHandle') and req.sessionHandle is None:
      req.sessionHandle = session.get_handle()

    res = fn(req)

    # Not supported currently in HS2 and Impala: TStatusCode.INVALID_HANDLE_STATUS
    if res.status.statusCode == TStatusCode.ERROR_STATUS and \
        re.search('Invalid SessionHandle|Invalid session|Client session expired', res.status.errorMessage or '', re.I):
      LOG.info('Retrying with a new session for %s because of %s' % (self.user, res))
      session.status_code = TStatusCode.INVALID_HANDLE_STATUS
      session.save()

      session = self.open_session(self.user)
      req.sessionHandle = session.get_handle()

      # Get back the name of the function to call
      res = getattr(self._client, fn.attr)(req)

    if status is not None and res.status.statusCode not in (
        TStatusCode.SUCCESS_STATUS, TStatusCode.SUCCESS_WITH_INFO_STATUS, TStatusCode.STILL_EXECUTING_STATUS):
      if hasattr(res.status, 'errorMessage') and res.status.errorMessage:
        message = res.status.errorMessage
      else:
        message = ''
      raise QueryServerException(Exception('Bad status for request %s:\n%s' % (req, res)), message=message)
    else:
      return (res, session)

  def _get_tez_session(self, n_sessions):
    # Get 2 + n_sessions sessions and filter out the busy ones
    sessions = Session.objects.get_n_sessions(self.user, n=2 + n_sessions, application=self.query_server['server_name'])
    LOG.debug('%s sessions found' % len(sessions))

    if sessions:
      # Include trashed documents to keep the query lazy and avoid retrieving all documents
      docs = Document2.objects.get_history(doc_type='query-hive', user=self.user, include_trashed=True)
      busy_sessions = set()

      # Only check last 40 documents for performance
      for doc in docs[:40]:
        try:
          snippet_data = json.loads(doc.data)['snippets'][0]
        except (KeyError, IndexError):
          # data might not contain a 'snippets' field or it might be empty
          LOG.warn('No snippets in Document2 object of type query-hive')
          continue
        session_guid = snippet_data.get('result', {}).get('handle', {}).get('session_guid')
        status = snippet_data.get('status')

        if status in [str(QueryHistory.STATE.submitted), str(QueryHistory.STATE.running)]:
          if session_guid is not None and session_guid not in busy_sessions:
            busy_sessions.add(session_guid)

      n_busy_sessions = 0
      available_sessions = []
      for session in sessions:
        if session.guid not in busy_sessions:
          available_sessions.append(session)
        else:
          n_busy_sessions += 1

      if n_busy_sessions == n_sessions:
        raise Exception('Too many open sessions. Stop a running query before starting a new one')

      if available_sessions:
        session = available_sessions[0]
      else:
        session = None  # No available session found

      return session

  def close_session(self, sessionHandle):
    req = TCloseSessionReq(sessionHandle=sessionHandle)
    return self._client.CloseSession(req)

  def get_databases(self, schemaName=None):
    # GetCatalogs() is not implemented in HS2
    req = TGetSchemasReq()
    if schemaName is not None:
      req.schemaName = schemaName
    if self.query_server['server_name'].startswith('impala'):
      req.schemaName = None

    res = self.call(self._client.GetSchemas, req)

    results, schema = self.fetch_result(res.operationHandle, orientation=TFetchOrientation.FETCH_NEXT, max_rows=5000)
    self.close_operation(res.operationHandle)

    col = 'TABLE_SCHEM'
    return HiveServerTRowSet(results.results, schema.schema).cols((col,))

  def get_database(self, database):
    query = 'DESCRIBE DATABASE EXTENDED `%s`' % (database)
    (desc_results, desc_schema), operation_handle = self.execute_statement(query, max_rows=5000, orientation=TFetchOrientation.FETCH_NEXT)
    self.close_operation(operation_handle)

    if self.query_server['server_name'].startswith('impala'):
      cols = ('name', 'location', 'comment')  # Skip owner as on a new line
    else:
      cols = ('db_name', 'comment', 'location', 'owner_name', 'owner_type', 'parameters')

    # try:
    #   if len(HiveServerTRowSet(desc_results.results, desc_schema.schema).cols(cols)) != 1:
    #     raise ValueError(_("%(query)s returned more than 1 row") % {'query': query})
    # except Exception, e:
    #   print e
    #   raise e

    return HiveServerTRowSet(desc_results.results, desc_schema.schema).cols(cols)[0]  # Should only contain one row

  def get_tables_meta(self, database, table_names, table_types=None):
    if not table_types:
      table_types = self.DEFAULT_TABLE_TYPES
    req = TGetTablesReq(schemaName=database, tableName=table_names, tableTypes=table_types)
    res = self.call(self._client.GetTables, req)

    results, schema = self.fetch_result(res.operationHandle, orientation=TFetchOrientation.FETCH_NEXT, max_rows=5000)
    self.close_operation(res.operationHandle)

    cols = ('TABLE_NAME', 'TABLE_TYPE', 'REMARKS')
    return HiveServerTRowSet(results.results, schema.schema).cols(cols)

  def get_tables(self, database, table_names, table_types=None):
    if not table_types:
      table_types = self.DEFAULT_TABLE_TYPES
    req = TGetTablesReq(schemaName=database, tableName=table_names, tableTypes=table_types)
    res = self.call(self._client.GetTables, req)

    results, schema = self.fetch_result(res.operationHandle, orientation=TFetchOrientation.FETCH_NEXT, max_rows=5000)
    self.close_operation(res.operationHandle)

    return HiveServerTRowSet(results.results, schema.schema).cols(('TABLE_NAME',))

  def get_table(self, database, table_name, partition_spec=None):
    req = TGetTablesReq(schemaName=database.lower(), tableName=table_name.lower())  # Impala returns empty if not lower case
    res = self.call(self._client.GetTables, req)

    table_results, table_schema = self.fetch_result(res.operationHandle, orientation=TFetchOrientation.FETCH_NEXT)
    self.close_operation(res.operationHandle)

    if partition_spec:
      query = 'DESCRIBE FORMATTED `%s`.`%s` PARTITION(%s)' % (database, table_name, partition_spec)
    else:
      query = 'DESCRIBE FORMATTED `%s`.`%s`' % (database, table_name)

    try:
      (desc_results, desc_schema), operation_handle = self.execute_statement(query, max_rows=10000, orientation=TFetchOrientation.FETCH_NEXT)
      self.close_operation(operation_handle)
    except Exception as e:
      ex_string = str(e)
      if 'cannot find field' in ex_string:  # Workaround until Hive 2.0 and HUE-3751
        (desc_results, desc_schema), operation_handle = self.execute_statement('USE `%s`' % database)
        self.close_operation(operation_handle)
        if partition_spec:
          query = 'DESCRIBE FORMATTED `%s` PARTITION(%s)' % (table_name, partition_spec)
        else:
          query = 'DESCRIBE FORMATTED `%s`' % table_name
        (desc_results, desc_schema), operation_handle = self.execute_statement(query, max_rows=10000, orientation=TFetchOrientation.FETCH_NEXT)
        self.close_operation(operation_handle)
      elif 'not have privileges for DESCTABLE' in ex_string or 'AuthorizationException' in ex_string:  # HUE-5608: No table permission but some column permissions
        query = 'DESCRIBE `%s`.`%s`' % (database, table_name)
        (desc_results, desc_schema), operation_handle = self.execute_statement(query, max_rows=10000, orientation=TFetchOrientation.FETCH_NEXT)
        self.close_operation(operation_handle)

        desc_results.results.columns[0].stringVal.values.insert(0, '# col_name')
        desc_results.results.columns[0].stringVal.values.insert(1, '')
        desc_results.results.columns[1].stringVal.values.insert(0, 'data_type')
        desc_results.results.columns[1].stringVal.values.insert(1, None)
        desc_results.results.columns[2].stringVal.values.insert(0, 'comment')
        desc_results.results.columns[2].stringVal.values.insert(1, None)

        try:
          part_index = desc_results.results.columns[0].stringVal.values.index('# Partition Information')
          # Strip duplicate columns of partitioned tables
          desc_results.results.columns[0].stringVal.values = desc_results.results.columns[0].stringVal.values[:part_index]
          desc_results.results.columns[1].stringVal.values = desc_results.results.columns[1].stringVal.values[:part_index]
          desc_results.results.columns[2].stringVal.values = desc_results.results.columns[2].stringVal.values[:part_index]
          desc_results.results.columns[1].stringVal.nulls = ''  # Important to not clear the last two types
          desc_results.results.columns[1].stringVal.values[-1] = None
          desc_results.results.columns[2].stringVal.values[-1] = None
        except ValueError:
          desc_results.results.columns[0].stringVal.values.append('')
          desc_results.results.columns[1].stringVal.values.append(None)
          desc_results.results.columns[2].stringVal.values.append(None)
      else:
        raise e

    return HiveServerTable(table_results.results, table_schema.schema, desc_results.results, desc_schema.schema)

  def execute_query(self, query, max_rows=1000):
    configuration = self._get_query_configuration(query)
    return self.execute_query_statement(statement=query.query['query'], max_rows=max_rows, configuration=configuration)

  def execute_query_statement(self, statement, max_rows=1000, configuration={}, orientation=TFetchOrientation.FETCH_FIRST,
                              close_operation=False):
    (results, schema), operation_handle = self.execute_statement(statement=statement, max_rows=max_rows, configuration=configuration, orientation=orientation)

    if close_operation:
      self.close_operation(operation_handle)

    return HiveServerDataTable(results, schema, operation_handle, self.query_server)

  def execute_async_query(self, query, statement=0, with_multiple_session=False):
    if statement == 0:
      # Impala just has settings currently
      if self.query_server['server_name'] == 'beeswax':
        for resource in query.get_configuration_statements():
          self.execute_statement(resource.strip())

    configuration = {}

    if self.query_server['server_name'].startswith('impala') and self.query_server['querycache_rows'] > 0:
      configuration[IMPALA_RESULTSET_CACHE_SIZE] = str(self.query_server['querycache_rows'])

    # The query can override the default configuration
    configuration.update(self._get_query_configuration(query))
    query_statement = query.get_query_statement(statement)

    return self.execute_async_statement(statement=query_statement, confOverlay=configuration, with_multiple_session=with_multiple_session)

  def execute_statement(self, statement, max_rows=1000, configuration={}, orientation=TFetchOrientation.FETCH_NEXT):
    if self.query_server['server_name'].startswith('impala') and self.query_server['QUERY_TIMEOUT_S'] > 0:
      configuration['QUERY_TIMEOUT_S'] = str(self.query_server['QUERY_TIMEOUT_S'])

    req = TExecuteStatementReq(statement=statement.encode('utf-8'), confOverlay=configuration)
    res = self.call(self._client.ExecuteStatement, req)

    return self.fetch_result(res.operationHandle, max_rows=max_rows, orientation=orientation), res.operationHandle

  def execute_async_statement(self, statement, confOverlay, with_multiple_session=False):
    if self.query_server['server_name'].startswith('impala') and self.query_server['QUERY_TIMEOUT_S'] > 0:
      confOverlay['QUERY_TIMEOUT_S'] = str(self.query_server['QUERY_TIMEOUT_S'])

    req = TExecuteStatementReq(statement=statement.encode('utf-8'), confOverlay=confOverlay, runAsync=True)
    (res, session) = self.call_return_result_and_session(self._client.ExecuteStatement, req, with_multiple_session=with_multiple_session)

    return HiveServerQueryHandle(
        secret=res.operationHandle.operationId.secret,
        guid=res.operationHandle.operationId.guid,
        operation_type=res.operationHandle.operationType,
        has_result_set=res.operationHandle.hasResultSet,
        modified_row_count=res.operationHandle.modifiedRowCount,
        session_guid=session.guid
    )

  def fetch_data(self, operation_handle, orientation=TFetchOrientation.FETCH_NEXT, max_rows=1000):
    # Fetch until the result is empty, due to a HS2 bug, instead of looking at hasMoreRows
    results, schema = self.fetch_result(operation_handle, orientation, max_rows)
    return HiveServerDataTable(results, schema, operation_handle, self.query_server)

  def cancel_operation(self, operation_handle):
    req = TCancelOperationReq(operationHandle=operation_handle)
    return self.call(self._client.CancelOperation, req)

  def close_operation(self, operation_handle):
    req = TCloseOperationReq(operationHandle=operation_handle)
    return self.call(self._client.CloseOperation, req)

  def get_columns(self, database, table):
    req = TGetColumnsReq(schemaName=database, tableName=table)
    res = self.call(self._client.GetColumns, req)

    operation_handle = res.operationHandle  # Keep the handle: fetch_result() returns a TFetchResultsResp, which does not carry one
    results, schema = self.fetch_result(operation_handle, orientation=TFetchOrientation.FETCH_NEXT)
    self.close_operation(operation_handle)

    return results, schema

  def fetch_result(self, operation_handle, orientation=TFetchOrientation.FETCH_FIRST, max_rows=1000):
    if operation_handle.hasResultSet:
      fetch_req = TFetchResultsReq(operationHandle=operation_handle, orientation=orientation, maxRows=max_rows)
      res = self.call(self._client.FetchResults, fetch_req)
    else:
      res = TFetchResultsResp(results=TRowSet(startRowOffset=0, rows=[], columns=[]))

    if operation_handle.hasResultSet and TFetchOrientation.FETCH_FIRST:  # Only fetch for the first call that should be with start_over
      meta_req = TGetResultSetMetadataReq(operationHandle=operation_handle)
      schema = self.call(self._client.GetResultSetMetadata, meta_req)
    else:
      schema = None

    return res, schema

  def fetch_log(self, operation_handle, orientation=TFetchOrientation.FETCH_NEXT, max_rows=1000):
    req = TFetchResultsReq(operationHandle=operation_handle, orientation=orientation, maxRows=max_rows, fetchType=1)
    res = self.call(self._client.FetchResults, req)

    if beeswax_conf.THRIFT_VERSION.get() >= 7:
      lines = res.results.columns[0].stringVal.values
    else:
      lines = list(map(lambda r: r.colVals[0].stringVal.value, res.results.rows))

    return '\n'.join(lines)

  def get_operation_status(self, operation_handle):
    req = TGetOperationStatusReq(operationHandle=operation_handle)
    return self.call(self._client.GetOperationStatus, req)

  def explain(self, query):
    query_statement = query.get_query_statement(0)
    configuration = self._get_query_configuration(query)
    return self.execute_query_statement(statement='EXPLAIN %s' % query_statement, configuration=configuration, orientation=TFetchOrientation.FETCH_NEXT)

  def get_log(self, operation_handle):
    try:
      req = TGetLogReq(operationHandle=operation_handle)
      res = self.call(self._client.GetLog, req)
      return res.log
    except Exception as e:
      if 'Invalid query handle' in str(e):
        message = 'Invalid query handle'
        LOG.error('%s: %s' % (message, e))
      else:
        message = 'Error when fetching the logs of the operation.'
        LOG.exception(message)
      return message

  def get_partitions(self, database, table_name, partition_spec=None, max_parts=None, reverse_sort=True):
    table = self.get_table(database, table_name)

    query = 'SHOW PARTITIONS `%s`.`%s`' % (database, table_name)
    if self.query_server['server_name'] == 'beeswax' and partition_spec:
      query += ' PARTITION(%s)' % partition_spec

    # We fetch N partitions, then reverse the order later and keep max_parts of them.
    # partition_spec can be used to refine the initial list. We need to over-fetch
    # like this until SHOW PARTITIONS offers a LIMIT and ORDER BY.
    partition_table = self.execute_query_statement(query, max_rows=10000, orientation=TFetchOrientation.FETCH_NEXT, close_operation=True)

    if self.query_server['server_name'].startswith('impala'):
      try:
        # Fetch all partition key names, which are listed before the #Rows column
        cols = [col.name for col in partition_table.cols()]
        stop = cols.index('#Rows')
        partition_keys = cols[:stop]
        num_parts = len(partition_keys)

        # Get all partition values
        rows = partition_table.rows()
        partition_values = [partition[:num_parts] for partition in rows]

        # Truncate the last row, which is the Total
        partition_values = partition_values[:-1]
        partitions_formatted = []

        # Format partition keys and values into the Hive format: [key1=val1/key2=val2]
        for values in partition_values:
          zipped_parts = zip(partition_keys, values)
          partitions_formatted.append(['/'.join(['%s=%s' % (str(part[0]), str(part[1])) for part in zipped_parts if all(part)])])

        partitions = [PartitionValueCompatible(partition, table) for partition in partitions_formatted]
      except Exception:
        raise ValueError(_('Failed to determine partition keys for Impala table: `%s`.`%s`') % (database, table_name))
    else:
      partitions = [PartitionValueCompatible(partition, table) for partition in partition_table.rows()]

    if reverse_sort:
      partitions.reverse()

    if max_parts is None or max_parts <= 0:
      max_parts = LIST_PARTITIONS_LIMIT.get()

    return partitions[:max_parts]

  def get_configuration(self):
    configuration = {}

    if self.query_server['server_name'].startswith('impala'):  # Return all configuration settings
      query = 'SET'
      results = self.execute_query_statement(query, orientation=TFetchOrientation.FETCH_NEXT, close_operation=True)
      configuration = dict((row[0], row[1]) for row in results.rows())
    else:  # For Hive, only return white-listed configurations
      query = 'SET -v'
      results = self.execute_query_statement(query, orientation=TFetchOrientation.FETCH_FIRST, max_rows=-1, close_operation=True)
      config_whitelist = [config.lower() for config in CONFIG_WHITELIST.get()]
      properties = [(row[0].split('=')[0], row[0].split('=')[1]) for row in results.rows() if '=' in row[0]]
      configuration = dict((prop, value) for prop, value in properties if prop.lower() in config_whitelist)

    return configuration

  def _get_query_configuration(self, query):
    return dict([(setting['key'], setting['value']) for setting in query.settings])
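
# Illustrative sketch, not used by the module: each Hive 'SET -v' result row is a
# single 'key=value' string in row[0], so with a hypothetical whitelist of
# ['hive.execution.engine'], rows like:
#
#   [['hive.execution.engine=tez'], ['hive.server2.thrift.port=10000']]
#
# are filtered by get_configuration() down to {'hive.execution.engine': 'tez'}.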


class HiveServerTableCompatible(HiveServerTable):
  """Same API as Beeswax"""

  def __init__(self, hive_table):
    self.table = hive_table.table
    self.table_schema = hive_table.table_schema
    self.desc_results = hive_table.desc_results
    self.desc_schema = hive_table.desc_schema

    self.describe = HiveServerTTableSchema(self.desc_results, self.desc_schema).cols()
    self._details = None

    try:
      self.is_impala_only = 'org.apache.kudu.mapreduce.KuduTableOutputFormat' in str(hive_table.properties)
    except Exception as e:
      LOG.warn('Autocomplete data fetching error: %s' % e)
      self.is_impala_only = False

  @property
  def cols(self):
    return [
      type('Col', (object,), {
        'name': col.get('col_name', '').strip() if col.get('col_name') else '',
        'type': col.get('data_type', '').strip() if col.get('data_type') else '',
        'comment': col.get('comment', '').strip() if col.get('comment') else ''
      }) for col in HiveServerTable.cols.fget(self)
    ]


class ResultCompatible(object):
  def __init__(self, data_table):
    self.data_table = data_table
    self.rows = data_table.rows
    self.has_more = data_table.has_more
    self.start_row = data_table.startRowOffset
    self.ready = True

  @property
  def columns(self):
    return self.cols()

  def cols(self):
    return [col.name for col in self.data_table.cols()]

  def full_cols(self):
    return [{'name': col.name, 'type': col.type, 'comment': col.comment} for col in self.data_table.cols()]


class PartitionKeyCompatible(object):
  def __init__(self, name, type, comment):
    self.name = name
    self.type = type
    self.comment = comment

  def __eq__(self, other):
    return isinstance(other, PartitionKeyCompatible) and \
        self.name == other.name and \
        self.type == other.type and \
        self.comment == other.comment

  def __repr__(self):
    return 'PartitionKey(name:%s, type:%s, comment:%s)' % (self.name, self.type, self.comment)


class PartitionValueCompatible(object):
  def __init__(self, partition_row, table, properties=None):
    self.partition_keys = table.partition_keys
    if properties is None:
      properties = {}

    # Parses: ['datehour=2013022516'] or ['month=2011-07/dt=2011-07-01/hr=12']
    partition = partition_row[0]
    parts = partition.split('/')
    self.partition_spec = ','.join([self._get_partition_spec(pv[0], pv[1]) for pv in [part.split('=') for part in parts]])
    self.values = [pv[1] for pv in [part.split('=') for part in parts]]
    self.sd = type('Sd', (object,), properties,)

  def __repr__(self):
    return 'PartitionValueCompatible(spec:%s, values:%s, sd:%s)' % (self.partition_spec, self.values, self.sd)

  def _get_partition_spec(self, name, value):
    partition_spec = "`%s`='%s'" % (name, value)
    partition_key = next((key for key in self.partition_keys if key.name == name), None)
    if partition_key and partition_key.type.upper() not in ('STRING', 'CHAR', 'VARCHAR', 'TIMESTAMP', 'DATE'):
      partition_spec = "`%s`=%s" % (name, value)
    return partition_spec
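
# Illustrative sketch, not used by the module: for a hypothetical table partitioned
# by month (string) and hr (int), the SHOW PARTITIONS row ['month=2011-07/hr=12']
# yields:
#
#   partition_spec  ->  "`month`='2011-07',`hr`=12"   (non-string types unquoted)
#   values          ->  ['2011-07', '12']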


class ExplainCompatible(object):
  def __init__(self, data_table):
    self.textual = '\n'.join([line[0] for line in data_table.rows()])


class ResultMetaCompatible(object):
  def __init__(self):
    self.in_tablename = True


class HiveServerClientCompatible(object):
  """Same API as Beeswax"""

  def __init__(self, client):
    self._client = client
    self.user = client.user
    self.query_server = client.query_server

  def query(self, query, statement=0, with_multiple_session=False):
    return self._client.execute_async_query(query, statement, with_multiple_session=with_multiple_session)

  def get_state(self, handle):
    operationHandle = handle.get_rpc_handle()
    res = self._client.get_operation_status(operationHandle)
    return HiveServerQueryHistory.STATE_MAP[res.operationState]

  def get_operation_status(self, handle):
    operationHandle = handle.get_rpc_handle()
    return self._client.get_operation_status(operationHandle)

  def use(self, query):
    data = self._client.execute_query(query)
    self._client.close_operation(data.operation_handle)
    return data

  def explain(self, query):
    data_table = self._client.explain(query)
    data = ExplainCompatible(data_table)
    self._client.close_operation(data_table.operation_handle)
    return data

  def fetch(self, handle, start_over=False, max_rows=None):
    operationHandle = handle.get_rpc_handle()
    if max_rows is None:
      max_rows = 1000

    if start_over and not (self.query_server['server_name'].startswith('impala') and self.query_server['querycache_rows'] == 0):  # Backward compatibility for impala
      orientation = TFetchOrientation.FETCH_FIRST
    else:
      orientation = TFetchOrientation.FETCH_NEXT

    data_table = self._client.fetch_data(operationHandle, orientation=orientation, max_rows=max_rows)

    return ResultCompatible(data_table)

  def cancel_operation(self, handle):
    operationHandle = handle.get_rpc_handle()
    return self._client.cancel_operation(operationHandle)

  def close(self, handle):
    return self.close_operation(handle)

  def close_operation(self, handle):
    operationHandle = handle.get_rpc_handle()
    return self._client.close_operation(operationHandle)

  def close_session(self, session):
    operationHandle = session.get_handle()
    return self._client.close_session(operationHandle)

  def dump_config(self):
    return 'Does not exist in HS2'

  def get_log(self, handle, start_over=True):
    operationHandle = handle.get_rpc_handle()

    if beeswax_conf.USE_GET_LOG_API.get() or self.query_server['server_name'].startswith('impala'):
      return self._client.get_log(operationHandle)
    else:
      if start_over:
        orientation = TFetchOrientation.FETCH_FIRST
      else:
        orientation = TFetchOrientation.FETCH_NEXT
      return self._client.fetch_log(operationHandle, orientation=orientation, max_rows=-1)

  def get_databases(self, schemaName=None):
    col = 'TABLE_SCHEM'
    return [table[col] for table in self._client.get_databases(schemaName)]

  def get_database(self, database):
    return self._client.get_database(database)

  def get_tables_meta(self, database, table_names, table_types=None):
    tables = self._client.get_tables_meta(database, table_names, table_types)
    massaged_tables = []
    for table in tables:
      massaged_tables.append({
        'name': table['TABLE_NAME'],
        'comment': table['REMARKS'],
        'type': table['TABLE_TYPE'].capitalize()
      })
    return massaged_tables

  def get_tables(self, database, table_names, table_types=None):
    tables = [table['TABLE_NAME'] for table in self._client.get_tables(database, table_names, table_types)]
    tables.sort()
    return tables

  def get_table(self, database, table_name, partition_spec=None):
    table = self._client.get_table(database, table_name, partition_spec)
    return HiveServerTableCompatible(table)

  def get_columns(self, database, table):
    return self._client.get_columns(database, table)

  def get_default_configuration(self, *args, **kwargs):
    return []

  def get_results_metadata(self, handle):
    # We just need to mock it
    return ResultMetaCompatible()

  def create_database(self, name, description): raise NotImplementedError()

  def alter_table(self, dbname, tbl_name, new_tbl): raise NotImplementedError()

  def open_session(self, user):
    return self._client.open_session(user)

  def add_partition(self, new_part): raise NotImplementedError()

  def get_partition(self, *args, **kwargs): raise NotImplementedError()

  def get_partitions(self, database, table_name, partition_spec, max_parts, reverse_sort=True):
    return self._client.get_partitions(database, table_name, partition_spec, max_parts, reverse_sort)

  def alter_partition(self, db_name, tbl_name, new_part): raise NotImplementedError()

  def get_configuration(self):
    return self._client.get_configuration()
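

# Illustrative usage sketch, not executed as part of this module. It assumes a
# configured `query_server` dict, a Django `user`, and a prepared `hql_query`
# design object (all hypothetical here):
#
#   client = HiveServerClientCompatible(HiveServerClient(query_server, user))
#   handle = client.query(hql_query)  # Async ExecuteStatement
#   state = client.get_state(handle)  # Poll until the query finishes
#   result = client.fetch(handle, start_over=True, max_rows=100)
#   for row in result.rows():
#     ...
#   client.close(handle)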