models.py

#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import json
import re

from desktop.lib.i18n import smart_str, force_unicode
from desktop.lib.rest.http_client import RestException

from beeswax import models as beeswax_models
from beeswax.design import hql_query
from beeswax.models import QUERY_TYPES, HiveServerQueryHandle, QueryHistory
from beeswax.views import safe_get_design, save_design
from beeswax.server import dbms
from beeswax.server.dbms import get_query_server_config, QueryServerException

from spark.job_server_api import get_api as get_spark_api


# To move to Editor API

class SessionExpired(Exception):
  pass


class QueryExpired(Exception):
  pass


class QueryError(Exception):

  def __init__(self, message):
    self.message = message

  def __str__(self):
    return force_unicode(str(self.message))


class Notebook():

  def __init__(self, document=None):
    self.document = None

    if document is not None:
      self.data = document.data
      self.document = document
    else:
      self.data = json.dumps({
          'name': 'My Notebook',
          'snippets': [{'type': 'scala', 'result': {}}]
      })

  def get_json(self):
    _data = self.get_data()

    return json.dumps(_data)

  def get_data(self):
    _data = json.loads(self.data)

    if self.document is not None:
      _data['id'] = self.document.id

    return _data
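

# A minimal usage sketch (not part of the original module): a Notebook built
# without a backing document falls back to a default Scala snippet, and
# get_data() only carries an 'id' key once a saved document is attached.
#
#   notebook = Notebook()
#   data = notebook.get_data()
#   assert data['name'] == 'My Notebook'
#   assert data['snippets'][0]['type'] == 'scala'
#   assert 'id' not in data  # no backing document yet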


def get_api(user, snippet):
  if snippet['type'] in ('hive', 'impala', 'spark-sql'):
    return HS2Api(user)
  elif snippet['type'] == 'text':
    return TextApi(user)
  else:
    return SparkApi(user)


def _get_snippet_session(notebook, snippet):
  return [session for session in notebook['sessions'] if session['type'] == snippet['type']][0]
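

# Sketch of how the dispatcher above is typically driven (the caller context is
# illustrative, not from this file): `notebook` and `snippet` dicts come out of
# the request payload, and routing is purely on the snippet type, with Spark as
# the fallback.
#
#   snippet = {'type': 'hive', 'statement': 'SELECT 1', 'result': {}}
#   api = get_api(request.user, snippet)            # -> HS2Api
#   api = get_api(request.user, {'type': 'scala'})  # -> SparkApi (fallback)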


class TextApi():

  def __init__(self, user):
    self.user = user

  def create_session(self, lang):
    return {
        'type': lang,
        'id': None
    }


# HS2

def query_error_handler(func):
  def decorator(*args, **kwargs):
    try:
      return func(*args, **kwargs)
    except QueryServerException as e:
      message = force_unicode(str(e))
      if 'Invalid query handle' in message or 'Invalid OperationHandle' in message:
        raise QueryExpired(e)
      else:
        raise QueryError(message)
  return decorator
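

# What the decorator buys the HS2 methods below (illustration, assumed caller
# behavior): a stale HiveServer2 handle surfaces as QueryExpired so the caller
# can re-submit, while any other server error becomes a QueryError.
#
#   @query_error_handler
#   def check_status(...):
#     ...  # a QueryServerException('Invalid OperationHandle ...') raised in
#          # here reaches the caller as QueryExpired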


class HS2Api():

  def __init__(self, user):
    self.user = user

  def _get_handle(self, snippet):
    # Decode the serialized secret/guid pair back into the form
    # HiveServerQueryHandle expects before rebuilding the handle.
    snippet['result']['handle']['secret'], snippet['result']['handle']['guid'] = \
        HiveServerQueryHandle.get_decoded(snippet['result']['handle']['secret'], snippet['result']['handle']['guid'])
    return HiveServerQueryHandle(**snippet['result']['handle'])

  def _get_db(self, snippet):
    if snippet['type'] == 'hive':
      name = 'beeswax'
    elif snippet['type'] == 'impala':
      name = 'impala'
    else:
      name = 'spark-sql'

    return dbms.get(self.user, query_server=get_query_server_config(name=name))

  def create_session(self, lang):
    return {
        'type': lang,
        'id': None # Real one at some point
    }

  def execute(self, notebook, snippet):
    db = self._get_db(snippet)

    query = hql_query(snippet['statement'], QUERY_TYPES[0])

    try:
      handle = db.client.query(query)
    except QueryServerException as ex:
      raise QueryError(ex.message)

    # All good
    server_id, server_guid = handle.get()
    return {
        'secret': server_id,
        'guid': server_guid,
        'operation_type': handle.operation_type,
        'has_result_set': handle.has_result_set,
        'modified_row_count': handle.modified_row_count,
        'log_context': handle.log_context
    }

  @query_error_handler
  def check_status(self, notebook, snippet):
    db = self._get_db(snippet)

    handle = self._get_handle(snippet)
    status = db.get_state(handle)

    return {
        'status': 'running' if status.index in (QueryHistory.STATE.running.index, QueryHistory.STATE.submitted.index)
            else ('failed' if status.index in (QueryHistory.STATE.failed.index, QueryHistory.STATE.expired.index)
            else 'available')
    }

  @query_error_handler
  def fetch_result(self, notebook, snippet, rows, start_over):
    db = self._get_db(snippet)

    handle = self._get_handle(snippet)
    results = db.fetch(handle, start_over=start_over, rows=rows)

    # No escaping...
    return {
        'has_more': results.has_more,
        'data': list(results.rows()),
        'meta': [{
            'name': column.name,
            'type': column.type,
            'comment': column.comment
        } for column in results.data_table.cols()]
    }

  @query_error_handler
  def fetch_result_metadata(self):
    pass

  @query_error_handler
  def cancel(self, notebook, snippet):
    db = self._get_db(snippet)

    handle = self._get_handle(snippet)
    db.cancel_operation(handle)
    return {'status': 'canceled'}

  @query_error_handler
  def get_log(self, snippet):
    db = self._get_db(snippet)

    handle = self._get_handle(snippet)
    return db.get_log(handle)

  def _progress(self, snippet, logs):
    if snippet['type'] == 'hive':
      match = re.search(r'Total jobs = (\d+)', logs, re.MULTILINE)
      total = (int(match.group(1)) if match else 1) * 2
      started = logs.count('Starting Job')
      ended = logs.count('Ended Job')
      return int((started + ended) * 100 / total)
    elif snippet['type'] == 'impala':
      match = re.search(r'(\d+)% Complete', logs, re.MULTILINE)
      return int(match.group(1)) if match else 0
    else:
      return 50
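

# Worked example of the Hive progress arithmetic (illustrative log lines and an
# assumed `user`, not captured from a real run): two MapReduce jobs give
# total = 2 * 2 = 4 phases, one per job start and one per job end. With one job
# started and ended plus a second just started, (2 + 1) * 100 / 4 = 75.
#
#   logs = 'Total jobs = 2\nStarting Job...\nEnded Job...\nStarting Job...'
#   HS2Api(user)._progress({'type': 'hive'}, logs)  # -> 75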


# Spark

class SparkApi():

  def __init__(self, user):
    self.user = user

  def create_session(self, lang='scala'):
    api = get_spark_api(self.user)
    return {
        'type': lang,
        'id': api.create_session(lang=lang)
    }

  def execute(self, notebook, snippet):
    api = get_spark_api(self.user)
    session = _get_snippet_session(notebook, snippet)

    try:
      return {'id': api.submit_statement(session['id'], snippet['statement']).split('cells/')[1]}
    except Exception as e:
      message = force_unicode(str(e))
      if 'session not found' in message:
        raise SessionExpired(e)
      else:
        raise

  def check_status(self, notebook, snippet):
    # Placeholder: the job server is not polled yet, so every submitted cell
    # is reported as immediately available.
    try:
      return {'status': 'available'}
    except Exception as e:
      message = force_unicode(str(e))
      if 'session not found' in message:
        raise SessionExpired(e)
      else:
        raise

  def fetch_result(self, notebook, snippet, rows, start_over):
    api = get_spark_api(self.user)
    session = _get_snippet_session(notebook, snippet)
    cell = snippet['result']['handle']['id']

    try:
      data = api.fetch_data(session['id'], cell)
    except Exception as e:
      message = force_unicode(str(e))
      if 'session not found' in message:
        raise SessionExpired(e)
      else:
        raise

    return {
        'data': [data['output']] if start_over else [], # start_over not supported
        'meta': [{'name': 'Header', 'type': 'String', 'comment': ''}]
    }

  def cancel(self, notebook, snippet):
    pass

  def get_log(self, snippet):
    return 'Not available'

  def _progress(self, snippet, logs):
    return 50
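

# End-to-end sketch (hypothetical driver code with an assumed `user` object;
# the notebook/snippet shapes follow the dicts used above):
#
#   api = SparkApi(user)
#   session = api.create_session(lang='scala')  # {'type': 'scala', 'id': ...}
#   notebook = {'sessions': [session]}
#   snippet = {'type': 'scala', 'statement': '1 + 1', 'result': {}}
#
#   handle = api.execute(notebook, snippet)     # {'id': <cell id>}
#   snippet['result']['handle'] = handle
#   result = api.fetch_result(notebook, snippet, rows=10, start_over=True)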