create_table.py

#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import csv
import gzip
import json
import logging
import re

from django.urls import reverse
from django.http import QueryDict
from django.utils.translation import ugettext as _

from aws.s3.s3fs import S3FileSystemException
from desktop.context_processors import get_app_name
from desktop.lib import django_mako, i18n
from desktop.lib.django_util import render
from desktop.lib.exceptions_renderable import PopupException
from desktop.lib.django_forms import MultiForm
from hadoop.fs import hadoopfs

from beeswax.common import TERMINATORS
from beeswax.design import hql_query
from beeswax.forms import CreateTableForm, ColumnTypeFormSet,\
  PartitionTypeFormSet, CreateByImportFileForm, CreateByImportDelimForm,\
  TERMINATOR_CHOICES
from beeswax.server import dbms
from beeswax.server.dbms import QueryServerException
from beeswax.views import execute_directly


LOG = logging.getLogger(__name__)
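
# This module backs two table-creation flows in the Beeswax app: the manual
# create_table() form directly below, and the file-based import_wizard() further
# down, which previews a file, guesses a delimiter and generates the CREATE TABLE
# statement for it.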


def create_table(request, database='default'):
  """Create a table by specifying its attributes manually"""
  db = dbms.get(request.user)
  dbs = db.get_databases()
  databases = [{'name':db, 'url':reverse('beeswax:create_table', kwargs={'database': db})} for db in dbs]

  form = MultiForm(
      table=CreateTableForm,
      columns=ColumnTypeFormSet,
      partitions=PartitionTypeFormSet
  )

  if request.method == "POST":
    form.bind(request.POST)
    form.table.db = db  # curry is invalid
    form.table.database = database

    if request.POST.get('create'):
      if form.is_valid():
        columns = [ f.cleaned_data for f in form.columns.forms ]
        partition_columns = [ f.cleaned_data for f in form.partitions.forms ]
        proposed_query = django_mako.render_to_string("create_table_statement.mako", {
            'databases': databases,
            'database': database,
            'table': form.table.cleaned_data,
            'columns': columns,
            'partition_columns': partition_columns
          }
        )
        # Mako outputs bytestring in utf8
        proposed_query = proposed_query.decode('utf-8')

        table_name = form.table.cleaned_data['name']
        return _submit_create_and_load(request, proposed_query, table_name, None, False, database=database)
  else:
    form.bind()

  return render("create_table_manually.mako", request, {
    'action': "#",
    'databases': databases,
    'table_form': form.table,
    'columns_form': form.columns,
    'partitions_form': form.partitions,
    'has_tables': len(dbms.get(request.user).get_tables()) > 0,
    'database': database,
  })
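
# For reference, create_table_statement.mako renders a Hive CREATE TABLE
# statement from the 'table', 'columns' and 'partition_columns' values above.
# With illustrative values it looks roughly like:
#   CREATE TABLE `default.sample` (id int, name string)
#   ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' STORED AS TEXTFILE
# (the exact DDL depends on the template and the options chosen in the form).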


IMPORT_PEEK_SIZE = 5 * 1024**2
IMPORT_PEEK_NLINES = 10

DELIMITERS = [ hive_val for hive_val, desc, ascii in TERMINATORS ]
DELIMITER_READABLE = {'\\001' : _('ctrl-As'),
                      '\\002' : _('ctrl-Bs'),
                      '\\003' : _('ctrl-Cs'),
                      '\\t'   : _('tabs'),
                      ','     : _('commas'),
                      ' '     : _('spaces')}
FILE_READERS = []
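
# FILE_READERS is filled in at import time: GzipFileReader and TextFileReader
# register themselves at the bottom of this module.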


def import_wizard(request, database='default'):
  """
  Help users define a table based on a file they want to import to Hive.

  Limitations:
    - Rows are delimited (no serde).
    - No detection for map and array types.
    - No detection for the presence of column header in the first row.
    - No partition table.
    - Does not work with binary data.
  """
  encoding = i18n.get_site_encoding()
  app_name = get_app_name(request)

  db = dbms.get(request.user)
  dbs = db.get_databases()
  databases = [{'name':db, 'url':reverse('beeswax:import_wizard', kwargs={'database': db})} for db in dbs]

  if request.method == 'POST':
    #
    # General processing logic:
    # - We have 3 steps. Each requires the previous.
    #   * Step 1  : Table name and file location
    #   * Step 2a : Display sample with auto chosen delim
    #   * Step 2b : Display sample with user chosen delim (if user chooses one)
    #   * Step 3  : Display sample, and define columns
    # - Each step is represented by a different form. The form of an earlier step
    #   should be present when submitting to a later step.
    # - To preserve the data from the earlier steps, we send the forms back as
    #   hidden fields. This way, when users revisit a previous step, the data would
    #   be there as well.
    #
    delim_is_auto = False
    fields_list, n_cols = [[]], 0
    s3_col_formset = None

    s1_file_form = CreateByImportFileForm(request.POST, db=db)

    if s1_file_form.is_valid():
      do_s2_auto_delim = request.POST.get('submit_file')        # Step 1 -> 2
      do_s2_user_delim = request.POST.get('submit_preview')     # Step 2 -> 2
      do_s3_column_def = request.POST.get('submit_delim')       # Step 2 -> 3
      do_hive_create = request.POST.get('submit_create')        # Step 3 -> execute
      cancel_s2_user_delim = request.POST.get('cancel_delim')   # Step 2 -> 1
      cancel_s3_column_def = request.POST.get('cancel_create')  # Step 3 -> 2

      # Exactly one of these should be True
      if len(filter(None, (do_s2_auto_delim, do_s2_user_delim, do_s3_column_def, do_hive_create, cancel_s2_user_delim, cancel_s3_column_def))) != 1:
        raise PopupException(_('Invalid form submission'))

      if not do_s2_auto_delim:
        # We should have a valid delim form
        s2_delim_form = CreateByImportDelimForm(request.POST)
        if not s2_delim_form.is_valid():
          # Go back to picking delimiter
          do_s2_user_delim, do_s3_column_def, do_hive_create = True, False, False
      if do_hive_create:
        # We should have a valid columns formset
        s3_col_formset = ColumnTypeFormSet(prefix='cols', data=request.POST)
        if not s3_col_formset.is_valid():
          # Go back to define columns
          do_s3_column_def, do_hive_create = True, False

      load_data = s1_file_form.cleaned_data.get('load_data', 'IMPORT').upper()
      path = s1_file_form.cleaned_data['path']

      #
      # Go to step 2: We've just picked the file. Preview it.
      #
      if do_s2_auto_delim:
        try:
          if load_data == 'IMPORT':
            if not request.fs.isfile(path):
              raise PopupException(_('Path location must refer to a file if "Import Data" is selected.'))
          elif load_data == 'EXTERNAL':
            if not request.fs.isdir(path):
              raise PopupException(_('Path location must refer to a directory if "Create External Table" is selected.'))
        except (IOError, S3FileSystemException), e:
          raise PopupException(_('Path location "%s" is invalid: %s') % (path, e))

        delim_is_auto = True
        fields_list, n_cols, s2_delim_form = _delim_preview(request.fs, s1_file_form, encoding, [reader.TYPE for reader in FILE_READERS], DELIMITERS)

      if (do_s2_user_delim or do_s3_column_def or cancel_s3_column_def) and s2_delim_form.is_valid():
        # Delimit based on input
        fields_list, n_cols, s2_delim_form = _delim_preview(request.fs, s1_file_form, encoding, (s2_delim_form.cleaned_data['file_type'],),
                                                            (s2_delim_form.cleaned_data['delimiter'],))

      if do_s2_auto_delim or do_s2_user_delim or cancel_s3_column_def:
        return render('import_wizard_choose_delimiter.mako', request, {
          'action': reverse(app_name + ':import_wizard', kwargs={'database': database}),
          'delim_readable': DELIMITER_READABLE.get(s2_delim_form['delimiter'].data[0], s2_delim_form['delimiter'].data[1]),
          'initial': delim_is_auto,
          'file_form': s1_file_form,
          'delim_form': s2_delim_form,
          'fields_list': fields_list,
          'delimiter_choices': TERMINATOR_CHOICES,
          'n_cols': n_cols,
          'database': database,
          'databases': databases
        })

      #
      # Go to step 3: Define columns.
      #
      if do_s3_column_def:
        if s3_col_formset is None:
          columns = []
          for i in range(n_cols):
            columns.append({
              'column_name': 'col_%s' % (i,),
              'column_type': 'string',
            })
          s3_col_formset = ColumnTypeFormSet(prefix='cols', initial=columns)
        try:
          fields_list_for_json = list(fields_list)
          if fields_list_for_json:
            fields_list_for_json[0] = map(lambda a: re.sub('[^\w]', '', a), fields_list_for_json[0])  # Cleaning headers
          return render('import_wizard_define_columns.mako', request, {
            'action': reverse(app_name + ':import_wizard', kwargs={'database': database}),
            'file_form': s1_file_form,
            'delim_form': s2_delim_form,
            'column_formset': s3_col_formset,
            'fields_list': fields_list,
            'fields_list_json': json.dumps(fields_list_for_json),
            'n_cols': n_cols,
            'database': database,
            'databases': databases
          })
        except Exception, e:
          raise PopupException(_("The selected delimiter is creating an un-even number of columns. Please make sure you don't have empty columns."), detail=e)

      #
      # Final: Execute
      #
      if do_hive_create:
        delim = s2_delim_form.cleaned_data['delimiter']
        table_name = s1_file_form.cleaned_data['name']
        proposed_query = django_mako.render_to_string("create_table_statement.mako", {
            'table': {
              'name': table_name,
              'comment': s1_file_form.cleaned_data['comment'],
              'row_format': 'Delimited',
              'field_terminator': delim,
              'file_format': 'TextFile',
              'load_data': load_data,
              'path': path,
              'skip_header': request.GET.get('removeHeader', 'off').lower() == 'on'
            },
            'columns': [ f.cleaned_data for f in s3_col_formset.forms ],
            'partition_columns': [],
            'database': database,
            'databases': databases
          }
        )
        try:
          return _submit_create_and_load(request, proposed_query, table_name, path, load_data, database=database)
        except QueryServerException, e:
          raise PopupException(_('The table could not be created.'), detail=e.message)
  else:
    s1_file_form = CreateByImportFileForm()

  return render('import_wizard_choose_file.mako', request, {
    'action': reverse(app_name + ':import_wizard', kwargs={'database': database}),
    'file_form': s1_file_form,
    'database': database,
    'databases': databases
  })


def _submit_create_and_load(request, create_hql, table_name, path, load_data, database):
  """
  Submit the table creation, and setup the load to happen (if ``load_data`` == IMPORT).
  """
  on_success_params = QueryDict('', mutable=True)
  app_name = get_app_name(request)

  if load_data == 'IMPORT':
    on_success_params['table'] = table_name
    on_success_params['path'] = path
    on_success_url = reverse(app_name + ':load_after_create', kwargs={'database': database}) + '?' + on_success_params.urlencode()
  else:
    on_success_url = reverse('metastore:describe_table', kwargs={'database': database, 'table': table_name})

  query = hql_query(create_hql, database=database)

  return execute_directly(request, query,
                          on_success_url=on_success_url,
                          on_success_params=on_success_params)
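
# With illustrative values, on_success_params.urlencode() produces something like
# 'table=sample_07&path=%2Fuser%2Fdemo%2Fdata.csv', so load_after_create() below
# can recover the table name and file path from the query string.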


def _delim_preview(fs, file_form, encoding, file_types, delimiters):
  """
  _delim_preview(fs, file_form, encoding, file_types, delimiters)
                                      -> (fields_list, n_cols, delim_form)

  Look at the beginning of the file and parse it according to the list of
  available file_types and delimiters.
  """
  assert file_form.is_valid()

  path = file_form.cleaned_data['path']
  try:
    # If path is a directory, find first file object
    if fs.isdir(path):
      children = fs.listdir(path)
      if children:
        path = '%s/%s' % (path, children[0])

    file_obj = fs.open(path)
    delim, file_type, fields_list = _parse_fields(path, file_obj, encoding, file_types, delimiters)
    file_obj.close()
  except IOError, ex:
    msg = "Failed to open file '%s': %s" % (path, ex)
    LOG.exception(msg)
    raise PopupException(msg)

  n_cols = max([ len(row) for row in fields_list ])

  # ``delimiter`` is a MultiValueField. delimiter_0 and delimiter_1 are the sub-fields.
  delimiter_0 = delim
  delimiter_1 = ''
  # If custom delimiter
  if not filter(lambda val: val[0] == delim, TERMINATOR_CHOICES):
    delimiter_0 = '__other__'
    delimiter_1 = delim

  delim_form = CreateByImportDelimForm(dict(delimiter_0=delimiter_0,
                                            delimiter_1=delimiter_1,
                                            file_type=file_type,
                                            n_cols=n_cols))
  if not delim_form.is_valid():
    assert False, _('Internal error when constructing the delimiter form: %(error)s.') % {'error': delim_form.errors}

  return fields_list, n_cols, delim_form
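
# For example (illustrative), if the detected delimiter is '|' and '|' is not one
# of the TERMINATOR_CHOICES, the form is built with delimiter_0='__other__' and
# delimiter_1='|'; otherwise delimiter_0 carries the delimiter directly.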


def _parse_fields(path, file_obj, encoding, filetypes, delimiters):
  """
  _parse_fields(path, file_obj, encoding, filetypes, delimiters) -> (delimiter, filetype, fields_list)

  Go through the list of ``filetypes`` (gzip, text) and stop at the first one
  that works for the data. Then apply the list of ``delimiters`` and pick the
  most appropriate one.

  ``path`` is used for debugging only.

  Return the best delimiter, filetype and the data broken down into rows of fields.
  """
  file_readers = [reader for reader in FILE_READERS if reader.TYPE in filetypes]

  for reader in file_readers:
    LOG.debug("Trying %s for file: %s" % (reader.TYPE, path))
    file_obj.seek(0, hadoopfs.SEEK_SET)
    lines = reader.readlines(file_obj, encoding)
    if lines is not None:
      delim, fields_list = _readfields(lines, delimiters)
      return delim, reader.TYPE, fields_list
  else:
    # This ``else`` belongs to the ``for`` loop: it only runs when no reader
    # produced lines, i.e. even TextFileReader doesn't work.
    msg = _("Failed to decode file '%(path)s' into printable characters under %(encoding)s.") % {'path': path, 'encoding': encoding}
    LOG.error(msg)
    raise PopupException(msg)


def _readfields(lines, delimiters):
  """
  readfields(lines, delimiters) -> (delim, a list of lists of fields)

  ``delimiters`` is a list of escaped characters, e.g. r'\\t', r'\\001', ','

  Choose the best delimiter from the given list of delimiters. Return that delimiter
  and the fields parsed by using that delimiter.
  """
  def score_delim(fields_list):
    """
    How good is this fields_list? Score based on variance of the number of fields.
    The score is always non-negative. The higher the better.
    """
    n_lines = len(fields_list)
    len_list = [len(fields) for fields in fields_list]

    if not len_list:
      raise PopupException(_("Could not find any columns to import"))

    # All lines should break into multiple fields
    if min(len_list) == 1:
      return 0

    avg_n_fields = sum(len_list) / n_lines
    sq_of_exp = avg_n_fields * avg_n_fields

    len_list_sq = [l * l for l in len_list]
    exp_of_sq = sum(len_list_sq) / n_lines
    var = exp_of_sq - sq_of_exp

    # Favour more fields
    return (1000.0 / (var + 1)) + avg_n_fields

  max_score = -1
  res = (None, None)
  for delim in delimiters:
    # Unescape the delimiter back to its character value
    delimiter = delim.decode('string_escape')
    try:
      fields_list = _get_rows(lines, delimiter)
    except:
      LOG.exception('failed to get rows')
      fields_list = [line.split(delimiter) for line in lines if line]

    score = score_delim(fields_list)
    LOG.debug("'%s' gives score of %s" % (delim, score))
    if score > max_score:
      max_score = score
      res = (delim, fields_list)

  return res
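
# Worked example of the scoring above (illustrative values): if a delimiter splits
# three sample lines into [['a', 'b', 'c'], ['d', 'e', 'f'], ['g', 'h', 'i']], then
# len_list = [3, 3, 3], avg_n_fields = 3, the variance is 0, and the score is
# 1000.0 / (0 + 1) + 3 = 1003.0. Uneven splits raise the variance and shrink the
# 1000.0 / (var + 1) term, and a delimiter that leaves some line unsplit scores 0.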


def _get_rows(lines, delimiter):
  # Let the csv module split the lines so that quoted fields containing the
  # delimiter are handled correctly; empty rows are dropped.
  column_reader = csv.reader(lines, delimiter=delimiter)
  return [row for row in column_reader if row]


def _peek_file(fs, file_form):
  """_peek_file(fs, file_form) -> (path, initial data)"""
  try:
    path = file_form.cleaned_data['path']
    file_obj = fs.open(path)
    file_head = file_obj.read(IMPORT_PEEK_SIZE)
    file_obj.close()
    return (path, file_head)
  except IOError, ex:
    msg = _("Failed to open file '%(path)s': %(error)s.") % {'path': path, 'error': ex}
    LOG.exception(msg)
    raise PopupException(msg)


class GzipFileReader(object):
  """Class for extracting lines from a gzipped file"""
  TYPE = 'gzip'

  @staticmethod
  def readlines(fileobj, encoding):
    """readlines(fileobj, encoding) -> list of lines"""
    gz = gzip.GzipFile(fileobj=fileobj, mode='rb')
    try:
      data = gz.read(IMPORT_PEEK_SIZE)
    except IOError:
      return None
    try:
      return unicode(data, encoding, errors='replace').splitlines()[:IMPORT_PEEK_NLINES]
    except UnicodeError:
      return None

FILE_READERS.append(GzipFileReader)


class TextFileReader(object):
  """Class for extracting lines from a regular text file"""
  TYPE = 'text'

  @staticmethod
  def readlines(fileobj, encoding):
    """readlines(fileobj, encoding) -> list of lines"""
    try:
      data = fileobj.read(IMPORT_PEEK_SIZE)
      return unicode(data, encoding, errors='replace').splitlines()[:IMPORT_PEEK_NLINES]
    except UnicodeError:
      return None

FILE_READERS.append(TextFileReader)
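
# Readers are tried in registration order in _parse_fields(): gzip first, then
# plain text, whenever the caller allows both file types.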


def load_after_create(request, database):
  """
  Automatically load data into a newly created table.

  We get here from the create's on_success_url, and expect to find
  ``table`` and ``path`` from the parameters.
  """
  tablename = request.GET.get('table')
  path = request.GET.get('path')

  if not tablename or not path:
    msg = _('Internal error: Missing needed parameter to load data into table.')
    LOG.error(msg)
    raise PopupException(msg)

  LOG.debug("Auto loading data from %s into table %s" % (path, tablename))
  hql = "LOAD DATA INPATH '%s' INTO TABLE `%s.%s`" % (path, database, tablename)
  query = hql_query(hql)

  on_success_url = reverse('metastore:describe_table', kwargs={'database': database, 'table': tablename})

  try:
    return execute_directly(request, query, on_success_url=on_success_url)
  except Exception, e:
    message = 'The table data could not be loaded'
    LOG.exception(message)
    detail = e.message if hasattr(e, 'message') and e.message else None
    raise PopupException(_(message), detail=detail)
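
# With illustrative values, the generated statement looks like:
#   LOAD DATA INPATH '/user/demo/data.csv' INTO TABLE `default.sample_07`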