upload.py
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
  17. """
  18. Classes for a custom upload handler to stream into HDFS.
  19. Note that since our middlewares inspect request.POST, we cannot inject a custom
  20. handler into a specific view. Therefore we always use the HDFSfileUploadHandler,
  21. which is triggered by a magic prefix ("HDFS") in the field name.
  22. See http://docs.djangoproject.com/en/1.2/topics/http/file-uploads/
  23. """

import errno
import logging
import time

from django.core.files.uploadhandler import FileUploadHandler, StopFutureHandlers, StopUpload, UploadFileException, SkipFile
from django.utils.translation import ugettext as _

from desktop.lib import fsmanager

import hadoop.cluster
from hadoop.conf import UPLOAD_CHUNK_SIZE
from hadoop.fs.exceptions import WebHdfsException


LOG = logging.getLogger(__name__)

UPLOAD_SUBDIR = 'hue-uploads'


class HDFSerror(UploadFileException):
  pass


class HDFStemporaryUploadedFile(object):
  """
  A temporary HDFS file to store upload data.

  This class does not have any file read methods.
  """
  def __init__(self, request, name, destination):
    self.name = name
    self.size = None
    self._do_cleanup = False
    try:
      self._fs = request.fs
    except AttributeError:
      self._fs = hadoop.cluster.get_hdfs()

    # Don't want to handle this upload if we don't have an HDFS
    if not self._fs:
      raise HDFSerror(_("No HDFS found"))

    # We want to set the user to be the user doing the upload
    self._fs.setuser(request.user.username)
    self._path = self._fs.mkswap(name, suffix='tmp', basedir=destination)

    # Check access permissions before attempting upload
    try:
      self._fs.check_access(destination, 'rw-')
    except WebHdfsException, e:
      LOG.exception(e)
      raise HDFSerror(_('User %s does not have permissions to write to path "%s".') % (request.user.username, destination))

    if self._fs.exists(self._path):
      self._fs._delete(self._path)
    self._file = self._fs.open(self._path, 'w')

    self._do_cleanup = True

  def __del__(self):
    if self._do_cleanup:
      # Do not do cleanup here. It's hopeless. The self._fs threadlocal states
      # are going to be all wrong.
      LOG.error("Left-over upload file is not cleaned up: %s" % (self._path,))

  def get_temp_path(self):
    return self._path

  def finish_upload(self, size):
    try:
      self.size = size
      self.close()
    except Exception, ex:
      LOG.exception('Error uploading file to %s' % (self._path,))
      raise

  def remove(self):
    try:
      self._fs.remove(self._path, True)
      self._do_cleanup = False
    except IOError, ex:
      if ex.errno != errno.ENOENT:
        LOG.exception('Failed to remove temporary upload file "%s". '
                      'Please cleanup manually: %s' % (self._path, ex))

  def write(self, data):
    self._file.write(data)

  def flush(self):
    self._file.flush()

  def close(self):
    self._file.close()
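

# A minimal lifecycle sketch for the class above (hypothetical caller code;
# the path, file name and chunk source are made up for illustration, and the
# real caller is HDFSfileUploadHandler below):
#
#   tmp = HDFStemporaryUploadedFile(request, 'report.csv', '/user/alice')
#   try:
#     for chunk in chunks:           # one write per received chunk
#       tmp.write(chunk)
#       tmp.flush()
#     tmp.finish_upload(total_size)  # records the size and closes the swap file
#   except IOError:
#     tmp.remove()                   # best-effort removal of the temporary file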


class HDFSfileUploadHandler(FileUploadHandler):
  """
  Handle file upload by storing data in a temp HDFS file.

  This handler is triggered by any upload field whose name starts with
  "HDFS" (case insensitive).

  In practice, the middlewares (which access the request.REQUEST/POST/FILES
  objects) trigger the upload before the request ever reaches the view. See
  the Django file upload documentation for details.

  This means the upload may start before the Hue auth middleware has run, so
  the access checks are enforced by the HDFS destination permissions instead.
  """
  def __init__(self, request):
    FileUploadHandler.__init__(self, request)
    self._file = None
    self._starttime = 0
    self._activated = False
    self._destination = request.GET.get('dest', None)  # GET param avoids infinite looping
    self.request = request
    fs = fsmanager.get_filesystem('default')
    fs.setuser(request.user.username)
    # Note: this assigns chunk_size on the FileUploadHandler class itself, so
    # the value applies to every upload handler instance in this process.
    FileUploadHandler.chunk_size = fs.get_upload_chuck_size(self._destination) if self._destination else UPLOAD_CHUNK_SIZE.get()
    LOG.debug("Chunk size = %d" % FileUploadHandler.chunk_size)
  def new_file(self, field_name, file_name, *args, **kwargs):
    # Detect "HDFS" in the field name.
    if field_name.upper().startswith('HDFS'):
      LOG.info('Using HDFSfileUploadHandler to handle file upload.')
      try:
        fs_ref = self.request.REQUEST.get('fs', 'default')
        self.request.fs = fsmanager.get_filesystem(fs_ref)
        self.request.fs.setuser(self.request.user.username)
        self._file = HDFStemporaryUploadedFile(self.request, file_name, self._destination)
        LOG.debug('Upload attempt to %s' % (self._file.get_temp_path(),))
        self._activated = True
        self._starttime = time.time()
      except Exception, ex:
        LOG.error("Not using HDFS upload handler: %s" % (ex,))
        self.request.META['upload_failed'] = ex

      raise StopFutureHandlers()
  def receive_data_chunk(self, raw_data, start):
    LOG.debug("HDFSfileUploadHandler receive_data_chunk")

    if not self._activated:
      if self.request.META.get('PATH_INFO').startswith('/filebrowser') and self.request.META.get('PATH_INFO') != '/filebrowser/upload/archive':
        raise SkipFile()
      return raw_data

    try:
      self._file.write(raw_data)
      self._file.flush()
      return None
    except IOError:
      LOG.exception('Error storing upload data in temporary file "%s"' %
                    (self._file.get_temp_path(),))
      raise StopUpload()
  def file_complete(self, file_size):
    if not self._activated:
      return None

    try:
      self._file.finish_upload(file_size)
    except IOError:
      LOG.exception('Error closing uploaded temporary file "%s"' %
                    (self._file.get_temp_path(),))
      raise

    elapsed = time.time() - self._starttime
    LOG.info('Uploaded %s bytes to HDFS in %s seconds' % (file_size, elapsed))
    return self._file
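

# Usage sketch (illustrative, not part of this module): Django consults
# request.upload_handlers, which must be modified before request.POST or
# request.FILES is first accessed. Per the module docstring the handler is
# installed globally; one way to do that in a Django 1.2-era project is a
# middleware (the class name here is hypothetical):
#
#   class HDFSUploadMiddleware(object):
#     def process_request(self, request):
#       request.upload_handlers.insert(0, HDFSfileUploadHandler(request))
#
# Alternatively, the handler can be listed in settings.FILE_UPLOAD_HANDLERS
# so that Django constructs it for every request.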