s3fs.py
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import

import itertools
import logging
import os
import posixpath
import re
import time

from boto.exception import BotoClientError, S3ResponseError
from boto.s3.connection import Location
from boto.s3.key import Key
from boto.s3.prefix import Prefix
from django.utils.translation import ugettext as _

from aws import s3
from aws.conf import get_default_region, get_locations
from aws.s3 import normpath, s3file, translate_s3_error, S3A_ROOT
from aws.s3.s3stat import S3Stat


DEFAULT_READ_SIZE = 1024 * 1024  # 1MB

PERMISSION_ACTION_S3 = "s3_access"

BUCKET_NAME_PATTERN = re.compile(
    r"^((?:(?:[a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9_\-]*[a-zA-Z0-9])\.)*(?:[A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9_\-]*[A-Za-z0-9]))$")

LOG = logging.getLogger(__name__)


class S3FileSystemException(IOError):

  def __init__(self, *args, **kwargs):
    super(S3FileSystemException, self).__init__(*args, **kwargs)


def auth_error_handler(view_fn):
  def decorator(*args, **kwargs):
    try:
      return view_fn(*args, **kwargs)
    except (S3ResponseError, IOError), e:
      if 'Forbidden' in str(e) or (hasattr(e, 'status') and e.status == 403):
        path = kwargs.get('path')
        if not path and len(args) > 1:
          path = args[1]  # We assume the path is the first positional argument after self
        msg = _('User is not authorized to perform the attempted operation. Check that the user has appropriate permissions.')
        if path:
          msg = _('User is not authorized to write or modify path: %s. Check that the user has write permissions.') % path
        raise S3FileSystemException(msg)
      else:
        msg = str(e)
        if isinstance(e, S3ResponseError):
          msg = e.message or e.reason
        raise S3FileSystemException(msg)
    except Exception, e:
      raise e
  return decorator
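
# A hypothetical sketch of how auth_error_handler is meant to be applied: any boto 403
# raised inside the wrapped method surfaces as a user-facing S3FileSystemException.
#
#   class Example(object):
#     @auth_error_handler
#     def touch(self, path):
#       pass  # a boto call raising S3ResponseError(403) here becomes
#             # "User is not authorized to write or modify path: <path>. ..."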


class S3FileSystem(object):

  def __init__(self, s3_connection):
    self._s3_connection = s3_connection
    self._filebrowser_action = PERMISSION_ACTION_S3

  def _get_bucket(self, name):
    return self._s3_connection.get_bucket(name)

  def _get_or_create_bucket(self, name):
    try:
      bucket = self._get_bucket(name)
    except BotoClientError, e:
      raise S3FileSystemException(_('Failed to create bucket named "%s": %s') % (name, e.reason))
    except S3ResponseError, e:
      if e.status == 403 or e.status == 301:
        raise S3FileSystemException(_('User is not authorized to access bucket named "%s". '
                                      'If you are attempting to create a bucket, this bucket name is already reserved.') % name)
      elif e.status == 404:
        kwargs = {}
        if self._get_location():
          kwargs['location'] = self._get_location()
        bucket = self._create_bucket(name, **kwargs)
      elif e.status == 400:
        raise S3FileSystemException(_('Failed to create bucket named "%s": %s') % (name, e.reason))
      else:
        raise S3FileSystemException(e.message or e.reason)
    return bucket

  def _create_bucket(self, name, **kwargs):
    # The S3 API rejects requests that specify a CreateBucketConfiguration for us-east-1,
    # and boto sends CreateBucketConfiguration whenever the location is not the default.
    # Mapping us-east-1 to the default (empty) location avoids the error.
    # More information: https://github.com/boto/boto3/issues/125
    if kwargs.get('location') == 'us-east-1':
      kwargs['location'] = ''
    return self._s3_connection.create_bucket(name, **kwargs)
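
  # For example (illustrative): _create_bucket('logs', location='eu-west-1') pins the new
  # bucket to eu-west-1, while _create_bucket('logs', location='us-east-1') is rewritten
  # above to location='' so boto omits CreateBucketConfiguration entirely.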

  def _delete_bucket(self, name):
    try:
      # Verify that the bucket exists and that the user has permission to access it
      bucket = self._get_bucket(name)
      # Delete the keys from the bucket first
      for key in bucket.list():
        key.delete()
      self._s3_connection.delete_bucket(name)
      LOG.info('Successfully deleted bucket named "%s" and all its contents.' % name)
    except S3ResponseError, e:
      if e.status == 403:
        raise S3FileSystemException(_('User is not authorized to access bucket named "%s". '
                                      'If you are attempting to create a bucket, this bucket name is already reserved.') % name)
      else:
        raise S3FileSystemException(e.message or e.reason)

  def _get_key(self, path, validate=True):
    bucket_name, key_name = s3.parse_uri(path)[:2]
    bucket = self._get_bucket(bucket_name)
    try:
      return bucket.get_key(key_name, validate=validate)
    except BotoClientError, e:
      raise S3FileSystemException(_('Failed to access path at "%s": %s') % (path, e.reason))
    except S3ResponseError, e:
      if e.status in (301, 400):
        raise S3FileSystemException(_('Failed to access path: "%s" '
                                      'Check that you have access to read this bucket and that the region is correct: %s') % (path, e.message or e.reason))
      elif e.status == 403:
        raise S3FileSystemException(_('User is not authorized to access path at "%s".') % path)
      else:
        raise S3FileSystemException(e.message or e.reason)

  def _get_location(self):
    if get_default_region() in get_locations():
      return get_default_region()
    else:
      return Location.DEFAULT

  def _stats(self, path):
    if s3.is_root(path):
      return S3Stat.for_s3_root()

    try:
      key = self._get_key(path, validate=True)
    except BotoClientError, e:
      raise S3FileSystemException(_('Failed to access path "%s": %s') % (path, e.reason))
    except S3ResponseError, e:
      if e.status == 404:
        return None
      elif e.status == 403:
        raise S3FileSystemException(_('User is not authorized to access path: "%s"') % path)
      else:
        raise S3FileSystemException(_('Failed to access path "%s": %s') % (path, e.reason))

    if key is None:
      key = self._get_key(path, validate=False)
    return self._stats_key(key)

  @staticmethod
  def _stats_key(key):
    if key.size is not None:
      is_directory_name = not key.name or key.name[-1] == '/'
      return S3Stat.from_key(key, is_dir=is_directory_name)
    else:
      key.name = S3FileSystem._append_separator(key.name)
      ls = key.bucket.get_all_keys(prefix=key.name, max_keys=1)
      if len(ls) > 0:
        return S3Stat.from_key(key, is_dir=True)
    return None

  @staticmethod
  def _append_separator(path):
    if path and not path.endswith('/'):
      path += '/'
    return path

  @staticmethod
  def _cut_separator(path):
    return path.endswith('/') and path[:-1] or path
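
  # Illustrative behavior of the separator helpers:
  #   _append_separator('s3a://bucket/dir') -> 's3a://bucket/dir/'
  #   _cut_separator('s3a://bucket/dir/')   -> 's3a://bucket/dir'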

  @staticmethod
  def isroot(path):
    return s3.is_root(path)

  @staticmethod
  def join(*comp_list):
    return s3.join(*comp_list)

  @staticmethod
  def normpath(path):
    return normpath(path)

  def netnormpath(self, path):
    return normpath(path)

  @staticmethod
  def parent_path(path):
    parent_dir = S3FileSystem._append_separator(path)
    if not s3.is_root(parent_dir):
      bucket_name, key_name, basename = s3.parse_uri(path)
      if not basename:  # bucket is top-level so return root
        parent_dir = S3A_ROOT
      else:
        bucket_path = '%s%s' % (S3A_ROOT, bucket_name)
        key_path = '/'.join(key_name.split('/')[:-1])
        parent_dir = s3.abspath(bucket_path, key_path)
    return parent_dir
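
  # Illustrative (hypothetical paths; exact trailing-slash handling depends on s3.abspath):
  #   parent_path('s3a://bucket/dir/file') -> the 's3a://bucket/dir' prefix
  #   parent_path('s3a://bucket')          -> 's3a://' (a bucket's parent is the root)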

  @translate_s3_error
  def open(self, path, mode='r'):
    key = self._get_key(path, validate=True)
    if key is None:
      raise S3FileSystemException("No such file or directory: '%s'" % path)
    return s3file.open(key, mode=mode)

  @translate_s3_error
  def read(self, path, offset, length):
    fh = self.open(path, 'r')
    fh.seek(offset, os.SEEK_SET)
    return fh.read(length)

  @translate_s3_error
  def isfile(self, path):
    stat = self._stats(path)
    if stat is None:
      return False
    return not stat.isDir

  @translate_s3_error
  def isdir(self, path):
    stat = self._stats(path)
    if stat is None:
      return False
    return stat.isDir

  @translate_s3_error
  def exists(self, path):
    return self._stats(path) is not None

  @translate_s3_error
  def stats(self, path):
    path = normpath(path)
    stats = self._stats(path)
    if stats:
      return stats
    raise S3FileSystemException("No such file or directory: '%s'" % path)

  @translate_s3_error
  def listdir_stats(self, path, glob=None):
    if glob is not None:
      raise NotImplementedError(_("Option `glob` is not implemented"))

    if s3.is_root(path):
      try:
        return sorted([S3Stat.from_bucket(b) for b in self._s3_connection.get_all_buckets()], key=lambda x: x.name)
      except S3FileSystemException, e:
        raise e
      except S3ResponseError, e:
        raise S3FileSystemException(_('Failed to retrieve buckets: %s') % e.reason)
      except Exception, e:
        raise S3FileSystemException(_('Failed to retrieve buckets: %s') % e)

    bucket_name, prefix = s3.parse_uri(path)[:2]
    bucket = self._get_bucket(bucket_name)
    prefix = self._append_separator(prefix)
    res = []
    for item in bucket.list(prefix=prefix, delimiter='/'):
      if isinstance(item, Prefix):
        res.append(S3Stat.from_key(Key(item.bucket, item.name), is_dir=True))
      else:
        if item.name == prefix:
          continue
        res.append(self._stats_key(item))
    return res

  @translate_s3_error
  def listdir(self, path, glob=None):
    return [s3.parse_uri(x.path)[2] for x in self.listdir_stats(path, glob)]
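
  # Listing semantics (roughly): with delimiter='/', boto yields Prefix objects for the
  # common prefixes ("subdirectories") and Key objects for objects directly under the
  # prefix, so for keys 'a/b.txt' and 'a/c/d.txt', listing 's3a://bucket/a' yields the
  # file 'b.txt' plus the directory prefix 'a/c/'.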

  @translate_s3_error
  @auth_error_handler
  def rmtree(self, path, skipTrash=True):
    if not skipTrash:
      raise NotImplementedError(_('Moving to trash is not implemented for S3'))

    bucket_name, key_name = s3.parse_uri(path)[:2]
    if bucket_name and not key_name:
      self._delete_bucket(bucket_name)
    else:
      key = self._get_key(path, validate=False)
      if key.exists():
        to_delete = iter([key])
      else:
        to_delete = iter([])

      if self.isdir(path):
        # Append a '/' so that removing 's3://b/a' cannot also remove 's3://b/a_new'
        prefix = self._append_separator(key.name)
        keys = key.bucket.list(prefix=prefix)
        to_delete = itertools.chain(keys, to_delete)
      result = key.bucket.delete_keys(to_delete)
      if result.errors:
        msg = "%d errors occurred while attempting to delete the following S3 paths:\n%s" % (
          len(result.errors), '\n'.join(['%s: %s' % (error.key, error.message) for error in result.errors])
        )
        LOG.error(msg)
        raise S3FileSystemException(msg)

  @translate_s3_error
  @auth_error_handler
  def remove(self, path, skip_trash=True):
    self.rmtree(path, skipTrash=skip_trash)

  def restore(self, *args, **kwargs):
    raise NotImplementedError(_('Moving to trash is not implemented for S3'))

  def filebrowser_action(self):
    return self._filebrowser_action

  @translate_s3_error
  @auth_error_handler
  def mkdir(self, path, *args, **kwargs):
    """
    Creates a directory and any parent directories if necessary.

    S3 has no real directories, so in practice this creates an empty object
    with the key s3://[bucket]/[path]/
    """
    bucket_name, key_name = s3.parse_uri(path)[:2]
    if not BUCKET_NAME_PATTERN.match(bucket_name):
      raise S3FileSystemException(_('Invalid bucket name: %s') % bucket_name)

    try:
      self._get_or_create_bucket(bucket_name)
    except S3FileSystemException, e:
      raise e
    except S3ResponseError, e:
      raise S3FileSystemException(_('Failed to create S3 bucket "%s": %s') % (bucket_name, e.reason))
    except Exception, e:
      raise S3FileSystemException(_('Failed to create S3 bucket "%s": %s') % (bucket_name, e))

    stats = self._stats(path)
    if stats:
      if stats.isDir:
        return None
      else:
        raise S3FileSystemException("'%s' already exists and is not a directory" % path)
    path = self._append_separator(path)  # A folder key must end with '/'
    self.create(path)  # Create an empty object
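
  # For example (hypothetical path): mkdir('s3a://my-bucket/logs/2024') creates the
  # bucket if needed, then writes a zero-byte object with the key 'logs/2024/', which
  # is how the "directory" later shows up in listings.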

  @translate_s3_error
  @auth_error_handler
  def copy(self, src, dst, recursive=False, *args, **kwargs):
    self._copy(src, dst, recursive=recursive, use_src_basename=True)

  @translate_s3_error
  @auth_error_handler
  def copyfile(self, src, dst, *args, **kwargs):
    if self.isdir(dst):
      raise S3FileSystemException("Copy dst '%s' is a directory" % dst)
    self._copy(src, dst, recursive=False, use_src_basename=False)

  @translate_s3_error
  @auth_error_handler
  def copy_remote_dir(self, src, dst, *args, **kwargs):
    self._copy(src, dst, recursive=True, use_src_basename=False)

  def _copy(self, src, dst, recursive, use_src_basename):
    src_st = self.stats(src)
    if src_st.isDir and not recursive:
      return  # Skip directories when not copying recursively

    dst = s3.abspath(src, dst)
    dst_st = self._stats(dst)
    if src_st.isDir and dst_st and not dst_st.isDir:
      raise S3FileSystemException("Cannot overwrite non-directory '%s' with directory '%s'" % (dst, src))

    src_bucket, src_key = s3.parse_uri(src)[:2]
    dst_bucket, dst_key = s3.parse_uri(dst)[:2]

    keep_src_basename = use_src_basename and dst_st and dst_st.isDir
    src_bucket = self._get_bucket(src_bucket)
    dst_bucket = self._get_bucket(dst_bucket)

    if keep_src_basename:
      cut = len(posixpath.dirname(src_key))  # Length of the parent directory prefix to cut off
      if cut:
        cut += 1
    else:
      cut = len(src_key)
      if not src_key.endswith('/'):
        cut += 1

    for key in src_bucket.list(prefix=src_key):
      if not key.name.startswith(src_key):
        raise S3FileSystemException(_("Invalid key to transform: %s") % key.name)
      dst_name = posixpath.normpath(s3.join(dst_key, key.name[cut:]))
      if self.isdir(normpath(self.join(S3A_ROOT, key.bucket.name, key.name))):
        dst_name = self._append_separator(dst_name)
      key.copy(dst_bucket, dst_name)
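
  # Illustrative prefix arithmetic for _copy: with src_key='dir' copied into an existing
  # directory (use_src_basename=True), cut = len(posixpath.dirname('dir')) = 0, so the
  # key 'dir/x.txt' lands at '<dst_key>/dir/x.txt'; with use_src_basename=False,
  # cut = len('dir') + 1 = 4 and the same key lands at '<dst_key>/x.txt'.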

  @translate_s3_error
  @auth_error_handler
  def rename(self, old, new):
    new = s3.abspath(old, new)
    self.copy(old, new, recursive=True)
    self.rmtree(old, skipTrash=True)
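
  # Note: S3 has no server-side rename, so rename() above is a recursive copy followed
  # by a delete of the source; for large trees the cost is proportional to the data
  # size, not a constant-time metadata operation.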

  @translate_s3_error
  @auth_error_handler
  def rename_star(self, old_dir, new_dir):
    if not self.isdir(old_dir):
      raise S3FileSystemException("'%s' is not a directory" % old_dir)
    if self.isfile(new_dir):
      raise S3FileSystemException("'%s' is not a directory" % new_dir)
    ls = self.listdir(old_dir)
    for entry in ls:
      self.rename(s3.join(old_dir, entry), s3.join(new_dir, entry))

  @translate_s3_error
  @auth_error_handler
  def create(self, path, overwrite=False, data=None):
    key = self._get_key(path, validate=False)
    key.set_contents_from_string(data or '', replace=overwrite)

  @translate_s3_error
  @auth_error_handler
  def copyFromLocal(self, local_src, remote_dst, *args, **kwargs):
    local_src = self._cut_separator(local_src)
    remote_dst = self._cut_separator(remote_dst)

    def _copy_file(src, dst):
      key = self._get_key(dst, validate=False)
      with open(src, 'rb') as fp:  # Binary mode; the handle is closed after the upload
        key.set_contents_from_file(fp)

    if os.path.isdir(local_src):
      for (local_dir, sub_dirs, files) in os.walk(local_src, followlinks=False):
        remote_dir = local_dir.replace(local_src, remote_dst)
        if not sub_dirs and not files:
          self.mkdir(remote_dir)
        else:
          for file_name in files:
            _copy_file(os.path.join(local_dir, file_name), os.path.join(remote_dir, file_name))
    else:
      file_name = os.path.split(local_src)[1]
      if self.isdir(remote_dst):
        remote_file = os.path.join(remote_dst, file_name)
      else:
        remote_file = remote_dst
      _copy_file(local_src, remote_file)

  @translate_s3_error
  def upload(self, file, path, *args, **kwargs):
    pass  # The upload is handled by S3FileUploadHandler

  @translate_s3_error
  @auth_error_handler
  def append(self, path, data):
    key = self._get_key(path, validate=False)
    current_data = key.get_contents_as_string() or ''
    new_data = data or ''
    key.set_contents_from_string(current_data + new_data, replace=True)
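
  # Note: S3 objects are immutable, so append() above downloads the entire object and
  # re-uploads it with the new data attached; the cost grows with the object's size.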

  @translate_s3_error
  def check_access(self, path, permission='READ'):
    permission = permission.upper()
    try:
      if permission == 'WRITE':
        tmp_file = 'temp_%s' % str(int(time.time() * 1000))
        tmp_path = '%s/%s' % (path, tmp_file)
        self.create(path=tmp_path, overwrite=True)
        self.remove(path=tmp_path)
      else:
        self.open(path)
    except Exception, e:
      LOG.warn('S3 check_access encountered an error verifying %s permission at path "%s": %s' % (permission, path, str(e)))
      return False
    return True

  def setuser(self, user):
    pass  # The user concept is not meaningful for this S3 implementation

  def get_upload_chuck_size(self):
    from hadoop.conf import UPLOAD_CHUNK_SIZE  # Imported here to avoid a circular dependency
    return UPLOAD_CHUNK_SIZE.get()
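

# Minimal usage sketch (assumes boto credentials are configured in the environment;
# the bucket and paths are hypothetical):
#
#   import boto
#   fs = S3FileSystem(boto.connect_s3())
#   fs.mkdir('s3a://my-bucket/demo')
#   fs.create('s3a://my-bucket/demo/hello.txt', overwrite=True, data='hello')
#   print fs.read('s3a://my-bucket/demo/hello.txt', 0, DEFAULT_READ_SIZE)
#   fs.rmtree('s3a://my-bucket/demo', skipTrash=True)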