
[oozie] A cloned job has a wrong deployment directory

Copy all files of a cloned workflow
Fix cloned coordinator deployment directory
Added a new copy_remote_dir to HDFS lib
Added missing file for generating graph
Fix oozie_setup to also copy the content of all directories
Added new oozie and lib hdfs tests
Romain Rigaux 13 years ago
parent
commit
1adc980d8d
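
The core of the fix: a cloned job previously kept its source's deployment_dir, so both jobs pointed at the same HDFS directory. The clone now blanks the field (a fresh directory is generated on save) and copies the old directory's contents into the new one. A condensed sketch of the corrected logic, using names from the models.py diff below (not the full implementation; error handling elided):

    # Condensed from Workflow.clone() in apps/oozie/src/oozie/models.py.
    source_deployment_dir = self.deployment_dir  # remember before the reset
    copy.deployment_dir = ''                     # so a fresh dir is generated on save()
    copy.save()
    fs.copy_remote_dir(source_deployment_dir, copy.deployment_dir,
                       owner=copy.owner)         # duplicate the workflow files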

+ 0 - 2
apps/jobsub/src/jobsub/management/commands/jobsub_setup.py

@@ -29,8 +29,6 @@ from django.core.management.base import NoArgsCommand
 from django.contrib.auth.models import User
 
 from hadoop import cluster
-from hadoop.fs.hadoopfs import Hdfs
-import hadoop.conf
 import jobsub.conf
 from jobsub.submit import Submission
 

+ 62 - 0
apps/oozie/examples/sleep/workflow.xml

@@ -0,0 +1,62 @@
+
+<workflow-app name="SleepWorkflow" xmlns="uri:oozie:workflow:0.2">
+    <start to="Sleep"/>
+    <action name="Sleep">
+        <map-reduce>
+            <job-tracker>${jobTracker}</job-tracker>
+            <name-node>${nameNode}</name-node>
+            <configuration>
+                <property>
+                    <name>mapred.reduce.tasks</name>
+                    <value>1</value>
+                </property>
+                <property>
+                    <name>mapred.mapper.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob</value>
+                </property>
+                <property>
+                    <name>mapred.reducer.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob</value>
+                </property>
+                <property>
+                    <name>mapred.mapoutput.key.class</name>
+                    <value>org.apache.hadoop.io.IntWritable</value>
+                </property>
+                <property>
+                    <name>mapred.mapoutput.value.class</name>
+                    <value>org.apache.hadoop.io.NullWritable</value>
+                </property>
+                <property>
+                    <name>mapred.output.format.class</name>
+                    <value>org.apache.hadoop.mapred.lib.NullOutputFormat</value>
+                </property>
+                <property>
+                    <name>mapred.input.format.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob$SleepInputFormat</value>
+                </property>
+                <property>
+                    <name>mapred.partitioner.class</name>
+                    <value>org.apache.hadoop.examples.SleepJob</value>
+                </property>
+                <property>
+                    <name>mapred.speculative.execution</name>
+                    <value>false</value>
+                </property>
+                <property>
+                    <name>sleep.job.map.sleep.time</name>
+                    <value>0</value>
+                </property>
+                <property>
+                    <name>sleep.job.reduce.sleep.time</name>
+                    <value>${REDUCER_SLEEP_TIME}</value>
+                </property>
+            </configuration>
+        </map-reduce>
+        <ok to="end"/>
+        <error to="kill"/>
+    </action>
+    <kill name="kill">
+        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
+    </kill>
+    <end name="end"/>
+</workflow-app>

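Note: the workflow above leaves ${jobTracker}, ${nameNode} and ${REDUCER_SLEEP_TIME} to be resolved at submission time. A hedged sketch of the kind of parameter mapping a submission needs (the endpoint values are illustrative assumptions, not part of this commit):

    # Illustrative only: substitute the endpoints of the target cluster.
    properties = {
        'jobTracker': 'localhost:8021',       # assumed MR1 JobTracker address
        'nameNode': 'hdfs://localhost:8020',  # assumed NameNode URI
        'REDUCER_SLEEP_TIME': '100',          # sleep time used by the SleepJob reducer
    }
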
+ 2 - 2
apps/oozie/src/oozie/fixtures/initial_data.json

@@ -5,7 +5,7 @@
     "fields": {
     "fields": {
       "is_shared": true,
       "is_shared": true,
       "name": "SleepWorkflow",
       "name": "SleepWorkflow",
-      "deployment_dir": "/user/hue/oozie/demo/sleep",
+      "deployment_dir": "/user/hue/oozie/examples/sleep",
       "schema_version": "",
       "schema_version": "",
       "last_modified": "2012-08-20 13:13:34",
       "last_modified": "2012-08-20 13:13:34",
       "owner": 1,
       "owner": 1,
@@ -18,7 +18,7 @@
     "fields": {
     "fields": {
       "is_shared": true,
       "is_shared": true,
       "name": "DailySleep",
       "name": "DailySleep",
-      "deployment_dir": "/user/hue/oozie/demo/sleep",
+      "deployment_dir": "/user/hue/oozie/examples/sleep",
       "schema_version": "",
       "schema_version": "",
       "last_modified": "2012-08-20 13:56:53",
       "last_modified": "2012-08-20 13:56:53",
       "owner": 1,
       "owner": 1,

+ 45 - 16
apps/oozie/src/oozie/management/commands/oozie_setup.py

@@ -24,9 +24,9 @@ from django.core.management.base import NoArgsCommand
 from django.utils.translation import ugettext as _
 
 from hadoop import cluster
+from hadoop.fs.hadoopfs import Hdfs
 
-from oozie.conf import LOCAL_SAMPLE_DATA_DIR, LOCAL_SAMPLE_DIR
-from oozie.models import Workflow
+from oozie.conf import LOCAL_SAMPLE_DATA_DIR, LOCAL_SAMPLE_DIR, REMOTE_DEPLOYMENT_DIR, REMOTE_SAMPLE_DIR
 
 
 LOG = logging.getLogger(__name__)
@@ -34,41 +34,48 @@ LOG = logging.getLogger(__name__)
 
 class Command(NoArgsCommand):
   def handle_noargs(self, **options):
-    remote_fs = cluster.get_hdfs()
-    remote_dir = Workflow.objects.create_data_dir(remote_fs)
+    fs = cluster.get_hdfs()
+    remote_dir = create_data_dir(fs)
 
     # Copy examples binaries
-    for demo in ('lib', 'pig'):
-      local_dir = posixpath.join(LOCAL_SAMPLE_DIR.get(), demo)
-      remote_data_dir = posixpath.join(remote_dir, demo)
+    for name in os.listdir(LOCAL_SAMPLE_DIR.get()):
+      local_dir = posixpath.join(LOCAL_SAMPLE_DIR.get(), name)
+      remote_data_dir = posixpath.join(remote_dir, name)
       LOG.info(_('Copying examples %(local_dir)s to %(remote_data_dir)s\n') % {
                   'local_dir': local_dir, 'remote_data_dir': remote_data_dir})
-      copy_dir(local_dir, remote_fs, remote_data_dir)
+      copy_dir(fs, local_dir, remote_data_dir)
 
     # Copy sample data
     local_dir = LOCAL_SAMPLE_DATA_DIR.get()
     remote_data_dir = posixpath.join(remote_dir, 'data')
     LOG.info(_('Copying data %(local_dir)s to %(remote_data_dir)s\n') % {
                 'local_dir': local_dir, 'remote_data_dir': remote_data_dir})
-    copy_dir(local_dir, remote_fs, remote_data_dir)
+    copy_dir(fs, local_dir, remote_data_dir)
 
     # Load jobs
     management.call_command('loaddata', 'apps/oozie/src/oozie/fixtures/initial_data.json', verbosity=2)
 
 
-def copy_dir(local_dir, remote_fs, remote_dir, mode=755):
-  remote_fs.do_as_user(remote_fs.DEFAULT_USER, remote_fs.mkdir, remote_dir, mode=mode)
+
+# This should probably be refactored and some parts moved to the HDFS lib. Jobsub could be updated too.
+
+def copy_dir(fs, local_dir, remote_dir, mode=0755):
+  fs.do_as_user(fs.DEFAULT_USER, fs.mkdir, remote_dir, mode=mode)
 
   for f in os.listdir(local_dir):
     local_src = os.path.join(local_dir, f)
     remote_dst = posixpath.join(remote_dir, f)
-    copy_file(local_src, remote_fs, remote_dst)
+    if os.path.isdir(local_src):
+      copy_dir(fs, local_src, remote_dst, mode)
+    else:
+      copy_file(fs, local_src, remote_dst)
 
 
 CHUNK_SIZE = 1024 * 1024
 
-def copy_file(local_src, remote_fs, remote_dst):
-  if remote_fs.exists(remote_dst):
+def copy_file(fs, local_src, remote_dst):
+  if fs.exists(remote_dst):
     LOG.info(_('%(remote_dst)s already exists.  Skipping.') % {'remote_dst': remote_dst})
     return
   else:
@@ -78,10 +85,10 @@ def copy_file(local_src, remote_fs, remote_dst):
     src = file(local_src)
     try:
       try:
-        remote_fs.do_as_user(remote_fs.DEFAULT_USER, remote_fs.create, remote_dst, permission=01755)
+        fs.do_as_user(fs.DEFAULT_USER, fs.create, remote_dst, permission=01755)
         chunk = src.read(CHUNK_SIZE)
         while chunk:
-          remote_fs.do_as_user(remote_fs.DEFAULT_USER, remote_fs.append, remote_dst, chunk)
+          fs.do_as_user(fs.DEFAULT_USER, fs.append, remote_dst, chunk)
           chunk = src.read(CHUNK_SIZE)
         LOG.info(_('Copied %s -> %s') % (local_src, remote_dst))
       except:
@@ -91,3 +98,25 @@ def copy_file(local_src, remote_fs, remote_dst):
       src.close()
   else:
     LOG.info(_('Skipping %s (not a file)') % local_src)
+
+
+def create_data_dir(fs):
+  # If needed, create the remote home, deployment and data directories
+  directories = (REMOTE_DEPLOYMENT_DIR.get(), REMOTE_SAMPLE_DIR.get())
+  user = fs.user
+
+  try:
+    fs.setuser(fs.DEFAULT_USER)
+    for directory in directories:
+      if not fs.exists(directory):
+        remote_home_dir = Hdfs.join('/user', fs.user)
+        if directory.startswith(remote_home_dir):
+          # Home is 755
+          fs.create_home_dir(remote_home_dir)
+        # Shared by all the users
+        fs.mkdir(directory, 01777)
+        fs.chmod(directory, 01777) # To remove after https://issues.apache.org/jira/browse/HDFS-3491
+  finally:
+    fs.setuser(user)
+
+  return REMOTE_SAMPLE_DIR.get()

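Since this is a standard Django management command, it can also be run programmatically, e.g. from a test or an install script. A minimal sketch, assuming Hue's Django settings are already loaded:

    # Runs the example installation end to end: creates the remote directories,
    # copies the example workflows and sample data, then loads the fixture.
    from django.core import management

    management.call_command('oozie_setup')
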
+ 14 - 25
apps/oozie/src/oozie/models.py

@@ -32,11 +32,13 @@ from django.contrib.auth.models import User
 from django.utils.translation import ugettext as _, ugettext_lazy as _t
 
 from desktop.lib import django_mako
+from hadoop.fs.exceptions import WebHdfsException
 
 from hadoop.fs.hadoopfs import Hdfs
 from liboozie.submittion import Submission
 
-from oozie.conf import REMOTE_SAMPLE_DIR, REMOTE_DEPLOYMENT_DIR
+from oozie.management.commands import oozie_setup
+from oozie.conf import REMOTE_SAMPLE_DIR
 from timezones import TIMEZONES
 
 
@@ -136,33 +138,12 @@ class WorkflowManager(models.Manager):
     workflow.end = end
     workflow.save()
 
-    WorkflowManager.create_data_dir(fs)
+    # Recheck that the deployment dir exists
+    oozie_setup.create_data_dir(fs)
     Submission(workflow.owner, workflow, fs, {})._create_deployment_dir()
 
     return workflow
 
-  @classmethod
-  def create_data_dir(cls, fs):
-    # If needed, create the remote home, deployment and data directories
-    directories = (REMOTE_DEPLOYMENT_DIR.get(), REMOTE_SAMPLE_DIR.get())
-    user = fs.user
-
-    try:
-      fs.setuser(fs.DEFAULT_USER)
-      for directory in directories:
-        if not fs.exists(directory):
-          remote_home_dir = Hdfs.join('/user', fs.user)
-          if directory.startswith(remote_home_dir):
-            # Home is 755
-            fs.create_home_dir(remote_home_dir)
-          # Shared by all the users
-          fs.mkdir(directory, 01777)
-          fs.chmod(directory, 01777) # To remove after https://issues.apache.org/jira/browse/HDFS-3491
-    finally:
-      fs.setuser(user)
-
-    return REMOTE_SAMPLE_DIR.get()
-
 
 class Workflow(Job):
   """
@@ -350,7 +331,8 @@ class Workflow(Job):
 
     node.delete()
 
-  def clone(self, new_owner=None):
+  def clone(self, fs, new_owner=None):
+    source_deployment_dir = self.deployment_dir # Remember the source dir before the copy resets it
     nodes = self.node_set.all()
     links = Link.objects.filter(parent__workflow=self)
 
@@ -358,6 +340,7 @@ class Workflow(Job):
     copy.pk = None
     copy.id = None
     copy.name += '-copy'
+    copy.deployment_dir = ''
     if new_owner is not None:
       copy.owner = new_owner
     copy.save()
@@ -383,6 +366,11 @@ class Workflow(Job):
     copy.start = old_nodes_mapping[self.start.id]
     copy.end = old_nodes_mapping[self.end.id]
     copy.save()
+
+    try:
+      fs.copy_remote_dir(source_deployment_dir, copy.deployment_dir, owner=copy.owner)
+    except WebHdfsException, e:
+      LOG.error('The copy of the deployment directory failed: %s', e)
 
     return copy
 
@@ -875,6 +863,7 @@ class Coordinator(Job):
     copy.pk = None
     copy.id = None
     copy.name += '-copy'
+    copy.deployment_dir = ''
     if new_owner is not None:
       copy.owner = new_owner
     copy.save()

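Callers of Workflow.clone() must now pass a filesystem handle, as the views change below does. A minimal sketch of the new call-site contract (mirroring the assertions added in tests.py):

    # clone() now needs an fs handle so it can duplicate the deployment dir.
    clone = workflow.clone(request.fs, new_owner=request.user)
    assert clone.deployment_dir != workflow.deployment_dir
    assert clone.deployment_dir != ''
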
+ 71 - 0
apps/oozie/src/oozie/templates/editor/gen/workflow-graph-readonly.xml.mako

@@ -0,0 +1,71 @@
+## Licensed to Cloudera, Inc. under one
+## or more contributor license agreements.  See the NOTICE file
+## distributed with this work for additional information
+## regarding copyright ownership.  Cloudera, Inc. licenses this file
+## to you under the Apache License, Version 2.0 (the
+## "License"); you may not use this file except in compliance
+## with the License.  You may obtain a copy of the License at
+##
+##     http://www.apache.org/licenses/LICENSE-2.0
+##
+## Unless required by applicable law or agreed to in writing, software
+## distributed under the License is distributed on an "AS IS" BASIS,
+## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+## See the License for the specific language governing permissions and
+## limitations under the License.
+
+<%!
+from django.utils.translation import ugettext as _
+%>
+
+<%namespace name="graph" file="workflow-graph.xml.mako" />
+<%namespace name="utils" file="../../utils.inc.mako" />
+
+
+<%def name="print_readonly_node(form)">
+<%
+  node = form.instance.get_full_node()
+%>
+<div class="row-fluid">
+  % if not node.is_visible():
+     <div class="hide">
+         ${ form }
+     </div>
+  % else:
+    <div class="span12 action">
+      % for hidden in form.hidden_fields():
+        ${ hidden }
+      % endfor
+      <div class="row-fluid">
+          % if node.can_edit():
+          <div class="span10 action-link" data-edit="${ node.get_edit_link() }" title="${ _('View') }">
+        % else:
+          <div class="span10">
+        % endif
+            <span class="label label-info"><b>${ node }</b></span>
+        </div>
+          <div class="span2"></div>
+      </div>
+      <div class="row-fluid">
+        % if node.can_edit():
+          <div class="span10 action-link" data-edit="${ node.get_edit_link() }" title="${ _('View') }">
+        % else:
+          <div class="span10">
+        % endif
+
+        <span class="">${ node.node_type }</span>
+
+        <br/>
+        ${ node.description }
+      </div>
+      <div class="span2">
+      </div>
+      </div>
+      <div class="row-fluid">
+      </div>
+    </div>
+  % endif
+</div>
+</%def>
+
+${ graph.display_graph(nodes, print_readonly_node) }

+ 12 - 10
apps/oozie/src/oozie/tests.py

@@ -198,9 +198,8 @@ class TestEditor:
     for node in wf2.node_set.all():
       assert_false(node.id in node_ids)
 
-    raise SkipTest
-    # To Fix
     assert_not_equal(self.wf.deployment_dir, wf2.deployment_dir)
+    assert_not_equal('', wf2.deployment_dir)
 
 
   def test_clone_action(self):
@@ -484,26 +483,29 @@ class TestEditor:
 
     response = self.c.post(reverse('oozie:clone_coordinator', args=[coord.id]), {}, follow=True)
 
-    wf2 = Coordinator.objects.latest('id')
-    assert_not_equal(coord.id, wf2.id)
+    coord2 = Coordinator.objects.latest('id')
+    assert_not_equal(coord.id, coord2.id)
     assert_equal(coordinator_count + 1, Coordinator.objects.count(), response)
 
-    assert_equal(coord.dataset_set.count(), wf2.dataset_set.count())
-    assert_equal(coord.datainput_set.count(), wf2.datainput_set.count())
-    assert_equal(coord.dataoutput_set.count(), wf2.dataoutput_set.count())
+    assert_equal(coord.dataset_set.count(), coord2.dataset_set.count())
+    assert_equal(coord.datainput_set.count(), coord2.datainput_set.count())
+    assert_equal(coord.dataoutput_set.count(), coord2.dataoutput_set.count())
 
     ds_ids = set(coord.dataset_set.values_list('id', flat=True))
-    for node in wf2.dataset_set.all():
+    for node in coord2.dataset_set.all():
       assert_false(node.id in ds_ids)
 
     data_input_ids = set(coord.datainput_set.values_list('id', flat=True))
-    for node in wf2.datainput_set.all():
+    for node in coord2.datainput_set.all():
       assert_false(node.id in data_input_ids)
 
     data_output_ids = set(coord.dataoutput_set.values_list('id', flat=True))
-    for node in wf2.dataoutput_set.all():
+    for node in coord2.dataoutput_set.all():
       assert_false(node.id in data_output_ids)
 
+    assert_not_equal(coord.deployment_dir, coord2.deployment_dir)
+    assert_not_equal('', coord2.deployment_dir)
+
 
   def test_coordinator_permissions(self):
     coord = create_coordinator(self.wf)

+ 4 - 3
apps/oozie/src/oozie/views/editor.py

@@ -276,7 +276,7 @@ def clone_workflow(request, workflow):
   if request.method != 'POST':
     raise PopupException(_('A POST request is required.'))
 
-  clone = workflow.clone(request.user)
+  clone = workflow.clone(request.fs, request.user)
 
   response = {'url': reverse('oozie:edit_workflow', kwargs={'workflow': clone.id})}
 
@@ -343,8 +343,9 @@ def new_action(request, workflow, node_type, parent_action_id):
       'node_type': node_type,
       'properties_hint': _STD_PROPERTIES_JSON,
       'form_url': reverse('oozie:new_action', kwargs={'workflow': workflow.id,
-                                                     'node_type': node_type,
-                                                     'parent_action_id': parent_action_id}),
+                                                      'node_type': node_type,
+                                                      'parent_action_id': parent_action_id}),
+      'can_edit_action': True,
     })
 
 

+ 41 - 1
desktop/libs/hadoop/src/hadoop/fs/test_webhdfs.py

@@ -115,7 +115,6 @@ def test_seek_across_blocks():
   finally:
     fs.remove("/fortest-blocks.txt")
 
-
 @attr('requires_hadoop')
 def test_exceptions():
   """
@@ -134,6 +133,47 @@ def test_exceptions():
   assert_raises(WebHdfsException, f.read)
   assert_raises(IOError, fs.open, "/test/doesnotexist.txt")
 
+@attr('requires_hadoop')
+def test_copy_remote_dir():
+  cluster = pseudo_hdfs4.shared_cluster()
+  fs = cluster.fs
+  fs.setuser(cluster.superuser)
+
+  src_dir = '/copy_remote_dir'
+  fs.mkdir(src_dir)
+  f1 = fs.open("/copy_remote_dir/test_one.txt", "w")
+  f1.write("foo")
+  f1.close()
+  f2 = fs.open("/copy_remote_dir/test_two.txt", "w")
+  f2.write("bar")
+  f2.close()
+
+  new_owner = 'testcopy'
+  new_owner_home = '/user/testcopy'
+  new_owner_dir = new_owner_home + '/test-copy'
+  fs.mkdir(new_owner_home)
+  fs.chown(new_owner_home, new_owner, new_owner)
+
+  fs.copy_remote_dir(src_dir, new_owner_dir, dir_mode=0755, owner=new_owner)
+
+  dir_stat = fs.stats(new_owner_dir)
+  assert_equals(new_owner, dir_stat.user)
+  assert_equals(new_owner, dir_stat.group)
+  assert_equals('40755', '%o' % dir_stat.mode)
+
+  src_stat = fs.listdir_stats(src_dir)
+  dest_stat = fs.listdir_stats(new_owner_dir)
+
+  src_names = set([stat.name for stat in src_stat])
+  dest_names = set([stat.name for stat in dest_stat])
+  assert_true(src_names)
+  assert_equals(src_names, dest_names)
+
+  for stat in dest_stat:
+    assert_equals('testcopy', stat.user)
+    assert_equals('testcopy', stat.group)
+    assert_equals('100644', '%o' % stat.mode)
+
 @attr('requires_hadoop')
 def test_two_files_open():
   """

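Note: like the surrounding tests, test_copy_remote_dir is gated on a live cluster via nose's attr plugin, so it only runs when tests are selected with something like `nosetests -a requires_hadoop` (the exact invocation depends on the local setup).
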
+ 19 - 0
desktop/libs/hadoop/src/hadoop/fs/webhdfs.py

@@ -419,6 +419,23 @@ class WebHdfs(Hdfs):
       offset += cnt
 
 
+  def copy_remote_dir(self, source, destination, dir_mode=0755, owner=None):
+    if owner is None:
+      owner = self.DEFAULT_USER
+    self.do_as_user(owner, self.mkdir, destination, mode=dir_mode)
+    self.do_as_user(owner, self.chmod, destination, mode=dir_mode) # To remove after HDFS-3491
+
+    for stat in self.listdir_stats(source):
+      source_file = stat.path
+      destination_file = posixpath.join(destination, stat.name)
+      if stat.isDir:
+        self.copy_remote_dir(source_file, destination_file, dir_mode, owner)
+      else:
+        self.copyfile(source_file, destination_file)
+        self.do_as_superuser(self.chown, destination_file, owner, owner)
+
+
   @staticmethod
   def urlsplit(url):
     return Hdfs.urlsplit(url)
@@ -427,6 +444,7 @@ class WebHdfs(Hdfs):
   def get_hdfs_path(self, path):
     return posixpath.join(self.fs_defaultfs, path.lstrip('/'))
 
+
   def create_home_dir(self, home_path=None):
     if home_path is None:
       home_path = self.get_home_dir()
@@ -447,6 +465,7 @@ class WebHdfs(Hdfs):
       finally:
         self.setuser(user)
 
+
   def _invoke_with_redirect(self, method, path, params=None, data=None):
     """
     Issue a request, and expect a redirect, and then submit the data to
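
For reference, a minimal usage sketch of the new copy_remote_dir() API, assuming an HDFS cluster configured in Hue and an existing home directory for the target user (the paths and user name are illustrative):

    # Recursively copies the tree: destination directories are created with
    # dir_mode and every copied file is chowned to the given owner.
    from hadoop import cluster

    fs = cluster.get_hdfs()
    fs.copy_remote_dir('/user/hue/oozie/examples/sleep',
                       '/user/bob/oozie/sleep-copy',
                       dir_mode=0755, owner='bob')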