submittion2_tests.py

#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function
from builtins import object
import logging

from nose.plugins.attrib import attr
from nose.tools import assert_equal, assert_true, assert_not_equal, assert_raises

import beeswax

from hadoop import cluster, pseudo_hdfs4
from hadoop.conf import HDFS_CLUSTERS, MR_CLUSTERS, YARN_CLUSTERS

from desktop.conf import ENABLE_ORGANIZATIONS
from desktop.lib.test_utils import clear_sys_caches
from desktop.lib.django_test_util import make_logged_in_client
from desktop.lib.exceptions_renderable import PopupException
from useradmin.views import ensure_home_directory

from oozie.models2 import Node
from oozie.tests import OozieMockBase

from liboozie.conf import USE_LIBPATH_FOR_JARS
from liboozie.credentials import Credentials
from liboozie.credentials_tests import TestCredentials
from liboozie.submission2 import Submission

if ENABLE_ORGANIZATIONS.get():
  from useradmin.models2 import OrganizationUser as User
else:
  from django.contrib.auth.models import User

LOG = logging.getLogger(__name__)


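# Note: test_copy_files below is tagged 'integration' and 'requires_hadoop': it
# expects a live pseudo HDFS/YARN cluster (pseudo_hdfs4.shared_cluster()) and is
# typically excluded from plain unit-test runs.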
@attr('integration')
@attr('requires_hadoop')
def test_copy_files():
  cluster = pseudo_hdfs4.shared_cluster()

  try:
    c = make_logged_in_client()
    user = User.objects.get(username='test')
    ensure_home_directory(cluster.fs, user)

    prefix = '/tmp/test_copy_files'

    if cluster.fs.exists(prefix):
      cluster.fs.rmtree(prefix)

    # Jars in various locations
    deployment_dir = '%s/workspace' % prefix
    external_deployment_dir = '%s/deployment' % prefix
    jar_1 = '%s/udf1.jar' % prefix
    jar_2 = '%s/lib/udf2.jar' % prefix
    jar_3 = '%s/udf3.jar' % deployment_dir
    jar_4 = '%s/lib/udf4.jar' % deployment_dir  # Doesn't move
    jar_5 = 'udf5.jar'
    jar_6 = 'lib/udf6.jar'  # Doesn't move

    cluster.fs.mkdir(prefix)
    cluster.fs.create(jar_1)
    cluster.fs.create(jar_2)
    cluster.fs.create(jar_3)
    cluster.fs.create(jar_4)
    cluster.fs.create(deployment_dir + '/' + jar_5)
    cluster.fs.create(deployment_dir + '/' + jar_6)

    class MockJob(object):
      XML_FILE_NAME = 'workflow.xml'

      def __init__(self):
        self.deployment_dir = deployment_dir
        self.nodes = [
          Node({'id': '1', 'type': 'mapreduce', 'properties': {'jar_path': jar_1}}),
          Node({'id': '2', 'type': 'mapreduce', 'properties': {'jar_path': jar_2}}),
          Node({'id': '3', 'type': 'java', 'properties': {'jar_path': jar_3}}),
          Node({'id': '4', 'type': 'java', 'properties': {'jar_path': jar_4}}),
          # Workspace relative paths
          Node({'id': '5', 'type': 'java', 'properties': {'jar_path': jar_5}}),
          Node({'id': '6', 'type': 'java', 'properties': {'jar_path': jar_6}})
        ]

    submission = Submission(user, job=MockJob(), fs=cluster.fs, jt=cluster.jt)

    submission._copy_files(deployment_dir, "<xml>My XML</xml>", {'prop1': 'val1'})
    submission._copy_files(external_deployment_dir, "<xml>My XML</xml>", {'prop1': 'val1'})

    assert_true(cluster.fs.exists(deployment_dir + '/workflow.xml'), deployment_dir)
    assert_true(cluster.fs.exists(deployment_dir + '/job.properties'), deployment_dir)

    # All sources still there
    assert_true(cluster.fs.exists(jar_1))
    assert_true(cluster.fs.exists(jar_2))
    assert_true(cluster.fs.exists(jar_3))
    assert_true(cluster.fs.exists(jar_4))
    assert_true(cluster.fs.exists(deployment_dir + '/' + jar_5))
    assert_true(cluster.fs.exists(deployment_dir + '/' + jar_6))

    # Lib
    deployment_dir = deployment_dir + '/lib'
    external_deployment_dir = external_deployment_dir + '/lib'

    if USE_LIBPATH_FOR_JARS.get():
      assert_true(jar_1 in submission.properties['oozie.libpath'])
      assert_true(jar_2 in submission.properties['oozie.libpath'])
      assert_true(jar_3 in submission.properties['oozie.libpath'])
      assert_true(jar_4 in submission.properties['oozie.libpath'])
      print(deployment_dir + '/' + jar_5)
      assert_true((deployment_dir + '/' + jar_5) in submission.properties['oozie.libpath'], submission.properties['oozie.libpath'])
      assert_true((deployment_dir + '/' + jar_6) in submission.properties['oozie.libpath'], submission.properties['oozie.libpath'])
    else:
      list_dir_workspace = cluster.fs.listdir(deployment_dir)
      list_dir_deployement = cluster.fs.listdir(external_deployment_dir)

      # All destinations there
      assert_true(cluster.fs.exists(deployment_dir + '/udf1.jar'), list_dir_workspace)
      assert_true(cluster.fs.exists(deployment_dir + '/udf2.jar'), list_dir_workspace)
      assert_true(cluster.fs.exists(deployment_dir + '/udf3.jar'), list_dir_workspace)
      assert_true(cluster.fs.exists(deployment_dir + '/udf4.jar'), list_dir_workspace)
      assert_true(cluster.fs.exists(deployment_dir + '/udf5.jar'), list_dir_workspace)
      assert_true(cluster.fs.exists(deployment_dir + '/udf6.jar'), list_dir_workspace)

      assert_true(cluster.fs.exists(external_deployment_dir + '/udf1.jar'), list_dir_deployement)
      assert_true(cluster.fs.exists(external_deployment_dir + '/udf2.jar'), list_dir_deployement)
      assert_true(cluster.fs.exists(external_deployment_dir + '/udf3.jar'), list_dir_deployement)
      assert_true(cluster.fs.exists(external_deployment_dir + '/udf4.jar'), list_dir_deployement)
      assert_true(cluster.fs.exists(external_deployment_dir + '/udf5.jar'), list_dir_deployement)
      assert_true(cluster.fs.exists(external_deployment_dir + '/udf6.jar'), list_dir_deployement)

      stats_udf1 = cluster.fs.stats(deployment_dir + '/udf1.jar')
      stats_udf2 = cluster.fs.stats(deployment_dir + '/udf2.jar')
      stats_udf3 = cluster.fs.stats(deployment_dir + '/udf3.jar')
      stats_udf4 = cluster.fs.stats(deployment_dir + '/udf4.jar')
      stats_udf5 = cluster.fs.stats(deployment_dir + '/udf5.jar')
      stats_udf6 = cluster.fs.stats(deployment_dir + '/udf6.jar')

      submission._copy_files('%s/workspace' % prefix, "<xml>My XML</xml>", {'prop1': 'val1'})

      assert_not_equal(stats_udf1['fileId'], cluster.fs.stats(deployment_dir + '/udf1.jar')['fileId'])
      assert_not_equal(stats_udf2['fileId'], cluster.fs.stats(deployment_dir + '/udf2.jar')['fileId'])
      assert_not_equal(stats_udf3['fileId'], cluster.fs.stats(deployment_dir + '/udf3.jar')['fileId'])
      assert_equal(stats_udf4['fileId'], cluster.fs.stats(deployment_dir + '/udf4.jar')['fileId'])
      assert_not_equal(stats_udf5['fileId'], cluster.fs.stats(deployment_dir + '/udf5.jar')['fileId'])
      assert_equal(stats_udf6['fileId'], cluster.fs.stats(deployment_dir + '/udf6.jar')['fileId'])

    # Test _create_file()
    submission._create_file(deployment_dir, 'test.txt', data='Test data')
    assert_true(cluster.fs.exists(deployment_dir + '/test.txt'), list_dir_workspace)

  finally:
    try:
      cluster.fs.rmtree(prefix)
    except:
      LOG.exception('failed to remove %s' % prefix)


class MockFs(object):
  def __init__(self, logical_name=None):
    self.fs_defaultfs = 'hdfs://curacao:8020'
    self.logical_name = logical_name if logical_name else ''


class MockJt(object):
  def __init__(self, logical_name=None):
    self.logical_name = logical_name if logical_name else ''


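# Note: MockFs and MockJt above are minimal stand-ins for the HDFS and
# JobTracker/ResourceManager clients -- they only expose the attributes that
# Submission reads in these tests (fs_defaultfs and logical_name), so
# TestSubmission does not need a running cluster.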
class TestSubmission(OozieMockBase):

  def test_get_properties(self):
    submission = Submission(self.user, fs=MockFs())

    assert_equal({'security_enabled': False}, submission.properties)

    submission._update_properties('curacao:8032', '/deployment_dir')

    assert_equal({
        'jobTracker': 'curacao:8032',
        'nameNode': 'hdfs://curacao:8020',
        'security_enabled': False
      }, submission.properties)

  def test_get_logical_properties(self):
    submission = Submission(self.user, fs=MockFs(logical_name='fsname'), jt=MockJt(logical_name='jtname'))

    assert_equal({'security_enabled': False}, submission.properties)

    submission._update_properties('curacao:8032', '/deployment_dir')

    assert_equal({
        'jobTracker': 'jtname',
        'nameNode': 'fsname',
        'security_enabled': False
      }, submission.properties)

  def test_update_properties(self):
    finish = []
    finish.append(MR_CLUSTERS.set_for_testing({'default': {}}))
    finish.append(MR_CLUSTERS['default'].SUBMIT_TO.set_for_testing(True))
    finish.append(YARN_CLUSTERS.set_for_testing({'default': {}}))
    finish.append(YARN_CLUSTERS['default'].SUBMIT_TO.set_for_testing(True))

    try:
      properties = {
        'user.name': 'hue',
        'test.1': 'http://localhost/test?test1=test&test2=test',
        'nameNode': 'hdfs://curacao:8020',
        'jobTracker': 'jtaddress',
        'security_enabled': False
      }

      final_properties = properties.copy()

      # Properties already provide jobTracker and nameNode: they are kept as-is.
      submission = Submission(None, properties=properties, oozie_id='test', fs=MockFs())
      assert_equal(properties, submission.properties)
      submission._update_properties('jtaddress', 'deployment-directory')
      assert_equal(final_properties, submission.properties)

      # With a real HDFS config and no logical names, nameNode comes from fs_defaultfs.
      clear_sys_caches()
      fs = cluster.get_hdfs()
      final_properties = properties.copy()
      final_properties.update({
        'jobTracker': 'jtaddress',
        'nameNode': fs.fs_defaultfs
      })

      submission = Submission(None, properties=properties, oozie_id='test', fs=fs, jt=None)
      assert_equal(properties, submission.properties)
      submission._update_properties('jtaddress', 'deployment-directory')
      assert_equal(final_properties, submission.properties)

      # Configure logical names for HDFS and MR.
      finish.append(HDFS_CLUSTERS['default'].LOGICAL_NAME.set_for_testing('namenode'))
      finish.append(MR_CLUSTERS['default'].LOGICAL_NAME.set_for_testing('jobtracker'))
      clear_sys_caches()
      fs = cluster.get_hdfs()
      final_properties = properties.copy()
      final_properties.update({
        'jobTracker': 'jobtracker',
        'nameNode': 'namenode'
      })

      submission = Submission(None, properties=properties, oozie_id='test', fs=fs, jt=None)
      assert_equal(properties, submission.properties)
    finally:
      clear_sys_caches()
      for reset in finish:
        reset()

  def test_get_external_parameters(self):
    xml = """
<workflow-app name="Pig" xmlns="uri:oozie:workflow:0.4">
    <start to="Pig"/>
    <action name="Pig">
        <pig>
            <job-tracker>${jobTracker}</job-tracker>
            <name-node>${nameNode}</name-node>
            <prepare>
                <delete path="${output}"/>
            </prepare>
            <script>aggregate.pig</script>
            <argument>-param</argument>
            <argument>INPUT=${input}</argument>
            <argument>-param</argument>
            <argument>OUTPUT=${output}</argument>
            <configuration>
              <property>
                <name>mapred.input.format.class</name>
                <value>org.apache.hadoop.examples.SleepJob$SleepInputFormat</value>
              </property>
            </configuration>
        </pig>
        <ok to="end"/>
        <error to="kill"/>
    </action>
    <kill name="kill">
        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
    </kill>
    <end name="end"/>
</workflow-app>
"""

    properties = """
#
# Licensed to the Hue
#
nameNode=hdfs://localhost:8020
jobTracker=localhost:8021
queueName=default
examplesRoot=examples
oozie.use.system.libpath=true
oozie.wf.application.path=${nameNode}/user/${user.name}/${examplesRoot}/apps/pig
"""

    parameters = Submission(self.user)._get_external_parameters(xml, properties)

    assert_equal({
        'oozie.use.system.libpath': 'true',
        'input': '',
        'jobTracker': 'localhost:8021',
        'oozie.wf.application.path': '${nameNode}/user/${user.name}/${examplesRoot}/apps/pig',
        'examplesRoot': 'examples',
        'output': '',
        'nameNode': 'hdfs://localhost:8020',
        'queueName': 'default'
      },
      parameters)

  def test_update_credentials_from_hive_action(self):
    class TestJob(object):
      XML_FILE_NAME = 'workflow.xml'

      def __init__(self):
        self.deployment_dir = '/tmp/test'
        self.nodes = [
          Node({'id': '1', 'type': 'hive-document', 'properties': {'jdbc_url': u'jdbc:hive2://test-replace-url:12345/default', 'password': u'test'}})
        ]

    user = User.objects.get(username='test')
    submission = Submission(user, job=TestJob(), fs=MockFs(logical_name='fsname'), jt=MockJt(logical_name='jtname'))

    finish = (
      beeswax.conf.HIVE_SERVER_HOST.set_for_testing('hue-koh-chang'),
      beeswax.conf.HIVE_SERVER_PORT.set_for_testing(12345),
    )

    try:
      creds = Credentials(credentials=TestCredentials.CREDENTIALS.copy())
      hive_properties = {
        'thrift_uri': 'thrift://first-url:9999',
        'kerberos_principal': 'hive',
        'hive2.server.principal': 'hive/hive2_host@test-realm.com',
      }

      submission.properties['credentials'] = creds.get_properties(hive_properties)
      submission._update_credentials_from_hive_action(creds)

      assert_equal(submission.properties['credentials'][creds.hiveserver2_name]['properties'], [
          ('hive2.jdbc.url', u'jdbc:hive2://test-replace-url:12345/default'),
          ('hive2.server.principal', u'hive/test-replace-url@test-realm.com')
        ]
      )

      # Test parsing failure
      hive_properties = {
        'thrift_uri': 'thrift://first-url:9999',
        'kerberos_principal': 'hive',
        'hive2.server.principal': 'hive',
      }

      submission.properties['credentials'] = creds.get_properties(hive_properties)

      assert_raises(PopupException, submission._update_credentials_from_hive_action, creds)

    finally:
      for f in finish:
        f()

  def test_update_credentials_from_hive_action_when_jdbc_url_is_variable(self):
    class TestJob(object):
      XML_FILE_NAME = 'workflow.xml'

      def __init__(self):
        self.deployment_dir = '/tmp/test'
        self.nodes = [
          Node({'id': '1', 'type': 'hive-document', 'properties': {'jdbc_url': u"${wf:actionData('shell-31b5')['hiveserver']}", 'password': u'test'}})
        ]

    user = User.objects.get(username='test')
    submission = Submission(user, job=TestJob(), fs=MockFs(logical_name='fsname'), jt=MockJt(logical_name='jtname'))

    finish = (
      beeswax.conf.HIVE_SERVER_HOST.set_for_testing('hue-koh-chang'),
      beeswax.conf.HIVE_SERVER_PORT.set_for_testing(12345),
    )

    try:
      creds = Credentials(credentials=TestCredentials.CREDENTIALS.copy())
      hive_properties = {
        'thrift_uri': 'thrift://first-url:9999',
        'kerberos_principal': 'hive',
        'hive2.server.principal': 'hive/hive2_host@test-realm.com',
      }

      submission.properties['credentials'] = creds.get_properties(hive_properties)
      submission._update_credentials_from_hive_action(creds)

      # When the jdbc_url is a workflow variable, the configured HiveServer2
      # host and port are used in the credential instead.
      assert_equal(submission.properties['credentials'][creds.hiveserver2_name]['properties'], [
          ('hive2.jdbc.url', u'jdbc:hive2://hue-koh-chang:12345/default'),
          ('hive2.server.principal', u'hive/hive2_host@test-realm.com')
        ]
      )

    finally:
      for f in finish:
        f()

  def test_generate_altus_action_start_cluster(self):
    class TestJob(object):
      XML_FILE_NAME = 'workflow.xml'

      def __init__(self):
        self.deployment_dir = '/tmp/test'
        self.nodes = [
          Node({'id': '1', 'type': 'hive-document', 'properties': {'jdbc_url': u"${wf:actionData('shell-31b5')['hiveserver']}", 'password': u'test'}})
        ]

    user = User.objects.get(username='test')
    submission = Submission(user, job=TestJob(), fs=MockFs(logical_name='fsname'), jt=MockJt(logical_name='jtname'))

    command = submission._generate_altus_action_script(
      service='dataeng',
      command='listClusters',
      arguments={},
      auth_key_id='altus_auth_key_id',
      auth_key_secret='altus_auth_key_secret'
    )

    assert_true('''#!/usr/bin/env python

from navoptapi.api_lib import ApiLib

hostname = 'dataengapi.us-west-1.altus.cloudera.com'
auth_key_id = 'altus_auth_key_id'
auth_key_secret = \'\'\'altus_auth_key_secret\'\'\'

def _exec(service, command, parameters=None):
  if parameters is None:
    parameters = {}

  try:
    api = ApiLib(service, hostname, auth_key_id, auth_key_secret)
    resp = api.call_api(command, parameters)
    return resp.json()
  except Exception, e:
    print e
    raise e

print _exec('dataeng', 'listClusters', {})
''' in command,
      command
    )

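# These tests are normally driven by Hue's test runner; from a typical Hue checkout
# something like the following should work (exact invocation may vary by version):
#   build/env/bin/hue test specific liboozie.submittion2_tests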