
HUE-4822 [editor] Provide live-cluster test harness for HS2 editor tests

Jenny Kim 9 years ago
parent
commit
48a8b61

+ 1 - 0
apps/beeswax/src/beeswax/test_base.py

@@ -59,6 +59,7 @@ LOG = logging.getLogger(__name__)
 def is_hive_on_spark():
   return os.environ.get('ENABLE_HIVE_ON_SPARK', 'false').lower() == 'true'
 
+
 def get_available_execution_engines():
   available_engines = os.environ.get('AVAILABLE_EXECUTION_ENGINES_FOR_TEST', 'mr').lower().split(",")
   if any(engine not in _SUPPORTED_EXECUTION_ENGINES for engine in available_engines):
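
For context, a sketch of how a live-cluster test could consume these helpers to skip engines that are not configured for the run. `require_engine` and `require_hive_on_spark` are hypothetical names written for illustration; only `get_available_execution_engines` and `is_hive_on_spark` come from this hunk.

from nose.plugins.skip import SkipTest

from beeswax.test_base import get_available_execution_engines, is_hive_on_spark


def require_engine(engine):
  # Skip the calling test unless `engine` is listed in the
  # AVAILABLE_EXECUTION_ENGINES_FOR_TEST environment variable (e.g. 'mr,spark').
  if engine not in get_available_execution_engines():
    raise SkipTest('Execution engine %s is not available for tests' % engine)


def require_hive_on_spark():
  # Skip the calling test unless ENABLE_HIVE_ON_SPARK is set to 'true'.
  if not is_hive_on_spark():
    raise SkipTest('Hive on Spark is not enabled')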

+ 1 - 5
desktop/libs/notebook/src/notebook/connectors/hiveserver2.py

@@ -298,6 +298,7 @@ class HS2Api(Api):
         if engine == 'mr':
           # Get last task of last job
           logs = self.get_log(notebook, snippet, startFrom=0)
+
           jobs = self.get_jobs(notebook, snippet, logs)
           if jobs:
             last_job_id = jobs[-1].get('name')
@@ -453,11 +454,6 @@ class HS2Api(Api):
       'statement': query.get_query_statement(0),
     }
 
-  def fetch_result_size(self, notebook, snippet):
-    return {
-      'rows': 1000,
-      'size': 1024 * 1024 * 100, # Until HUE-4181
-    }
 
   @query_error_handler
   def export_data_as_hdfs_file(self, snippet, target_file, overwrite):
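
The deleted fetch_result_size stub had returned fixed placeholder values pending HUE-4181. As a reference for the tests below, a minimal sketch of the response shape callers depend on; this illustrates the contract only, not the HUE-4181 implementation:

def fetch_result_size(self, notebook, snippet):
  # Hypothetical sketch: both fields are None when the statement launched
  # no cluster job (e.g. SELECT 'hello world';), otherwise positive counts.
  rows, size = None, None
  # An engine-aware lookup would go here; the mr branch above shows the
  # pattern: walk self.get_log() / self.get_jobs() to find the last job.
  return {'rows': rows, 'size': size}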

+ 138 - 1
desktop/libs/notebook/src/notebook/connectors/tests/tests_hiveserver2.py

@@ -19,7 +19,9 @@
 import json
 import logging
 import re
+import time
 
+from nose.plugins.skip import SkipTest
 from nose.tools import assert_equal, assert_true, assert_false
 
 from django.contrib.auth.models import User
@@ -27,13 +29,14 @@ from django.core.urlresolvers import reverse
 
 from desktop.lib.django_test_util import make_logged_in_client
 from desktop.lib.test_utils import add_to_group, grant_access
+from hadoop.pseudo_hdfs4 import is_live_cluster
 
 from notebook.api import _save_notebook
 from notebook.connectors.hiveserver2 import HS2Api
 from notebook.models import make_notebook, Notebook
 
 from beeswax.server import dbms
-from beeswax.test_base import BeeswaxSampleProvider, get_query_server_config
+from beeswax.test_base import BeeswaxSampleProvider, get_query_server_config, is_hive_on_spark
 
 
 LOG = logging.getLogger(__name__)
@@ -486,6 +489,36 @@ class TestHiveserver2ApiWithHadoop(BeeswaxSampleProvider):
     return snippet
 
 
+  def execute_and_wait(self, query_doc, snippet_idx=0, timeout=30.0, wait=1.0):
+    notebook = Notebook(document=query_doc)
+    snippet = self.get_snippet(notebook, snippet_idx=snippet_idx)
+
+    curr = time.time()
+    end = curr + timeout
+    status = 'ready'
+
+    response = self.client.post(reverse('notebook:execute'),
+                                {'notebook': notebook.get_json(), 'snippet': json.dumps(snippet)})
+    notebook = Notebook(document=query_doc)
+    snippet = self.get_snippet(notebook, snippet_idx=snippet_idx)
+    data = json.loads(response.content)
+    snippet['result']['handle'] = data['handle']
+
+    while status != 'available' and curr <= end:
+      response = self.client.post(reverse('notebook:check_status'),
+                                  {'notebook': notebook.get_json(), 'snippet': json.dumps(snippet)})
+      data = json.loads(response.content)
+      status = data['query_status']['status']
+      snippet['status'] = status
+      time.sleep(wait)
+      curr = time.time()
+
+    if status != 'available':
+      raise Exception('Query failed to complete or return results.')
+
+    return snippet
+
+
   def test_query_with_unicode(self):
     statement = "SELECT * FROM sample_07 WHERE code='validé';"
 
@@ -574,3 +607,107 @@ class TestHiveserver2ApiWithHadoop(BeeswaxSampleProvider):
     assert_equal(['code'], data['headers'])
     assert_true('rows' in data)
     assert_true(len(data['rows']) > 0)
+
+
+  def test_fetch_result_size_mr(self):
+    # Assert that a query with no job will return no rows or size
+    statement = "SELECT 'hello world';"
+
+    settings = [
+        {
+            'key': 'hive.execution.engine',
+            'value': 'mr'
+        }
+    ]
+    doc = self.create_query_document(owner=self.user, statement=statement, settings=settings)
+    notebook = Notebook(document=doc)
+    snippet = self.execute_and_wait(doc, snippet_idx=0)
+
+    response = self.client.post(reverse('notebook:fetch_result_size'),
+                                {'notebook': notebook.get_json(), 'snippet': json.dumps(snippet)})
+
+    data = json.loads(response.content)
+    assert_equal(0, data['status'], data)
+    assert_true('result' in data)
+    assert_true('rows' in data['result'])
+    assert_true('size' in data['result'])
+    assert_equal(None, data['result']['rows'])
+    assert_equal(None, data['result']['size'])
+
+    # Assert that a query with map & reduce task returns rows
+    statement = "SELECT DISTINCT code FROM sample_07;"
+    doc = self.create_query_document(owner=self.user, statement=statement, settings=settings)
+    notebook = Notebook(document=doc)
+    snippet = self.execute_and_wait(doc, snippet_idx=0, timeout=60.0, wait=2.0)
+
+    response = self.client.post(reverse('notebook:fetch_result_size'),
+                                {'notebook': notebook.get_json(), 'snippet': json.dumps(snippet)})
+
+    data = json.loads(response.content)
+    assert_equal(0, data['status'], data)
+    assert_true('result' in data)
+    assert_true('rows' in data['result'])
+    assert_true(data['result']['rows'] > 0)
+
+    # Assert that a query with multiple jobs returns rows
+    statement = "SELECT app, COUNT(1) AS count FROM web_logs GROUP BY app ORDER BY count DESC;"
+    doc = self.create_query_document(owner=self.user, statement=statement, settings=settings)
+    notebook = Notebook(document=doc)
+    snippet = self.execute_and_wait(doc, snippet_idx=0, timeout=60.0, wait=2.0)
+
+    response = self.client.post(reverse('notebook:fetch_result_size'),
+                                {'notebook': notebook.get_json(), 'snippet': json.dumps(snippet)})
+
+    data = json.loads(response.content)
+    assert_equal(0, data['status'], data)
+    assert_true('result' in data)
+    assert_true('rows' in data['result'])
+    assert_true(data['result']['rows'] > 0)
+
+
+  def test_fetch_result_size_spark(self):
+    if not is_live_cluster() or not is_hive_on_spark():
+      raise SkipTest
+
+    # TODO: Add session cleanup here so we don't have orphan spark sessions
+
+    # Assert that a query with no job will return no rows or size
+    statement = "SELECT 'hello world';"
+
+    settings = [
+        {
+            'key': 'hive.execution.engine',
+            'value': 'spark'
+        }
+    ]
+    doc = self.create_query_document(owner=self.user, statement=statement, settings=settings)
+    notebook = Notebook(document=doc)
+    snippet = self.execute_and_wait(doc, snippet_idx=0)
+
+    response = self.client.post(reverse('notebook:fetch_result_size'),
+                                {'notebook': notebook.get_json(), 'snippet': json.dumps(snippet)})
+
+    data = json.loads(response.content)
+    assert_equal(0, data['status'], data)
+    assert_true('result' in data)
+    assert_true('rows' in data['result'])
+    assert_true('size' in data['result'])
+    assert_equal(None, data['result']['rows'])
+    assert_equal(None, data['result']['size'])
+
+    # Assert that a query that runs a job will return rows and size
+    statement = "SELECT app, COUNT(1) AS count FROM web_logs GROUP BY app ORDER BY count DESC;"
+    doc = self.create_query_document(owner=self.user, statement=statement, settings=settings)
+    notebook = Notebook(document=doc)
+    snippet = self.execute_and_wait(doc, snippet_idx=0, timeout=60.0, wait=2.0)
+
+    response = self.client.post(reverse('notebook:fetch_result_size'),
+                                {'notebook': notebook.get_json(), 'snippet': json.dumps(snippet)})
+
+    data = json.loads(response.content)
+    assert_equal(0, data['status'], data)
+    assert_true('result' in data)
+    assert_true('rows' in data['result'])
+    assert_true('size' in data['result'])
+    assert_true(data['result']['rows'] > 0)
+    assert_true(data['result']['size'] > 0)
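
These tests are driven by environment configuration rather than test arguments. A sketch of a setup that would exercise both engine paths; the variable names come from the helpers in test_base.py, the values are only examples:

import os

# Read by get_available_execution_engines(); engines outside the supported
# set are rejected by the check shown in test_base.py above.
os.environ['AVAILABLE_EXECUTION_ENGINES_FOR_TEST'] = 'mr,spark'

# Read by is_hive_on_spark(); together with is_live_cluster() it gates
# test_fetch_result_size_spark. How is_live_cluster() is satisfied depends
# on the local Hadoop test setup.
os.environ['ENABLE_HIVE_ON_SPARK'] = 'true'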