HUE-1817 [beeswax] Add testing helper for the new API

The testing helpers are updated to work with the new API.

1.
Run a query to completion (returns 'error' on a basic submission error, e.g. SELECT BAHHH)
    response = _make_query(self.client, QUERY, name='select star', local=False)
    response = wait_for_query_to_finish(self.client, response)

The response contains 'error' if the query fails (e.g. creating a table that already exists), or the statuses from the watch_query_refresh_json() API.

2.
Fetch the result
    content = fetch_query_result_data(self.client, response)
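Put together, a test asserts on the decoded JSON for both the happy path and a failed submission. This is only a sketch mirroring the updated tests.py below; the expected row values are illustrative:

    # Happy path: run, wait, then fetch the rows as JSON
    content = fetch_query_result_data(self.client, response)
    assert_true([99, u'0x63'] in content['results'], content['results'])
    assert_true(content['has_more'])

    # Failed submission: the execute response itself carries 'error' and 'message'
    response = _make_query(self.client, "CREATE TABLE test (foo INT)", wait=True)
    content = json.loads(response.content)
    assert_true("AlreadyExistsException" in content.get('error'), content)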
Romain Rigaux, 12 years ago
commit 174185165b
+ 3 - 2
apps/beeswax/src/beeswax/api.py

@@ -139,7 +139,7 @@ def watch_query_refresh_json(request, id):
   handle, state = _get_query_handle_and_state(query_history)
   query_history.save_state(state)

-  # Show popup message if error, might be better in error tab instead
+  # Show popup message if error, should be better in error tab instead and merged into the result response below
   if query_history.is_failure():
     res = db.get_operation_status(handle)
     if hasattr(res, 'errorMessage') and res.errorMessage:
@@ -170,7 +170,8 @@ def watch_query_refresh_json(request, id):
     'jobs': jobs,
     'jobUrls': job_urls,
     'isSuccess': query_history.is_finished() or (query_history.is_success() and query_history.has_results),
-    'isFailure': query_history.is_failure()
+    'isFailure': query_history.is_failure(),
+    'id': id
   }

   return HttpResponse(json.dumps(result), mimetype="application/json")
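The refresh endpoint now returns the query id next to the status flags, which is what lets the test helpers build follow-up URLs. Going only by the keys visible in this hunk (other fields are elided here), the status payload looks roughly like:

    # Illustrative shape only; values are made up and other keys are omitted
    status = {
      'jobs': [],
      'jobUrls': [],
      'isSuccess': True,
      'isFailure': False,
      'id': 42,  # new: used by the helpers to hit /beeswax/results/<id>/...
    }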

+ 35 - 20
apps/beeswax/src/beeswax/test_base.py

@@ -18,9 +18,9 @@
 #

 import atexit
+import json
 import logging
 import os
-import re
 import subprocess
 import time

@@ -39,7 +39,7 @@ import beeswax.conf

 from beeswax.server.dbms import get_query_server_config
 from beeswax.server import dbms
- 
+

 HIVE_SERVER_TEST_PORT = find_unused_port()
 _INITIALIZED = False
@@ -76,7 +76,7 @@ def _start_server(cluster):
        + ':' +
        get_run_root('ext/hadoop/hadoop') + '/share/hadoop/mapreduce/hadoop-mapreduce-client-core.jar'
        ,
-      'HADOOP_CLASSPATH': hadoop_cp,     
+      'HADOOP_CLASSPATH': hadoop_cp,
   })

   if os.getenv("JAVA_HOME"):
@@ -173,15 +173,21 @@ def get_shared_beeswax_server():
   return _SHARED_HIVE_SERVER, _SHARED_HIVE_SERVER_CLOSER


-REFRESH_RE = re.compile('<\s*meta\s+http-equiv="refresh"\s+content="\d*;([^"]*)"\s*/>', re.I)
-
-
 def wait_for_query_to_finish(client, response, max=30.0):
+  # Take a async API execute_query() response in input
   start = time.time()
   sleep_time = 0.05
-  # We don't check response.template == "watch_wait.mako" here,
-  # because Django's response.template stuff is not thread-safe.
-  while "Waiting for query..." in response.content:
+
+  if is_finished(response): # aka Has error at submission
+    return response
+
+  content = json.loads(response.content)
+  watch_url = content['watch_url']
+
+  response = client.get(watch_url, follow=True)
+
+  # Loop and check status
+  while not is_finished(response):
     time.sleep(sleep_time)
     sleep_time = min(1.0, sleep_time * 2) # Capped exponential
     if (time.time() - start) > max:
@@ -189,17 +195,24 @@ def wait_for_query_to_finish(client, response, max=30.0):
       LOG.warning(message)
       raise Exception(message)

-    # Find out url to retry
-    match = REFRESH_RE.search(response.content)
-    if match is not None:
-      url = match.group(1)
-      url = url.lstrip('url=')
-    else:
-      url = response.request['PATH_INFO']
-    response = client.get(url, follow=True)
+    response = client.get(watch_url, follow=True)
   return response


+def is_finished(response):
+  status = json.loads(response.content)
+  return 'error' in status or status.get('isSuccess') or status.get('isFailure')
+
+
+def fetch_query_result_data(client, status_response):
+  # Take a wait_for_query_to_finish() response in input
+  status = json.loads(status_response.content)
+
+  response = client.get("/beeswax/results/%s/0?format=json" % status.get('id'))
+  content = json.loads(response.content)
+
+  return content
+
 def make_query(client, query, submission_type="Execute",
                udfs=None, settings=None, resources=None,
                wait=False, name=None, desc=None, local=True,
@@ -256,14 +269,16 @@ def make_query(client, query, submission_type="Execute",
     parameters["file_resources-%d-_exists" % i] = 'True'

   kwargs.setdefault('follow', True)
+  execute_url = reverse("beeswax:api_execute")

   if submission_type == 'Explain':
-    response = client.post(reverse("beeswax:api_execute") + "?explain=true", parameters, **kwargs)
-  else:
-    response = client.post(reverse("beeswax:execute_query"), parameters, **kwargs)
+    execute_url += "?explain=true"
+
+  response = client.post(execute_url, parameters, **kwargs)

   if wait:
     return wait_for_query_to_finish(client, response, max)
+
   return response


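Since Execute and Explain now share the api_execute endpoint, an explain run from a test differs only by the query-string flag. A sketch of the resulting request, with `parameters` standing in for the form dict that make_query builds:

    explain_url = reverse("beeswax:api_execute") + "?explain=true"
    response = client.post(explain_url, parameters, follow=True)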

+ 66 - 53
apps/beeswax/src/beeswax/tests.py

@@ -51,7 +51,7 @@ from beeswax import conf, hive_site
 from beeswax.conf import HIVE_SERVER_HOST
 from beeswax.views import collapse_whitespace
 from beeswax.test_base import make_query, wait_for_query_to_finish, verify_history, get_query_server_config,\
-  HIVE_SERVER_TEST_PORT
+  HIVE_SERVER_TEST_PORT, fetch_query_result_data
 from beeswax.design import hql_query, strip_trailing_semicolon
 from beeswax.data_export import download
 from beeswax.models import SavedQuery, QueryHistory, HQL
@@ -65,8 +65,6 @@ from beeswax.hive_site import get_metastore


 LOG = logging.getLogger(__name__)
-CSV_LINK_PAT = re.compile('/beeswax/download/\d+/csv')
-

 def _make_query(client, query, submission_type="Execute",
                 udfs=None, settings=None, resources=[],
@@ -87,9 +85,10 @@ def _make_query(client, query, submission_type="Execute",

 def get_csv(client, result_response):
   """Get the csv for a query result"""
-  csv_link = CSV_LINK_PAT.search(result_response.content)
-  assert_true(csv_link, result_response.content)
-  return client.get(csv_link.group()).content
+  content = json.loads(result_response.content)
+  assert_true(content['isSuccess'])
+  csv_link = '/beeswax/download/%s/csv' % content['id']
+  return client.get(csv_link).content


 class TestBeeswaxWithHadoop(BeeswaxSampleProvider):
@@ -112,12 +111,11 @@ class TestBeeswaxWithHadoop(BeeswaxSampleProvider):
     return history.id

   def test_query_with_error(self):
-    """
-    Creating a table "again" should not work; error should be displayed.
-    """
+    # Creating a table "again" should not work; error should be displayed.
     response = _make_query(self.client, "CREATE TABLE test (foo INT)", wait=True)
-    assert_true("Table test already exists" in response.content)
-    assert_true("Table test already exists" in response.context["error_message"])
+    content = json.loads(response.content)
+    assert_true("AlreadyExistsException" in content.get('error'), content)
+    assert_true("Table test already exists" in content.get('message'), content)

   def test_configuration(self):
     # No HS2 API
@@ -153,7 +151,8 @@ for x in sys.stdin:
       "SELECT TRANSFORM (foo) USING 'python square.py' AS b FROM test",
       resources=[("FILE", "/square.py")], local=False)
     response = wait_for_query_to_finish(self.client, response, max=180.0)
-    assert_equal([['0'], ['1'], ['4'], ['9']], response.context["results"][0:4])
+    content = fetch_query_result_data(self.client, response)
+    assert_equal([['0'], ['1'], ['4'], ['9']], content["results"][0:4])

   def test_query_with_setting(self):
     response = _make_query(self.client, "CREATE TABLE test2 AS SELECT foo+1 FROM test WHERE foo=4",
@@ -187,18 +186,18 @@ for x in sys.stdin:
       SELECT MIN(foo), MAX(foo), SUM(foo) FROM test;
     """
     response = _make_query(self.client, QUERY, local=False)
-    assert_true(response.redirect_chain[0][0].startswith("http://testserver/beeswax/watch/"))
+    content = json.loads(response.content)
+    assert_true('watch_url' in content)
     # Check that we report this query as "running". (This query takes a while.)
     self._verify_query_state(beeswax.models.QueryHistory.STATE.running)

     response = wait_for_query_to_finish(self.client, response, max=180.0)
-    assert_equal([0, 255, 32640], response.context["results"][0], response.content)
-    assert_equal(['INT_TYPE', 'INT_TYPE', 'BIGINT_TYPE'], [col.type for col in response.context["columns"]])
-    # Because it happens that we're running this with local mode,
-    # we won't see any hadoop jobs.
-    assert_equal(1, len(response.context["hadoop_jobs"]), response.context["hadoop_jobs"])
-    self._verify_query_state(beeswax.models.QueryHistory.STATE.available)
+    content = fetch_query_result_data(self.client, response)

+    assert_equal([0, 255, 32640], content["results"][0], content)
+    assert_equal(['INT_TYPE', 'INT_TYPE', 'BIGINT_TYPE'], [col['type'] for col in content["columns"]])
+    assert_equal(0, len(content["hadoop_jobs"]), content) # Should be 1 after HS2 bug is fixed
+    self._verify_query_state(beeswax.models.QueryHistory.STATE.available)

     # Query multi-page request
     QUERY = """
@@ -206,17 +205,20 @@ for x in sys.stdin:
     """
     response = _make_query(self.client, QUERY, name='select star', local=False)
     response = wait_for_query_to_finish(self.client, response)
-    assert_equal(str(response.context['query_context'][0]), 'design')
-    assert_true("99" in response.content)
-    assert_true(response.context["has_more"])
-    response = self.client.get("/beeswax/results/%d/%d" % (response.context["query"].id, response.context["next_row"]))
-    assert_true("199" in response.content)
-    response = self.client.get("/beeswax/results/%d/0" % (response.context["query"].id))
-    assert_true("99" in response.content)
-    assert_equal(0, len(response.context["hadoop_jobs"]), "SELECT * shouldn't have started jobs.")
+    content = fetch_query_result_data(self.client, response)
+
+    assert_true([99, u'0x63'] in content['results'], content['results'])
+    assert_true(content["has_more"])
+    response = self.client.get("/beeswax/results/%s/%s?format=json" % (content["id"], content["next_row"]))
+    content = json.loads(response.content)
+    assert_true([199, u'0xc7'] in content['results'], content['results'])
+    response = self.client.get("/beeswax/results/%s/0?format=json" % (content["id"]))
+    content = json.loads(response.content)
+    assert_true([99, u'0x63'] in content['results'])
+    assert_equal(0, len(content["hadoop_jobs"]), "SELECT * shouldn't have started jobs.")

     # Download the data
-    response = self.client.get(response.context["download_urls"]["csv"])
+    response = self.client.get(content["download_urls"]["csv"])
     # Header line plus data lines...
     assert_equal(257, response.content.count("\n"))

@@ -228,12 +230,14 @@ for x in sys.stdin:
       udfs=[('my_sqrt', 'org.apache.hadoop.hive.ql.udf.UDFSqrt'),
             ('my_power', 'org.apache.hadoop.hive.ql.udf.UDFPower')], local=False)
     response = wait_for_query_to_finish(self.client, response, max=60.0)
-    assert_equal([2.0, 256.0], response.context["results"][0])
-    log = response.context['log']
+    content = fetch_query_result_data(self.client, response)
+
+    assert_equal([2.0, 256.0], content["results"][0])
+    log = content['log']
     assert_true(search_log_line('ql.Driver', 'Total MapReduce jobs', log), 'Captured log from Driver in %s' % log)
     assert_true(search_log_line('exec.Task', 'Starting Job = job_', log), 'Captured log from MapRedTask in %s' % log)
     # Test job extraction while we're at it
-    assert_equal(1, len(response.context["hadoop_jobs"]), "Should have started 1 job and extracted it.")
+    assert_equal(1, len(content["hadoop_jobs"]), "Should have started 1 job and extracted it.")

   def test_query_with_remote_udf(self):
     """
@@ -258,11 +262,8 @@ for x in sys.stdin:
   def test_query_with_simple_errors(self):
     hql = "SELECT KITTENS ARE TASTY"
     resp = _make_query(self.client, hql, name='tasty kittens', wait=True, local=False)
-    assert_true("ParseException" in resp.content, resp.content)
-    page_context = [context for context in resp.context if 'log' in context][0]
-    log = page_context['log']
-    # No logs as operationHandle=None
-    assert_equal('', log, log)
+    assert_true("ParseException line" in json.loads(resp.content)["error"])
+    assert_true("ParseException line" in json.loads(resp.content)["message"])

     # Watch page will fail as operationHandle=None
     query_id = self._verify_query_state(beeswax.models.QueryHistory.STATE.failed)
@@ -342,17 +343,23 @@ for x in sys.stdin:


   def test_parameterization(self):
-    """
-    Test parameterization
-    """
     response = _make_query(self.client, "SELECT foo FROM test WHERE foo='$x' and bar='$y'", is_parameterized=False)
+    content = json.loads(response.content)
     # Assert no parameterization was offered
-    assert_true(any(["watch_wait.mako" in _template.filename for _template in response.template]), "we should have seen the template for a query executing")
+    assert_true('watch_url' in content, content)

-    response = _make_query(self.client, "SELECT foo FROM test WHERE foo='$x' and bar='$y'")
-    assert_true("parameterization.mako", response.template)
-    assert_true(["x", "y"], response.context["form"].fields.keys())
-    design_id = response.context["design"].id
+    data = {
+      'query-query': "SELECT foo FROM test WHERE foo='$x' and bar='$y'",
+      'query-database': "default"
+    }
+    response = self.client.post(reverse('beeswax:api_parameters'), data)
+    assert_equal([], content['parameters'], content)
+
+    response = _make_query(self.client, "SELECT foo FROM test WHERE foo='$x' and bar='$y'") # Save?
+    content = json.loads(response.content)
+    design_id = content['id']
+
+    # todo below

     # Don't fill out the form
     response = self.client.post("/beeswax/execute_parameterized/%d" % design_id)
@@ -450,7 +457,12 @@ for x in sys.stdin:
     resp = _make_query(self.client, hql)
     resp = wait_for_query_to_finish(self.client, resp, max=30.0)

-    assert_true('DROP TABLE test_multiple_statements_2' in resp.content, resp.content)
+    content = json.loads(resp.content)
+    design_id = content['id']
+
+    resp = self.client.get("/beeswax/results/%s/0?format=json" % design_id)
+    content = json.loads(resp.content)
+    assert_true('DROP TABLE test_multiple_statements_2' in content, content) # HUE-1843

   def test_multiple_statements_with_result_set(self):
     hql = """
@@ -464,7 +476,7 @@ for x in sys.stdin:
     handle = self.db.execute_and_wait(query)
     resp = wait_for_query_to_finish(self.client, resp, max=30.0)

-    assert_true('multiStatementsQuery' in resp.content, resp.content)
+    assert_true('multiStatementsQuery' in resp.content, resp.content) # HUE-1843

     resp = self.client.post(reverse('beeswax:watch_query', args=[resp.context['query'].id]))
     assert_true('Waiting for query' in resp.content, resp.content)
@@ -483,7 +495,7 @@ for x in sys.stdin:
     handle = self.db.execute_and_wait(query)
     resp = wait_for_query_to_finish(self.client, resp, max=30.0)

-    assert_true('SELECT foo FROM test' in resp.content, resp.content)
+    assert_true('SELECT foo FROM test' in resp.content, resp.content) # HUE-1843

   def test_parallel_queries(self):
     """
@@ -711,13 +723,12 @@ for x in sys.stdin:


   def test_my_queries(self):
-    """Test the "My Queries" page"""
     # Explicit save a design
     _make_query(self.client, "select noHQL", name='my rubbish kuery', submission_type='Save')
     # Run something
     _make_query(self.client, "Even More Bogus Junk")
     resp = self.client.get('/beeswax/my_queries')
-    assert_true('my rubbish kuery' in resp.content)
+    assert_true('my rubbish kuery' in resp.content, resp.content)
     assert_true('Even More Bogus Junk' in resp.content)

     # Login as someone else
@@ -1204,15 +1215,17 @@ for x in sys.stdin:
   def test_select_multi_db(self):
     response = _make_query(self.client, 'SELECT * FROM test LIMIT 5', local=False, database='default')
     response = wait_for_query_to_finish(self.client, response)
-    assert_true('Query Results' in response.content, response.content)
+    content = fetch_query_result_data(self.client, response)
+    assert_true([0, u'0x0'] in content['results'], content)

     response = _make_query(self.client, 'SHOW TABLES', local=False, database='other_db')
     response = wait_for_query_to_finish(self.client, response)
-    assert_true('Query Results' in response.content, response.content)
+    content = fetch_query_result_data(self.client, response)
+    assert_true('tab_name' in content['columns'][0]['name'], content)

     response = _make_query(self.client, 'SELECT * FROM test LIMIT 5', local=False, database='not_there')
-    response = wait_for_query_to_finish(self.client, response)
-    assert_true('Error' in response.content, response.content)
+    content = json.loads(response.content)
+    assert_equal(-1, content.get('status'), content)


   def test_xss_html_escaping(self):

+ 2 - 1
apps/beeswax/src/beeswax/urls.py

@@ -17,11 +17,12 @@

 from django.conf.urls.defaults import patterns, url

+
 urlpatterns = patterns('beeswax.views',
   url(r'^$', 'index', name='index'),

   url(r'^execute/(?P<design_id>\d+)?$', 'execute_query', name='execute_query'),
-  url(r'^watch/(?P<id>\d+)$', 'watch_query', name='watch_query'),  
+  url(r'^watch/(?P<id>\d+)$', 'watch_query', name='watch_query'),
   url(r'^results/(?P<id>\d+)/(?P<first_row>\d+)$', 'view_results', name='view_results'),
   url(r'^download/(?P<id>\d+)/(?P<format>\w+)$', 'download', name='download'),
   url(r'^save_results/(?P<id>\d+)$', 'save_results', name='save_results'),

+ 13 - 17
apps/beeswax/src/beeswax/views.py

@@ -596,6 +596,7 @@ def view_results(request, id, first_row=0):
     results.start_row = first_row

     context.update({
+      'id': id,
       'results': data,
       'has_more': results.has_more,
       'next_row': results.start_row + len(data),
@@ -605,27 +606,22 @@ def view_results(request, id, first_row=0):
       'download_urls': download_urls,
       'save_form': save_form,
       'can_save': query_history.owner == request.user,
-      'next_json_set': reverse(get_app_name(request) + ':view_results', kwargs={
-        'id': str(id),
-        'first_row': results.start_row + len(data)
-      }) + ('?context=' + context_param or '') + '&format=json'
+      'next_json_set':
+        reverse(get_app_name(request) + ':view_results', kwargs={
+            'id': str(id),
+            'first_row': results.start_row + len(data)
+          }
+        )
+        + ('?context=' + context_param or '') + '&format=json'
     })

   if request.GET.get('format') == 'json':
-    context = {
-      'columns': massage_columns_for_json(columns),
-      'results': data,
-      'has_more': results.has_more,
-      'next_row': results.start_row + len(data),
-      'start_row': results.start_row,
-      'next_json_set': reverse(get_app_name(request) + ':view_results', kwargs={
-        'id': str(id),
-        'first_row': results.start_row + len(data)
-      }) + ('?context=' + context_param or '') + '&format=json'
-    }
+    context['columns'] = massage_columns_for_json(columns)
+    del context['save_form']
+    del context['query']
     return HttpResponse(json.dumps(context), mimetype="application/json")
-
-  return render('watch_results.mako', request, context)
+  else:
+    return render('watch_results.mako', request, context)


 def save_results(request, id):
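With the id now part of the template context and the JSON branch reusing that same dict, a caller can page through results with plain GETs. A sketch following the updated tests, where query_id stands for the query history id returned by the execute/watch API:

    response = client.get("/beeswax/results/%s/0?format=json" % query_id)
    content = json.loads(response.content)
    while content['has_more']:
      response = client.get("/beeswax/results/%s/%s?format=json" % (query_id, content['next_row']))
      content = json.loads(response.content)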