@@ -63,17 +63,53 @@ class TestHiveQueryApi():
       schema_editor.delete_model(HiveQuery)

-  def test_apps_empty(self):
-    with patch('jobbrowser.apis.hive_query_api.HiveQueryClient.get_queries') as get_queries:
+  def test_search_pagination(self):
+    with patch('jobbrowser.apis.hive_query_api.HiveQueryClient._get_queries') as _get_queries:
       with patch('jobbrowser.apis.hive_query_api.HiveQueryClient.get_query_count') as get_query_count:
-        get_queries.return_value = []
-        get_query_count.return_value = 0
-
-        response = self.client.post("/jobbrowser/api/jobs/queries-hive") # TODO: add POST parameters
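+        # Three in-memory HiveQuery rows (never saved to the database); the patched
+        # _get_queries below returns them as the full, unpaginated result set.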
+        query1 = HiveQuery()
+        query1.query_id = 10
+        query1.start_time = 5
+        query1.query = "select * from employee"
+        query1.end_time = 8
+        query1.elapsed_time = 28
+        query1.status = "SUCCESS"
+        query1.user_id = "hive"
+        query1.request_user = "hive"
+
+        query2 = HiveQuery()
+        query2.query_id = 12
+        query2.start_time = 4
+        query2.query = "select * from employee1"
+        query2.end_time = 6
+        query2.elapsed_time = 28
+        query2.status = "SUCCESS"
+        query2.user_id = "hive"
+        query2.request_user = "hive"
+
+        query3 = HiveQuery()
+        query3.query_id = 14
+        query3.start_time = 6
+        query3.query = "select * from employee2"
+        query3.end_time = 7
+        query3.elapsed_time = 28
+        query3.status = "FAIL"
+        query3.user_id = "hive"
+        query3.request_user = "hive"
+
+        get_query_count.return_value = 3
+        _get_queries.return_value = [query1, query2, query3]
+        filters = {"endTime": 10, "facets": [], "limit": 2, "offset": 0, "sortText": "", "startTime": 1, "text": "", "type": "basic"}
+        _data = json.dumps(filters)
+        response = self.client.post("/jobbrowser/api/jobs/queries-hive", content_type='application/json', data=_data)
         data = json.loads(response.content)
+        paginated_query_list = HiveQueryClient().get_queries(filters)
+
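+        # With limit=2 and offset=0 in the filters, only the first two of the three mocked
+        # queries should come back, while 'meta' still reports the full size and the limit.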
+        assert_equal(2, len(paginated_query_list))
+        assert_equal(10, data['queries'][0]['queryId'])
+        assert_equal("SUCCESS", data['queries'][1]['status'])
+        assert_equal(3, data['meta']['size'])
+        assert_equal(2, data['meta']['limit'])

-        assert_equal([], data['queries'])
-        assert_equal(100, data['meta']['limit'])

   # TODO
   # def test_app(self):
@@ -93,24 +129,57 @@ class TestHiveQueryApi():

 class TestHiveQueryClient():

   def setUp(self):
-    if not QUERY_DATABASE.HOST.get() or True: # Note: table migrations / non auto model to add before it can be enabled
+    if not QUERY_DATABASE.HOST.get():
       raise SkipTest

     self.client = make_logged_in_client(username="test", groupname="default", recreate=True, is_superuser=False)
     self.user = rewrite_user(User.objects.get(username="test"))
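+    # Default search filters reused by the tests below; startTime/endTime are epoch
+    # milliseconds spanning a 7-day window.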
+    self.filters = {
+      'endTime': 1602146114116,
+      'facets': [],
+      'limit': 2,
+      'offset': 0,
+      'sortText': "startTime:DESC",
+      'startTime': 1601541314116,
+      'text': "select",
+      'type': "basic",
+    }
+    self.query1 = HiveQuery()
+    self.query2 = HiveQuery()
+    self.query3 = HiveQuery()
+
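+    # The HiveQuery model is not auto-created by the test migrations (see the note removed
+    # above), so its table is created ad hoc here and dropped again in tearDown().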
+    with connection.schema_editor() as schema_editor:
+      schema_editor.create_model(HiveQuery)
+
+      if HiveQuery._meta.db_table not in connection.introspection.table_names():
+        raise ValueError("Table `{table_name}` is missing in test database.".format(table_name=HiveQuery._meta.db_table))
+
+  def tearDown(self):
+    with connection.schema_editor() as schema_editor:
+      schema_editor.delete_model(HiveQuery)
+
+
+  def test__get_queries(self):
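+    # Smoke test only: exercises the private filtering helper against the (empty) ad hoc
+    # table without asserting on the result.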
+    HiveQueryClient()._get_queries(self.filters)

   def test_get_query_count(self):
-    HiveQueryClient().get_query_count()
+    with patch('jobbrowser.apis.hive_query_api.HiveQueryClient._get_queries') as _get_queries:
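+      # Assumes get_query_count() derives its total from the result of _get_queries() for the
+      # same filters, hence the three-element mocked result set.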
+      _get_queries.return_value = [self.query1, self.query2, self.query3]
+      filtered_query_count = HiveQueryClient().get_query_count(self.filters)

-  def test_get_queries(self):
-    HiveQueryClient().get_queries(limit=100)
+      assert_equal(3, filtered_query_count)

-    HiveQueryClient().get_queries(limit=10)
+
+  def test_get_queries(self):
+    HiveQueryClient().get_queries(self.filters)

   def test_get_query(self):
-    query_id = 'd94d2fb4815a05c4:b1ccec1500000000'
+    with patch('jobbrowser.apis.hive_query_api.HiveQueryClient.get_query') as get_query:
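+      # get_query itself is mocked here, so this only checks that the stubbed HiveQuery
+      # round-trips the expected query_id; it does not hit the backing database.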
+      query_id = 'd94d2fb4815a05c4:b1ccec1500000000'
+      self.query1.query_id = query_id
+      get_query.return_value = self.query1

-    assert_raises(HiveQuery.DoesNotExist, HiveQueryClient().get_query, query_id=query_id)
+      assert_equal(HiveQueryClient().get_query(query_id).query_id, query_id)