@@ -35,7 +35,7 @@ from desktop.lib.python_util import find_unused_port
 from desktop.lib.exceptions import StructuredThriftTransportException
 from desktop.lib.security_util import get_localhost_name
 from desktop.lib.test_utils import add_to_group, grant_access
-from hadoop import pseudo_hdfs4
+from hadoop import cluster, pseudo_hdfs4
 from hadoop.pseudo_hdfs4 import is_live_cluster, get_db_prefix
 
 import beeswax.conf
@@ -55,6 +55,10 @@ _SHARED_HIVE_SERVER_CLOSER = None
 
 LOG = logging.getLogger(__name__)
 
 
+def is_hive_on_spark():
+  return os.environ.get('ENABLE_HIVE_ON_SPARK', 'false').lower() == 'true'
+
+
 def _start_server(cluster):
   args = [beeswax.conf.HIVE_SERVER_BIN.get()]
@@ -124,8 +128,6 @@ def get_shared_beeswax_server(db_name='default'):
           raise
       except Exception, e:
         LOG.exception('Failed to open Hive Server session')
-        import pdb
-        pdb.set_trace()
       else:
         started = True
         break
@@ -351,6 +353,7 @@ class BeeswaxSampleProvider(object):
   def setup_class(cls):
     cls.db_name = get_db_prefix(name='hive')
     cls.cluster, shutdown = get_shared_beeswax_server(cls.db_name)
+    cls.set_execution_engine()
     cls.client = make_logged_in_client(username='test', is_superuser=False)
     add_to_group('test', 'test')
     grant_access('test', 'test', 'beeswax')
@@ -363,11 +366,17 @@ class BeeswaxSampleProvider(object):
   def teardown_class(cls):
     if is_live_cluster():
       # Delete test DB and tables
+      query_server = get_query_server_config()
       client = make_logged_in_client()
       user = User.objects.get(username='test')
-      query_server = get_query_server_config()
+
       db = dbms.get(user, query_server)
 
+      # Kill Spark context if running
+      if is_hive_on_spark() and cluster.is_yarn():
+        # TODO: We should clean up the running Hive on Spark job here
+        pass
+
       for db_name in [cls.db_name, '%s_other' % cls.db_name]:
         databases = db.get_databases()
@@ -385,6 +394,17 @@ class BeeswaxSampleProvider(object):
     global _INITIALIZED
     _INITIALIZED = False
 
+  @classmethod
+  def set_execution_engine(cls):
+    query_server = get_query_server_config()
+
+    if query_server['server_name'] == 'beeswax' and is_hive_on_spark():
+      user = User.objects.get(username='test')
+      db = dbms.get(user, query_server)
+
+      LOG.info("Setting Hive execution engine to Spark")
+      db.execute_statement('SET hive.execution.engine=spark')
+
   @classmethod
   def init_beeswax_db(cls):
     """
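
A minimal sketch (not part of the patch above) of how the ENABLE_HIVE_ON_SPARK toggle and the SET statement fit together; `db` stands in for the dbms client used in the tests, and the 'mr' fallback is an assumption for illustration only.

import os

def is_hive_on_spark():
  # The toggle is a plain environment variable; anything other than 'true' keeps the default engine.
  return os.environ.get('ENABLE_HIVE_ON_SPARK', 'false').lower() == 'true'

def set_execution_engine(db):
  # Assumed: `db` exposes execute_statement(), as the dbms client in the tests does.
  # 'mr' is an illustrative default here, not something the patch sets.
  engine = 'spark' if is_hive_on_spark() else 'mr'
  db.execute_statement('SET hive.execution.engine=%s' % engine)

Exporting ENABLE_HIVE_ON_SPARK=true before running the beeswax tests turns the toggle on; the teardown hunk above only marks the Spark context cleanup as a TODO, so nothing is torn down there yet.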