
HUE-8948 [hive] Skip customer query if transactional table

Romain 6 years ago
parent commit c42dd534e8
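
In short: with transactional (ACID) Hive, i.e. when has_concurrency_support() is true, the installer can no longer move data files into a table's warehouse directory, so sample data is written with INSERT INTO ... VALUES built from the CSV, and the 'Sample: Customers' design is skipped. A minimal sketch of that gating, assuming illustrative names (build_load_hql and its arguments are not in the module; only the two HQL shapes mirror the diff):

def build_load_hql(tablename, values_sql, hdfs_path, transactional):
    # Sketch only: ACID tables cannot simply have files moved into their
    # directory, so emit INSERT ... VALUES; otherwise keep the LOAD DATA path.
    if transactional:
        return "INSERT INTO TABLE %(tablename)s VALUES %(values)s" % {
            'tablename': tablename, 'values': values_sql
        }
    return "LOAD DATA INPATH '%(filename)s' OVERWRITE INTO TABLE %(tablename)s" % {
        'filename': hdfs_path, 'tablename': tablename
    }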

+ 40 - 27
apps/beeswax/src/beeswax/management/commands/beeswax_install_examples.py

@@ -113,7 +113,9 @@ class Command(BaseCommand):
     design_list = [d for d in design_list if int(d['type']) == app_type]
 
     for design_dict in design_list:
-      design = SampleQuery(design_dict)
+      if has_concurrency_support() and design_dict['name'] == 'Sample: Customers':
+        continue  # skip the Customers sample on transactional Hive
+      design = SampleQuery(design_dict)
       try:
         design.install(django_user)
       except Exception as ex:
@@ -164,7 +165,6 @@ class SampleTable(object):
     db = dbms.get(django_user, self.query_server)
 
     try:
-      # Already exists?
       if self.app_name == 'impala':
         db.invalidate(database=self.db_name, flush_all=False)
       db.get_table(self.db_name, self.name)
@@ -187,44 +187,48 @@ class SampleTable(object):
         raise InstallException(msg)
 
   def load_partition(self, django_user, partition_spec, filepath):
-    """
-    Upload data found at filepath to HDFS home of user, the load intto a specific partition
-    """
-    LOAD_PARTITION_HQL = \
-      """
-      ALTER TABLE %(tablename)s ADD PARTITION(%(partition_spec)s) LOCATION '%(filepath)s'
-      """
-
-    partition_dir = self._get_partition_dir(partition_spec)
-    hdfs_root_destination = self._get_hdfs_root_destination(django_user, subdir=partition_dir)
-    filename = filepath.split('/')[-1]
-    hdfs_file_destination = self._upload_to_hdfs(django_user, filepath, hdfs_root_destination, filename)
-
-    hql = LOAD_PARTITION_HQL % {'tablename': self.name, 'partition_spec': partition_spec, 'filepath': hdfs_root_destination}
-    LOG.info('Running load query: %s' % hql)
+    if has_concurrency_support():
+      with open(filepath) as f:
+        hql = \
+          """
+          INSERT INTO TABLE %(tablename)s
+          PARTITION (%(partition_spec)s)
+          VALUES %(values)s
+          """ % {
+            'tablename': self.name,
+            'partition_spec': partition_spec,
+            'values': self._get_sql_insert_values(f)
+          }
+    else:
+      # Upload the data at filepath to the user's HDFS home, then load it into a specific partition
+      LOAD_PARTITION_HQL = \
+        """
+        ALTER TABLE %(tablename)s ADD PARTITION(%(partition_spec)s) LOCATION '%(filepath)s'
+        """
+
+      partition_dir = self._get_partition_dir(partition_spec)
+      hdfs_root_destination = self._get_hdfs_root_destination(django_user, subdir=partition_dir)
+      filename = filepath.split('/')[-1]
+      hdfs_file_destination = self._upload_to_hdfs(django_user, filepath, hdfs_root_destination, filename)
+
+      hql = LOAD_PARTITION_HQL % {'tablename': self.name, 'partition_spec': partition_spec, 'filepath': hdfs_root_destination}
+
     self._load_data_to_table(django_user, hql)
 
 
   def load(self, django_user):
-    """
-    Upload data to HDFS home of user then load (aka move) it into the Hive table (in the Hive metastore in HDFS).
-    """
     if has_concurrency_support():
       with open(self._contents_file) as f:
-        data = f.read()
-        dialect = csv.Sniffer().sniff(data)
-        reader = csv.reader(data.splitlines(), delimiter=dialect.delimiter)
-
-        rows = [', '.join("'%s'" % col.replace("'", "\\'") for col in row) for row in reader][:MAX_INSERTED_ROWS]
         hql = \
           """
           INSERT INTO TABLE %(tablename)s
           VALUES %(values)s
           """ % {
             'tablename': self.name,
-            'values': ', '.join('(%s)' % row for row in rows)
+            'values': self._get_sql_insert_values(f)
           }
     else:
+      # Upload the data to the user's HDFS home, then load (i.e. move) it into the Hive table's warehouse directory
       hdfs_root_destination = self._get_hdfs_root_destination(django_user)
       hdfs_file_destination = self._upload_to_hdfs(django_user, self._contents_file, hdfs_root_destination)
       hql = \
@@ -236,7 +240,6 @@ class SampleTable(object):
           'filename': hdfs_file_destination
         }
 
-    LOG.info('Running load query: %s' % hql)
     self._load_data_to_table(django_user, hql)
 
 
@@ -306,6 +309,15 @@ class SampleTable(object):
       raise InstallException(msg)
 
 
+  def _get_sql_insert_values(self, f):
+    data = f.read()
+    dialect = csv.Sniffer().sniff(data)
+    reader = csv.reader(data.splitlines(), delimiter=dialect.delimiter)
+
+    rows = [', '.join("'%s'" % col.replace("'", "\\'") for col in row) for row in reader][:MAX_INSERTED_ROWS]
+    return ', '.join('(%s)' % row for row in rows)
+
+
 class SampleQuery(object):
 
   """Represents a query loaded from the designs.json file"""

+ 4 - 3
apps/beeswax/src/beeswax/management/commands/beeswax_install_examples_tests.py

@@ -43,7 +43,6 @@ class TestTransactionalTables():
 
 
   def test_load_sample_07_with_concurrency_support(self):
-
     table_data =   {
       "data_file": "sample_07.csv",
       "create_hql": "CREATE TABLE `sample_07` (\n  `code` string ,\n  `description` string ,\n  `total_emp` int ,\n  `salary` int )\nSTORED AS parquet\nTBLPROPERTIES ('transactional'='true', 'transactional_properties'='insert_only')\n",
@@ -58,14 +57,16 @@ class TestTransactionalTables():
 
         get.assert_called()
 
-  def test_load_tables_concurrency_support(self):
 
+  def test_load_tables_concurrency_support(self):
     with patch('beeswax.server.dbms.get') as get:
       with patch('beeswax.management.commands.beeswax_install_examples.has_concurrency_support') as has_concurrency_support:
+        get.return_value = Mock(
+          get_table=Mock(side_effect=Exception('Table could not be found'))
+        )
         has_concurrency_support.return_value = True
 
         cmd = Command()
-        # cmd.handle(app_name='beeswax', db_name='default', user=self.user)
         cmd._install_tables(self.user, 'beeswax', 'default', 'tables_transactional.json')
 
         get.assert_called()
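
The second test forces the "table not found" path: dbms.get is patched so the returned handle's get_table raises, and the installer then goes on to create and populate the sample tables. A minimal sketch of that mock shape with unittest.mock (the table name and message strings are examples only):

from unittest.mock import Mock

# Stand-in for the handle dbms.get() returns; a raising get_table is how
# the installer concludes the sample table does not exist yet.
db = Mock(get_table=Mock(side_effect=Exception('Table could not be found')))

try:
    db.get_table(database='default', table_name='sample_07')
except Exception as e:
    print('missing table, install proceeds:', e)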