
[core][cleanup] Refactor and remove old Py2 code checks and methods (#3683)

## What changes were proposed in this pull request?

- Remove all Py2 check conditionals.
- Remove some unused imports.
- Streamline the file-opening code and remove Py2 support.
- Remove the `smart_unicode` method and replace its usages with `smart_str` wherever required. This also meant adding an encoding param to the `smart_str` method for compatibility at the `smart_unicode` call sites it replaces (see the sketch after this list).


- Because of all the files modified by the above actions, we are also fixing Ruff violations in these files, **which made this change big.**
- **The idea is to merge this as a single commit with as little regression as possible; if something major comes up, we can simply revert to the previous state. For minor regressions, we can fix them on top of this change.**
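
For illustration, here is a minimal sketch of the `smart_unicode` → `smart_str` consolidation described above. The signature, defaults, and helper logic are assumptions for the sketch, not the exact Hue implementation:

```python
# Hypothetical sketch, not the exact Hue code: a Py3-only smart_str that
# takes an `encoding` parameter so that former smart_unicode call sites
# can switch to it without behavior changes.
def smart_str(s, encoding='utf-8', strings_only=False, errors='strict'):
  """Coerce `s` to str, decoding bytes with `encoding` rather than repr()-ing them."""
  if strings_only and isinstance(s, (type(None), int, float, bool)):
    return s  # leave non-string primitives untouched
  if isinstance(s, bytes):
    return s.decode(encoding, errors)  # the case smart_unicode used to cover
  return str(s)

# Call sites then migrate mechanically:
#   smart_unicode(value)  ->  smart_str(value, encoding='utf-8')
```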

## How was this patch tested?

- Manual and basic Hue testing.
- Running the existing full unit test suite.
- E2E cluster setup.
Harsh Gupta 1 year ago
parent
commit
ccecafa513
100 changed files with 2300 additions and 2725 deletions
  1. apps/about/src/about/templates/admin_wizard.mako (+0 -1)
  2. apps/about/src/about/urls.py (+1 -6)
  3. apps/about/src/about/views.py (+1 -8)
  4. apps/beeswax/src/beeswax/api.py (+0 -1)
  5. apps/beeswax/src/beeswax/api_tests.py (+8 -21)
  6. apps/beeswax/src/beeswax/conf.py (+0 -4)
  7. apps/beeswax/src/beeswax/create_table.py (+42 -45)
  8. apps/beeswax/src/beeswax/data_export.py (+11 -19)
  9. apps/beeswax/src/beeswax/design.py (+2 -17)
  10. apps/beeswax/src/beeswax/forms.py (+14 -15)
  11. apps/beeswax/src/beeswax/hive_site.py (+23 -16)
  12. apps/beeswax/src/beeswax/management/commands/beeswax_install_examples.py (+11 -34)
  13. apps/beeswax/src/beeswax/management/commands/beeswax_install_examples_tests.py (+5 -16)
  14. apps/beeswax/src/beeswax/management/commands/create_table_query_data.py (+4 -14)
  15. apps/beeswax/src/beeswax/models.py (+16 -31)
  16. apps/beeswax/src/beeswax/query_history.py (+49 -27)
  17. apps/beeswax/src/beeswax/server/dbms.py (+0 -2)
  18. apps/beeswax/src/beeswax/server/dbms_tests.py (+0 -1)
  19. apps/beeswax/src/beeswax/server/hive_metastore_server.py (+8 -33)
  20. apps/beeswax/src/beeswax/server/hive_server2_lib.py (+4 -20)
  21. apps/beeswax/src/beeswax/server/hive_server2_lib_tests.py (+31 -50)
  22. apps/beeswax/src/beeswax/tests.py (+65 -68)
  23. apps/beeswax/src/beeswax/urls.py (+8 -11)
  24. apps/beeswax/src/beeswax/views.py (+40 -36)
  25. apps/beeswax/src/beeswax/views_tests.py (+4 -10)
  26. apps/filebrowser/src/filebrowser/api.py (+2 -2)
  27. apps/filebrowser/src/filebrowser/conf.py (+1 -7)
  28. apps/filebrowser/src/filebrowser/forms.py (+37 -21)
  29. apps/filebrowser/src/filebrowser/lib/archives.py (+11 -16)
  30. apps/filebrowser/src/filebrowser/lib/xxd_test.py (+11 -18)
  31. apps/filebrowser/src/filebrowser/urls.py (+1 -6)
  32. apps/filebrowser/src/filebrowser/views.py (+17 -52)
  33. apps/hbase/src/hbase/api.py (+20 -22)
  34. apps/hbase/src/hbase/conf.py (+9 -13)
  35. apps/hbase/src/hbase/hbase_site.py (+10 -14)
  36. apps/hbase/src/hbase/management/commands/hbase_setup.py (+6 -13)
  37. apps/hbase/src/hbase/tests.py (+26 -30)
  38. apps/hbase/src/hbase/urls.py (+2 -5)
  39. apps/hbase/src/hbase/views.py (+25 -30)
  40. apps/help/src/help/urls.py (+2 -5)
  41. apps/help/src/help/views.py (+13 -16)
  42. apps/hive/src/hive/conf.py (+7 -11)
  43. apps/hive/src/hive/tests.py (+5 -6)
  44. apps/hive/src/hive/urls.py (+1 -5)
  45. apps/impala/src/impala/api.py (+29 -21)
  46. apps/impala/src/impala/api_tests.py (+6 -10)
  47. apps/impala/src/impala/conf.py (+24 -13)
  48. apps/impala/src/impala/dbms.py (+13 -21)
  49. apps/impala/src/impala/dbms_tests.py (+5 -12)
  50. apps/impala/src/impala/server.py (+10 -23)
  51. apps/impala/src/impala/server_tests.py (+5 -14)
  52. apps/impala/src/impala/test_impala_flags.py (+2 -6)
  53. apps/impala/src/impala/tests.py (+121 -107)
  54. apps/impala/src/impala/urls.py (+2 -5)
  55. apps/jobbrowser/src/jobbrowser/api.py (+16 -25)
  56. apps/jobbrowser/src/jobbrowser/api2.py (+5 -12)
  57. apps/jobbrowser/src/jobbrowser/apis/base_api.py (+8 -11)
  58. apps/jobbrowser/src/jobbrowser/apis/beat_api.py (+10 -20)
  59. apps/jobbrowser/src/jobbrowser/apis/beeswax_query_api.py (+19 -25)
  60. apps/jobbrowser/src/jobbrowser/apis/clusters.py (+13 -24)
  61. apps/jobbrowser/src/jobbrowser/apis/data_eng_api.py (+7 -19)
  62. apps/jobbrowser/src/jobbrowser/apis/data_warehouse.py (+9 -20)
  63. apps/jobbrowser/src/jobbrowser/apis/history.py (+6 -16)
  64. apps/jobbrowser/src/jobbrowser/apis/hive_query_api.py (+6 -12)
  65. apps/jobbrowser/src/jobbrowser/apis/job_api.py (+40 -63)
  66. apps/jobbrowser/src/jobbrowser/apis/livy_api.py (+5 -17)
  67. apps/jobbrowser/src/jobbrowser/apis/query_api.py (+27 -30)
  68. apps/jobbrowser/src/jobbrowser/apis/query_api_tests.py (+9 -15)
  69. apps/jobbrowser/src/jobbrowser/apis/schedule_api.py (+13 -22)
  70. apps/jobbrowser/src/jobbrowser/apis/schedule_hive.py (+3 -14)
  71. apps/jobbrowser/src/jobbrowser/apis/workflow_api.py (+21 -20)
  72. apps/jobbrowser/src/jobbrowser/conf.py (+4 -6)
  73. apps/jobbrowser/src/jobbrowser/models.py (+13 -16)
  74. apps/jobbrowser/src/jobbrowser/templatetags/unix_ms_to_datetime.py (+7 -8)
  75. apps/jobbrowser/src/jobbrowser/tests.py (+796 -723)
  76. apps/jobbrowser/src/jobbrowser/urls.py (+2 -6)
  77. apps/jobbrowser/src/jobbrowser/views.py (+46 -46)
  78. apps/jobbrowser/src/jobbrowser/yarn_models.py (+12 -17)
  79. apps/jobsub/src/jobsub/conf.py (+3 -8)
  80. apps/jobsub/src/jobsub/forms.py (+6 -8)
  81. apps/jobsub/src/jobsub/models.py (+7 -10)
  82. apps/jobsub/src/jobsub/old_migrations/0005_unify_with_oozie.py (+1 -4)
  83. apps/jobsub/src/jobsub/old_migrations/0006_chg_varchars_to_textfields.py (+1 -4)
  84. apps/jobsub/src/jobsub/urls.py (+2 -5)
  85. apps/jobsub/src/jobsub/views.py (+10 -14)
  86. apps/metastore/src/metastore/forms.py (+2 -13)
  87. apps/metastore/src/metastore/templates/metastore.mako (+0 -1)
  88. apps/metastore/src/metastore/tests.py (+115 -114)
  89. apps/metastore/src/metastore/urls.py (+2 -5)
  90. apps/metastore/src/metastore/views.py (+31 -40)
  91. apps/oozie/src/oozie/conf.py (+10 -12)
  92. apps/oozie/src/oozie/decorators.py (+5 -12)
  93. apps/oozie/src/oozie/forms.py (+35 -16)
  94. apps/oozie/src/oozie/importlib/coordinators.py (+6 -10)
  95. apps/oozie/src/oozie/importlib/workflows.py (+22 -32)
  96. apps/oozie/src/oozie/management/commands/oozie_setup.py (+11 -26)
  97. apps/oozie/src/oozie/models.py (+47 -60)
  98. apps/oozie/src/oozie/models2.py (+70 -83)
  99. apps/oozie/src/oozie/models2_tests.py (+46 -27)
  100. apps/oozie/src/oozie/old_migrations/0020_chg_large_varchars_to_textfields.py (+1 -4)

+ 0 - 1
apps/about/src/about/templates/admin_wizard.mako

@@ -23,7 +23,6 @@ from metadata.conf import OPTIMIZER, has_optimizer
 
 from desktop.auth.backend import is_admin
 from desktop.conf import has_connectors
-from desktop.lib.i18n import smart_unicode
 from desktop.views import commonheader, commonfooter
 
 if sys.version_info[0] > 2:

+ 1 - 6
apps/about/src/about/urls.py

@@ -15,15 +15,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import sys
+from django.urls import re_path
 
 from about import views as about_views
 
-if sys.version_info[0] > 2:
-  from django.urls import re_path
-else:
-  from django.conf.urls import url as re_path
-
 urlpatterns = [
   re_path(r'^$', about_views.admin_wizard, name='index'),
   re_path(r'^admin_wizard$', about_views.admin_wizard, name='admin_wizard'),

+ 1 - 8
apps/about/src/about/views.py

@@ -15,9 +15,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from builtins import str
-import logging
-import sys
+from django.utils.translation import gettext as _
 
 from desktop import appmanager
 from desktop.auth.backend import is_hue_admin
@@ -26,11 +24,6 @@ from desktop.lib.django_util import JsonResponse, render
 from desktop.models import Settings, hue_version
 from desktop.views import collect_usage
 
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
-
 
 def admin_wizard(request):
   if is_hue_admin(request.user):

+ 0 - 1
apps/beeswax/src/beeswax/api.py

@@ -16,7 +16,6 @@
 # limitations under the License.
 
 import re
-import sys
 import json
 import logging
 from builtins import zip

+ 8 - 21
apps/beeswax/src/beeswax/api_tests.py

@@ -16,27 +16,19 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import json
-import logging
-import pytest
 import sys
+import logging
+from unittest.mock import Mock, patch
 
+import pytest
 from django.test import TestCase
 from requests.exceptions import ReadTimeout
 
+from beeswax.api import _autocomplete, get_functions
 from desktop.lib.django_test_util import make_logged_in_client
 from desktop.lib.test_utils import add_to_group, grant_access
 from useradmin.models import User
 
-from beeswax.api import _autocomplete, get_functions
-
-
-if sys.version_info[0] > 2:
-  from unittest.mock import patch, Mock
-else:
-  from mock import patch, Mock
-
-
 LOG = logging.getLogger()
 
 
@@ -47,9 +39,8 @@ class TestApi():
     self.client = make_logged_in_client(username="test", groupname="default", recreate=True, is_superuser=False)
     self.user = User.objects.get(username="test")
 
-
   def test_autocomplete_time_out(self):
-    get_tables_meta=Mock(
+    get_tables_meta = Mock(
       side_effect=ReadTimeout("HTTPSConnectionPool(host='gethue.com', port=10001): Read timed out. (read timeout=120)")
     )
     db = Mock(
@@ -65,7 +56,6 @@ class TestApi():
         'error': "HTTPSConnectionPool(host='gethue.com', port=10001): Read timed out. (read timeout=120)"
       })
 
-
   def test_get_functions(self):
     db = Mock(
       get_functions=Mock(
@@ -83,7 +73,6 @@ class TestApi():
       resp ==
       [{'name': 'f1'}, {'name': 'f2'}])
 
-
   def test_get_functions(self):
     with patch('beeswax.api._get_functions') as _get_functions:
       db = Mock()
@@ -97,12 +86,11 @@ class TestApi():
         resp['functions'] ==
         [{'name': 'f1'}, {'name': 'f2'}, {'name': 'f3'}])
 
-
   def test_get_function(self):
     db = Mock()
-    db.client = Mock(query_server = {'dialect': 'hive'})
+    db.client = Mock(query_server={'dialect': 'hive'})
     db.get_function = Mock(
-      return_value = [
+      return_value=[
         ['floor_month(param) - Returns the timestamp at a month granularity'],
         ['param needs to be a timestamp value'],
         ['Example:'],
@@ -123,8 +111,7 @@ class TestApi():
             '> SELECT floor_month(CAST(\'yyyy-MM-dd HH:mm:ss\' AS TIMESTAMP)) FROM src;\nyyyy-MM-01 00:00:00'
       })
 
-
-    db.client = Mock(query_server = {'dialect': 'impala'})
+    db.client = Mock(query_server={'dialect': 'impala'})
     data = _autocomplete(db, operation='function')
 
     assert data['function'] == {}

+ 0 - 4
apps/beeswax/src/beeswax/conf.py

@@ -15,13 +15,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from __future__ import division
-
-import sys
 import math
 import logging
 import os.path
-from builtins import str
 
 from django.utils.translation import gettext as _, gettext_lazy as _t
 

+ 42 - 45
apps/beeswax/src/beeswax/create_table.py

@@ -15,46 +15,39 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
-from __future__ import division
-from builtins import str
-from builtins import range
-from builtins import object
+import re
 import csv
 import gzip
 import json
-import logging
 import math
-import re
-import sys
+import logging
 
-from django.urls import reverse
 from django.http import QueryDict
+from django.urls import reverse
+from django.utils.translation import gettext as _
 
 from aws.s3.s3fs import S3FileSystemException
+from beeswax.common import TERMINATORS
+from beeswax.design import hql_query
+from beeswax.forms import (
+  TERMINATOR_CHOICES,
+  ColumnTypeFormSet,
+  CreateByImportDelimForm,
+  CreateByImportFileForm,
+  CreateTableForm,
+  PartitionTypeFormSet,
+)
+from beeswax.server import dbms
+from beeswax.server.dbms import QueryServerException
+from beeswax.views import execute_directly
 from desktop.context_processors import get_app_name
 from desktop.lib import django_mako, i18n
+from desktop.lib.django_forms import MultiForm
 from desktop.lib.django_util import render
 from desktop.lib.exceptions_renderable import PopupException
-from desktop.lib.django_forms import MultiForm
 from desktop.models import _get_apps
 from hadoop.fs import hadoopfs
 
-from beeswax.common import TERMINATORS
-from beeswax.design import hql_query
-from beeswax.forms import CreateTableForm, ColumnTypeFormSet,\
-  PartitionTypeFormSet, CreateByImportFileForm, CreateByImportDelimForm,\
-  TERMINATOR_CHOICES
-from beeswax.server import dbms
-from beeswax.server.dbms import QueryServerException
-from beeswax.views import execute_directly
-
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
-
-
 LOG = logging.getLogger()
 
 
@@ -62,7 +55,7 @@ def create_table(request, database='default'):
   """Create a table by specifying its attributes manually"""
   db = dbms.get(request.user)
   dbs = db.get_databases()
-  databases = [{'name':db, 'url':reverse('beeswax:create_table', kwargs={'database': db})} for db in dbs]
+  databases = [{'name': db, 'url': reverse('beeswax:create_table', kwargs={'database': db})} for db in dbs]
 
   form = MultiForm(
       table=CreateTableForm,
@@ -77,8 +70,8 @@ def create_table(request, database='default'):
 
     if request.POST.get('create'):
       if form.is_valid():
-        columns = [ f.cleaned_data for f in form.columns.forms ]
-        partition_columns = [ f.cleaned_data for f in form.partitions.forms ]
+        columns = [f.cleaned_data for f in form.columns.forms]
+        partition_columns = [f.cleaned_data for f in form.partitions.forms]
         proposed_query = django_mako.render_to_string("create_table_statement.mako", {
             'databases': databases,
             'database': database,
@@ -109,15 +102,16 @@ def create_table(request, database='default'):
 
 IMPORT_PEEK_SIZE = 5 * 1024**2
 IMPORT_PEEK_NLINES = 10
-DELIMITERS = [ hive_val for hive_val, desc, ascii in TERMINATORS ]
-DELIMITER_READABLE = {'\\001' : _('ctrl-As'),
-                      '\\002' : _('ctrl-Bs'),
-                      '\\003' : _('ctrl-Cs'),
-                      '\\t'   : _('tabs'),
-                      ','     : _('commas'),
-                      ' '     : _('spaces')}
+DELIMITERS = [hive_val for hive_val, desc, ascii in TERMINATORS]
+DELIMITER_READABLE = {'\\001': _('ctrl-As'),
+                      '\\002': _('ctrl-Bs'),
+                      '\\003': _('ctrl-Cs'),
+                      '\\t': _('tabs'),
+                      ',': _('commas'),
+                      ' ': _('spaces')}
 FILE_READERS = []
 
+
 def import_wizard(request, database='default'):
   """
   Help users define table and based on a file they want to import to Hive.
@@ -133,7 +127,7 @@ def import_wizard(request, database='default'):
 
   db = dbms.get(request.user)
   dbs = db.get_databases()
-  databases = [{'name':db, 'url':reverse('beeswax:import_wizard', kwargs={'database': db})} for db in dbs]
+  databases = [{'name': db, 'url': reverse('beeswax:import_wizard', kwargs={'database': db})} for db in dbs]
 
   if request.method == 'POST':
     #
@@ -164,7 +158,7 @@ def import_wizard(request, database='default'):
       cancel_s3_column_def = request.POST.get('cancel_create')  # Step 3 -> 2
 
       # Exactly one of these should be True
-      if len([_f for _f in (do_s2_auto_delim, do_s2_user_delim, do_s3_column_def, do_hive_create, cancel_s2_user_delim, cancel_s3_column_def) if _f]) != 1:
+      if len([_f for _f in (do_s2_auto_delim, do_s2_user_delim, do_s3_column_def, do_hive_create, cancel_s2_user_delim, cancel_s3_column_def) if _f]) != 1:  # noqa: E501
         raise PopupException(_('Invalid form submission'))
 
       if not do_s2_auto_delim:
@@ -198,7 +192,8 @@ def import_wizard(request, database='default'):
           raise PopupException(_('Path location "%s" is invalid: %s') % (path, e))
 
         delim_is_auto = True
-        fields_list, n_cols, s2_delim_form = _delim_preview(request.fs, s1_file_form, encoding, [reader.TYPE for reader in FILE_READERS], DELIMITERS)
+        fields_list, n_cols, s2_delim_form = _delim_preview(
+          request.fs, s1_file_form, encoding, [reader.TYPE for reader in FILE_READERS], DELIMITERS)
 
       if (do_s2_user_delim or do_s3_column_def or cancel_s3_column_def) and s2_delim_form.is_valid():
         # Delimit based on input
@@ -236,7 +231,7 @@ def import_wizard(request, database='default'):
         try:
           fields_list_for_json = list(fields_list)
           if fields_list_for_json:
-            fields_list_for_json[0] = [re.sub('[^\w]', '', a) for a in fields_list_for_json[0]] # Cleaning headers
+            fields_list_for_json[0] = [re.sub(r'[^\w]', '', a) for a in fields_list_for_json[0]]  # Cleaning headers
           apps_list = _get_apps(request.user, '')
           return render('import_wizard_define_columns.mako', request, {
             'apps': apps_list,
@@ -251,7 +246,8 @@ def import_wizard(request, database='default'):
             'databases': databases
           })
         except Exception as e:
-          raise PopupException(_("The selected delimiter is creating an un-even number of columns. Please make sure you don't have empty columns."), detail=e)
+          raise PopupException(_(
+            "The selected delimiter is creating an un-even number of columns. Please make sure you don't have empty columns."), detail=e)
 
       #
       # Final: Execute
@@ -271,7 +267,7 @@ def import_wizard(request, database='default'):
                 'path': path,
                 'skip_header': request.GET.get('removeHeader', 'off').lower() == 'on'
              },
-            'columns': [ f.cleaned_data for f in s3_col_formset.forms ],
+            'columns': [f.cleaned_data for f in s3_col_formset.forms],
             'partition_columns': [],
             'database': database,
             'databases': databases
@@ -337,7 +333,7 @@ def _delim_preview(fs, file_form, encoding, file_types, delimiters):
     LOG.exception(msg)
     raise PopupException(msg)
 
-  n_cols = max([ len(row) for row in fields_list ])
+  n_cols = max([len(row) for row in fields_list])
   # ``delimiter`` is a MultiValueField. delimiter_0 and delimiter_1 are the sub-fields.
   delimiter_0 = delim
   delimiter_1 = ''
@@ -409,13 +405,12 @@ def _readfields(lines, delimiters):
     avg_n_fields = math.floor(sum(len_list) / n_lines)
     sq_of_exp = avg_n_fields * avg_n_fields
 
-    len_list_sq = [l * l for l in len_list]
+    len_list_sq = [len * len for len in len_list]
     exp_of_sq = math.floor(sum(len_list_sq) / n_lines)
     var = exp_of_sq - sq_of_exp
     # Favour more fields
     return (1000.0 / (var + 1)) + avg_n_fields
 
-
   max_score = -1
   res = (None, None)
 
@@ -424,7 +419,7 @@ def _readfields(lines, delimiters):
     delimiter = delim.decode('string_escape')
     try:
       fields_list = _get_rows(lines, delimiter)
-    except:
+    except Exception:
       LOG.exception('failed to get rows')
       fields_list = [line.split(delimiter) for line in lines if line]
 
@@ -472,6 +467,7 @@ class GzipFileReader(object):
     except UnicodeError:
       return None
 
+
 FILE_READERS.append(GzipFileReader)
 
 
@@ -488,6 +484,7 @@ class TextFileReader(object):
     except UnicodeError:
       return None
 
+
 FILE_READERS.append(TextFileReader)
 
 

+ 11 - 19
apps/beeswax/src/beeswax/data_export.py

@@ -15,28 +15,20 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from builtins import str
-from builtins import object
 import json
-import logging
 import math
-import sys
-import types
-
-from desktop.lib import export_csvxls
-from beeswax import common, conf
+import logging
 
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
+from django.utils.translation import gettext as _
 
+from beeswax import common, conf
+from desktop.lib import export_csvxls
 
 LOG = logging.getLogger()
 
 
 FETCH_SIZE = 1000
-DOWNLOAD_COOKIE_AGE = 1800 # 30 minutes
+DOWNLOAD_COOKIE_AGE = 1800  # 30 minutes
 
 
 def download(handle, format, db, id=None, file_name='query_result', user_agent=None):
@@ -115,22 +107,22 @@ class DataAdapter(object):
   # Avoid serialization to string where possible
   def _getsizeofascii(self, row):
     size = 0
-    size += max(len(row) - 1, 0) # CSV commas between columns
-    size += 2 # CSV \r\n at the end of row
+    size += max(len(row) - 1, 0)  # CSV commas between columns
+    size += 2  # CSV \r\n at the end of row
     for col in row:
       col_type = type(col)
-      if col_type == int:
+      if col_type is int:
         if col == 0:
           size += 1
         elif col < 0:
           size += int(math.log10(-1 * col)) + 2
         else:
           size += int(math.log10(col)) + 1
-      elif col_type == bytes:
+      elif col_type is bytes:
         size += len(col)
-      elif col_type == float:
+      elif col_type is float:
         size += len(str(col))
-      elif col_type == bool:
+      elif col_type is bool:
         size += 4
       elif col_type == type(None):
         size += 4

+ 2 - 17
apps/beeswax/src/beeswax/design.py

@@ -18,30 +18,18 @@
 """
 The HQLdesign class can (de)serialize a design to/from a QueryDict.
 """
-
-from future import standard_library
-standard_library.install_aliases()
-from builtins import object
 import json
 import logging
-import os
-import re
-import sys
 import urllib.parse
 
 import django.http
 from django import forms
 from django.forms import ValidationError
+from django.utils.translation import gettext as _
 
-from notebook.sql_utils import split_statements, strip_trailing_semicolon
 from desktop.lib.django_forms import BaseSimpleFormSet, MultiForm
 from hadoop.cluster import get_hdfs
-
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
-
+from notebook.sql_utils import split_statements, strip_trailing_semicolon
 
 LOG = logging.getLogger()
 
@@ -296,6 +284,3 @@ def denormalize_formset_dict(data_dict_list, formset, attr_list):
 
   res[str(formset.management_form.add_prefix('next_form_id'))] = str(len(data_dict_list))
   return res
-
-  def __str__(self):
-    return '%s: %s' % (self.__class__, self.query)

+ 14 - 15
apps/beeswax/src/beeswax/forms.py

@@ -17,22 +17,16 @@
 
 import sys
 
-from builtins import chr
 from django import forms
-from django.core.validators import MinValueValidator, MaxValueValidator
+from django.core.validators import MaxValueValidator, MinValueValidator
 from django.forms import NumberInput
+from django.utils.translation import gettext as _, gettext_lazy as _t
 
 from aws.s3 import S3_ROOT, S3A_ROOT
-from desktop.lib.django_forms import simple_formset_factory, DependencyAwareForm, ChoiceOrOtherField, MultiForm, SubmitButton
-from filebrowser.forms import PathField
-
 from beeswax import common
 from beeswax.models import SavedQuery
-
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _, gettext_lazy as _t
-else:
-  from django.utils.translation import ugettext as _, ugettext_lazy as _t
+from desktop.lib.django_forms import ChoiceOrOtherField, DependencyAwareForm, MultiForm, SubmitButton, simple_formset_factory
+from filebrowser.forms import PathField
 
 
 class QueryForm(MultiForm):
@@ -129,7 +123,7 @@ class SaveResultsTableForm(forms.Form):
       label=_t("Table Name"),
       required=True,
       help_text=_t("Name of the new table")
-  ) # Can also contain a DB prefixed table name, e.g. DB_NAME.TABLE_NAME
+  )  # Can also contain a DB prefixed table name, e.g. DB_NAME.TABLE_NAME
 
   def __init__(self, *args, **kwargs):
     self.db = kwargs.pop('db', None)
@@ -186,6 +180,7 @@ class FunctionForm(forms.Form):
   name = forms.CharField(required=True)
   class_name = forms.CharField(required=True)
 
+
 FunctionFormSet = simple_formset_factory(FunctionForm)
 
 
@@ -212,11 +207,13 @@ class SettingForm(forms.Form):
   key = forms.CharField()
   value = forms.CharField()
 
+
 SettingFormSet = simple_formset_factory(SettingForm)
 
 
 # In theory, there are only 256 of these...
-TERMINATOR_CHOICES = [ (hive_val, desc) for hive_val, desc, ascii in common.TERMINATORS ]
+TERMINATOR_CHOICES = [(hive_val, desc) for hive_val, desc, ascii in common.TERMINATORS]
+
 
 class CreateTableForm(DependencyAwareForm):
   """
@@ -230,7 +227,7 @@ class CreateTableForm(DependencyAwareForm):
 
   # Row Formatting
   row_format = forms.ChoiceField(required=True,
-                                choices=common.to_choices([ "Delimited", "SerDe" ]),
+                                choices=common.to_choices(["Delimited", "SerDe"]),
                                 initial="Delimited")
 
   # Delimited Row
@@ -370,13 +367,14 @@ class CreateByImportDelimForm(forms.Form):
 
 # Note, struct is not currently supported.  (Because it's recursive, for example.)
 HIVE_TYPES = \
-    ( "string", "tinyint", "smallint", "int", "bigint", "boolean",
+    ("string", "tinyint", "smallint", "int", "bigint", "boolean",
       "float", "double", "array", "map", "timestamp", "date",
       "char", "varchar")
 HIVE_PRIMITIVE_TYPES = \
     ("string", "tinyint", "smallint", "int", "bigint", "boolean",
       "float", "double", "timestamp", "date", "char", "varchar")
 
+
 class PartitionTypeForm(forms.Form):
   dependencies = [
     ("column_type", "char", "char_length"),
@@ -393,6 +391,7 @@ class PartitionTypeForm(forms.Form):
                                       validators=[MinValueValidator(1), MaxValueValidator(65355)],
                                       help_text=_t("Specify if column_is varchar"))
 
+
 class ColumnTypeForm(DependencyAwareForm):
   """
   Form used to specify a column during table creation
@@ -432,7 +431,7 @@ PartitionTypeFormSet = simple_formset_factory(PartitionTypeForm, add_label=_t("A
 
 def _clean_databasename(name):
   try:
-    if name in db.get_databases(): # Will always fail
+    if name in db.get_databases():  # Will always fail
       raise forms.ValidationError(_('Database "%(name)s" already exists.') % {'name': name})
   except Exception:
     return name

+ 23 - 16
apps/beeswax/src/beeswax/hive_site.py

@@ -19,25 +19,17 @@
 Helper for reading hive-site.xml
 """
 
-from builtins import str
+import re
 import errno
+import socket
 import logging
 import os.path
-import re
-import socket
-import sys
 
+import beeswax.conf
 from desktop.lib import security_util
 from hadoop import confparse
 from hadoop.ssl_client_site import get_trustore_location, get_trustore_password
 
-import beeswax.conf
-
-if sys.version_info[0] > 2:
-  open_file = open
-else:
-  open_file = file
-
 LOG = logging.getLogger()
 
 _HIVE_SITE_PATH = None                  # Path to hive-site.xml
@@ -72,13 +64,14 @@ _CNF_HIVE_EXECUTION_ENGINE = 'hive.execution.engine'
 
 
 # Host is whatever up to the colon. Allow and ignore a trailing slash.
-_THRIFT_URI_RE = re.compile("^thrift://([^:]+):(\d+)[/]?$")
+_THRIFT_URI_RE = re.compile(r"^thrift://([^:]+):(\d+)[/]?$")
 
 
 class MalformedHiveSiteException(Exception):
   """Parsing error class used internally"""
   pass
 
+
 def reset():
   """Reset the cached conf"""
   global _HIVE_SITE_DICT
@@ -105,7 +98,7 @@ def get_metastore():
 
     if not is_local:
       use_sasl = str(get_conf().get(_CNF_METASTORE_SASL, 'false')).lower() == 'true'
-      thrift_uri = thrift_uris.split(",")[0] # First URI
+      thrift_uri = thrift_uris.split(",")[0]  # First URI
       host = socket.getfqdn()
       match = _THRIFT_URI_RE.match(thrift_uri)
       if not match:
@@ -138,18 +131,23 @@ def get_hiveserver2_kerberos_principal(hostname_or_ip):
   else:
     return None
 
+
 def get_metastore_warehouse_dir():
   return get_conf().get(_CNF_METASTORE_WAREHOUSE_DIR, '/user/hive/warehouse')
 
+
 def get_hiveserver2_authentication():
-  return get_conf().get(_CNF_HIVESERVER2_AUTHENTICATION, 'NONE').upper() # NONE == PLAIN SASL
+  return get_conf().get(_CNF_HIVESERVER2_AUTHENTICATION, 'NONE').upper()  # NONE == PLAIN SASL
+
 
 def get_hiveserver2_thrift_sasl_qop():
   return get_conf().get(_CNF_HIVESERVER2_THRIFT_SASL_QOP, 'NONE').lower()
 
+
 def hiveserver2_impersonation_enabled():
   return get_conf().get(_CNF_HIVESERVER2_IMPERSONATION, 'TRUE').upper() == 'TRUE'
 
+
 def hiveserver2_jdbc_url():
   is_transport_mode_http = hiveserver2_transport_mode() == 'HTTP'
   urlbase = 'jdbc:hive2://%s:%s/default' % (
@@ -180,15 +178,19 @@ def hiveserver2_jdbc_url():
 def hiveserver2_use_ssl():
   return get_conf().get(_CNF_HIVESERVER2_USE_SSL, 'FALSE').upper() == 'TRUE'
 
+
 def hiveserver2_transport_mode():
   return get_conf().get(_CNF_HIVESERVER2_TRANSPORT_MODE, 'TCP').upper()
 
+
 def hiveserver2_thrift_binary_port():
   return get_conf().get(_CNF_HIVESERVER2_THRIFT_BINARY_PORT)
 
+
 def hiveserver2_thrift_http_port():
   return get_conf().get(_CNF_HIVESERVER2_THRIFT_HTTP_PORT)
 
+
 def hiveserver2_thrift_http_path():
   return get_conf().get(_CNF_HIVESERVER2_THRIFT_HTTP_PATH, 'cliservice')
 
@@ -202,15 +204,19 @@ def has_concurrency_support():
   ''''Possibly use set -v in future to obtain properties hive.create.as.acid=true & hive.create.as.insert.only=true'''
   return get_conf().get(_CNF_HIVE_SUPPORT_CONCURRENCY, 'TRUE').upper() == 'TRUE'
 
+
 def get_hive_hook_proto_base_directory():
   return get_conf().get(_CNF_HIVE_HOOK_PROTO_BASE_DIR)
 
+
 def get_hive_execution_mode():
   return get_conf().get(_CNF_HIVE_EXECUTION_MODE)
 
+
 def get_hive_execution_engine():
   return get_conf().get(_CNF_HIVE_EXECUTION_ENGINE)
 
+
 def _parse_hive_site():
   """
   Parse hive-site.xml and store in _HIVE_SITE_DICT
@@ -220,7 +226,7 @@ def _parse_hive_site():
 
   _HIVE_SITE_PATH = os.path.join(beeswax.conf.HIVE_CONF_DIR.get(), 'hive-site.xml')
   try:
-    data = open_file(_HIVE_SITE_PATH, 'r').read()
+    data = open(_HIVE_SITE_PATH, 'r').read()
   except IOError as err:
     if err.errno != errno.ENOENT:
       LOG.error('Cannot read from "%s": %s' % (_HIVE_SITE_PATH, err))
@@ -230,9 +236,10 @@ def _parse_hive_site():
 
   _HIVE_SITE_DICT = confparse.ConfParse(data)
 
+
 def get_hive_site_content():
   hive_site_path = os.path.join(beeswax.conf.HIVE_CONF_DIR.get(), 'hive-site.xml')
   if not os.path.exists(hive_site_path):
     return ''
   else:
-    return open_file(hive_site_path, 'r').read()
+    return open(hive_site_path, 'r').read()

+ 11 - 34
apps/beeswax/src/beeswax/management/commands/beeswax_install_examples.py

@@ -15,33 +15,24 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from builtins import object
-import csv
-import logging
-import json
 import os
-import sys
+import csv
 import pwd
+import json
+import logging
 
 from django.core.management.base import BaseCommand
+from django.utils.translation import gettext as _
 
-from desktop.lib.exceptions_renderable import PopupException
-from desktop.conf import USE_NEW_EDITOR
-from desktop.models import Directory, Document2, Document2Permission
-from hadoop import cluster
-from notebook.models import import_saved_beeswax_query, make_notebook, MockRequest, _get_example_directory
-from useradmin.models import get_default_user_group, install_sample_user, User
-
-from beeswax.design import hql_query
 from beeswax.conf import LOCAL_EXAMPLES_DATA_DIR
 from beeswax.hive_site import has_concurrency_support
-from beeswax.models import SavedQuery, HQL, IMPALA, RDBMS
-from beeswax.server import dbms
-
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
+from beeswax.models import HQL, IMPALA, RDBMS, SavedQuery
+from desktop.conf import USE_NEW_EDITOR
+from desktop.lib.exceptions_renderable import PopupException
+from desktop.models import Document2, Document2Permission
+from hadoop import cluster
+from notebook.models import MockRequest, _get_example_directory, import_saved_beeswax_query, make_notebook
+from useradmin.models import User, get_default_user_group, install_sample_user
 
 LOG = logging.getLogger()
 
@@ -104,7 +95,6 @@ class Command(BaseCommand):
 
     return self.successes, self.errors
 
-
   def install_tables(self, django_user, dialect, db_name, tables, interpreter=None, request=None):
     data_dir = LOCAL_EXAMPLES_DATA_DIR.get()
     table_file = open(os.path.join(data_dir, tables))
@@ -129,7 +119,6 @@ class Command(BaseCommand):
           LOG.error(msg)
           self.errors.append(_('Could not install table %s: %s') % (full_name, msg))
 
-
   def install_queries(self, django_user, dialect, interpreter=None):
     design_file = open(os.path.join(LOCAL_EXAMPLES_DATA_DIR.get(), 'queries.json'))
     design_list = json.load(design_file)
@@ -196,7 +185,6 @@ class SampleTable(object):
       self._contents_file = os.path.join(self._data_dir, self.filename)
       self._check_file_contents(self._contents_file)
 
-
   def install(self, django_user):
     if self.dialect in ('hive', 'impala'):
       if has_concurrency_support() and not self.is_transactional:
@@ -215,7 +203,6 @@ class SampleTable(object):
 
     return True
 
-
   def create(self, django_user):
     """
     Create SQL sample table.
@@ -242,7 +229,6 @@ class SampleTable(object):
       else:
         raise ex
 
-
   def load(self, django_user):
     inserts = []
 
@@ -279,7 +265,6 @@ class SampleTable(object):
     for insert in inserts:
       self._load_data_to_table(django_user, insert)
 
-
   def load_partition(self, django_user, partition_spec, filepath, columns):
     if (self.dialect not in ('hive', 'impala') or has_concurrency_support()) and self.is_transactional:
       with open(filepath) as f:
@@ -309,14 +294,12 @@ class SampleTable(object):
 
     self._load_data_to_table(django_user, hql)
 
-
   def _check_file_contents(self, filepath):
     if not os.path.isfile(filepath):
       msg = _('Cannot find table data in "%(file)s".') % {'file': filepath}
       LOG.error(msg)
       raise ValueError(msg)
 
-
   def _get_partition_dir(self, partition_spec):
     parts = partition_spec.split(',')
     last_part = parts[-1]
@@ -324,7 +307,6 @@ class SampleTable(object):
     part_dir = part_value.strip("'").replace('-', '_')
     return part_dir
 
-
   def _get_hdfs_root_destination(self, django_user, subdir=None):
     fs = cluster.get_hdfs()
     hdfs_root_destination = None
@@ -349,7 +331,6 @@ class SampleTable(object):
 
     return hdfs_root_destination
 
-
   def _upload_to_hdfs(self, django_user, local_filepath, hdfs_root_destination, filename=None):
     fs = cluster.get_hdfs()
 
@@ -362,7 +343,6 @@ class SampleTable(object):
 
     return hdfs_destination
 
-
   def _load_data_to_table(self, django_user, hql):
     LOG.info('Loading data into table "%s"' % (self.name,))
 
@@ -378,7 +358,6 @@ class SampleTable(object):
     )
     job.execute_and_wait(self.request)
 
-
   def _get_sql_insert_values(self, f, columns=None):
     data = f.read()
     dialect = csv.Sniffer().sniff(data)
@@ -409,7 +388,6 @@ class SampleQuery(object):
     self.type = int(data_dict['type'])
     self.data = data_dict['data']
 
-
   def install(self, django_user, interpreter=None):
     """
     Install queries. Raise InstallException on failure.
@@ -468,7 +446,6 @@ class SampleQuery(object):
       examples_dir.share(django_user, Document2Permission.READ_PERM, groups=[get_default_user_group()])
       LOG.info('Successfully installed sample query: %s' % doc2)
 
-
   def _document_type(self, type, interpreter=None):
     if type == HQL:
       return 'query-hive'

+ 5 - 16
apps/beeswax/src/beeswax/management/commands/beeswax_install_examples_tests.py

@@ -16,22 +16,17 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import sys
 import logging
+from unittest.mock import patch
+
 import pytest
-import sys
 
+from beeswax.management.commands.beeswax_install_examples import Command, SampleQuery, SampleTable
 from desktop.auth.backend import rewrite_user
 from desktop.lib.django_test_util import make_logged_in_client
 from desktop.models import Document2
-from useradmin.models import User, install_sample_user
-
-from beeswax.management.commands.beeswax_install_examples import SampleTable, Command, SampleQuery
-
-if sys.version_info[0] > 2:
-  from unittest.mock import patch, Mock, MagicMock
-else:
-  from mock import patch, Mock, MagicMock
-
+from useradmin.models import User
 
 LOG = logging.getLogger()
 
@@ -43,7 +38,6 @@ class TestStandardTables():
     self.client = make_logged_in_client(username="test", groupname="default", recreate=True, is_superuser=False)
     self.user = User.objects.get(username="test")
 
-
   def test_install_queries_mysql(self):
     design_dict = {
       "name": "TestStandardTables Query",
@@ -115,7 +109,6 @@ class TestHiveServer2():
       query = Document2.objects.filter(name='TestBeswaxHiveTables Query').get()
       assert 'query-hive' == query.type
 
-
   def test_create_table_load_data_but_no_fs(self):
     table_data = {
       "data_file": "sample_07.csv",
@@ -134,7 +127,6 @@ class TestHiveServer2():
         make_notebook.assert_not_called()
 
 
-
 @pytest.mark.django_db
 class TestTransactionalTables():
 
@@ -142,7 +134,6 @@ class TestTransactionalTables():
     self.client = make_logged_in_client(username="test", groupname="default", recreate=True, is_superuser=False)
     self.user = rewrite_user(User.objects.get(username="test"))
 
-
   def test_load_sample_07_with_concurrency_support(self):
     table_data = {
       "data_file": "sample_07.csv",
@@ -161,7 +152,6 @@ class TestTransactionalTables():
 
         make_notebook.assert_called()
 
-
   def test_load_web_logs_with_concurrency_support(self):
     table_data = {
       "partition_files": {
@@ -204,7 +194,6 @@ class TestTransactionalTables():
 
         make_notebook.assert_called()
 
-
   def test_create_phoenix_table(self):
     table_data = {
       "data_file": "./tables/us_population.csv",

+ 4 - 14
apps/beeswax/src/beeswax/management/commands/create_table_query_data.py

@@ -15,27 +15,18 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from builtins import str
-
 import logging
-import sys
 
 from django.core.management.base import BaseCommand
+from django.utils.translation import gettext as _
 
-from desktop.lib import django_mako
+from beeswax import hive_site
+from beeswax.design import hql_query
 from beeswax.server import dbms
 from beeswax.server.dbms import get_query_server_config
-
-from beeswax.design import hql_query
-from beeswax import hive_site
+from desktop.lib import django_mako
 from useradmin.models import install_sample_user
 
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
-
-
 LOG = logging.getLogger()
 
 
@@ -46,7 +37,6 @@ class Command(BaseCommand):
   args = ''
   help = 'Create table sys.query_data over hive.hook.proto.base-directory'
 
-
   def handle(self, *args, **options):
     create_table()
 

+ 16 - 31
apps/beeswax/src/beeswax/models.py

@@ -15,36 +15,26 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from builtins import range
-from builtins import object
 import ast
-import base64
-import datetime
 import json
+import base64
 import logging
-import sys
+import datetime
+from enum import Enum
 
-from django.db import models
 from django.contrib.contenttypes.fields import GenericRelation
+from django.db import models
 from django.urls import reverse
+from django.utils.translation import gettext as _, gettext_lazy as _t
+from TCLIService.ttypes import THandleIdentifier, TOperationHandle, TOperationState, TOperationType, TSessionHandle
 
-from enum import Enum
-from TCLIService.ttypes import TSessionHandle, THandleIdentifier, TOperationState, TOperationHandle, TOperationType
-
+from beeswax.design import HQLdesign
 from desktop.lib.exceptions_renderable import PopupException
 from desktop.models import Document, Document2
 from desktop.redaction import global_redaction_engine
 from librdbms.server import dbms as librdbms_dbms
 from useradmin.models import User, UserProfile
 
-from beeswax.design import HQLdesign
-
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _, gettext_lazy as _t
-else:
-  from django.utils.translation import ugettext as _, ugettext_lazy as _t
-
-
 LOG = logging.getLogger()
 
 QUERY_SUBMISSION_TIMEOUT = datetime.timedelta(0, 60 * 60)               # 1 hour
@@ -54,6 +44,7 @@ BEESWAX = 'beeswax'
 HIVE_SERVER2 = 'hiveserver2'
 QUERY_TYPES = (HQL, IMPALA, RDBMS, SPARK, HPLSQL) = list(range(5))
 
+
 class QueryHistory(models.Model):
   """
   Holds metadata about all queries that have been executed.
@@ -136,7 +127,6 @@ class QueryHistory(models.Model):
 
     return query_server
 
-
   def get_current_statement(self):
     if self.design is not None:
       design = self.design.get_design()
@@ -156,7 +146,7 @@ class QueryHistory(models.Model):
 
     if self.design is not None:
       design = self.design.get_design()
-      return is_statement_finished and self.statement_number + 1 == design.statement_count # Last statement
+      return is_statement_finished and self.statement_number + 1 == design.statement_count  # Last statement
     else:
       return is_statement_finished
 
@@ -363,7 +353,7 @@ class SavedQuery(models.Model):
   def get_query_context(self):
     try:
       return make_query_context('design', self.id)
-    except:
+    except Exception:
       LOG.exception('failed to make query context')
       return ""
 
@@ -455,7 +445,7 @@ class SessionManager(models.Manager):
       if available_sessions:
         session = available_sessions[0]
       else:
-        session = None # No available session found
+        session = None  # No available session found
 
       return session
 
@@ -485,7 +475,7 @@ class Session(models.Model):
   def get_adjusted_guid_secret(self):
     secret = self.secret
     guid = self.guid
-    if sys.version_info[0] > 2 and not isinstance(self.secret, bytes) and not isinstance(self.guid, bytes):
+    if not isinstance(self.secret, bytes) and not isinstance(self.guid, bytes):
       # only for py3, after bytes saved, bytes wrapped in a string object
       try:
         secret = ast.literal_eval(secret)
@@ -521,7 +511,6 @@ class QueryHandle(object):
     return '%s %s' % (self.secret, self.guid)
 
 
-
 class HiveServerQueryHandle(QueryHandle):
   """
   QueryHandle for Hive Server 2.
@@ -556,16 +545,10 @@ class HiveServerQueryHandle(QueryHandle):
 
   @classmethod
   def get_decoded(cls, secret, guid):
-    if sys.version_info[0] > 2:
-      return base64.b64decode(secret), base64.b64decode(guid)
-    else:
-      return base64.decodestring(secret), base64.decodestring(guid)
+    return base64.b64decode(secret), base64.b64decode(guid)
 
   def get_encoded(self):
-    if sys.version_info[0] > 2:
-      return base64.b64encode(self.secret), base64.b64encode(self.guid)
-    else:
-      return base64.encodestring(self.secret), base64.encodestring(self.guid)
+    return base64.b64encode(self.secret), base64.b64encode(self.guid)
 
 
 # Deprecated. Could be removed.
@@ -608,6 +591,7 @@ class MetaInstall(models.Model):
     except MetaInstall.DoesNotExist:
       return MetaInstall(id=1)
 
+
 class Namespace(models.Model):
   name = models.CharField(default='', max_length=255)
   description = models.TextField(default='')
@@ -652,6 +636,7 @@ class Namespace(models.Model):
       'external_id': self.external_id
     }
 
+
 class Compute(models.Model):
   """
   Instance of a compute type pointing to a Hive or Impala compute resources.

+ 49 - 27
apps/beeswax/src/beeswax/query_history.py

@@ -15,32 +15,22 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from builtins import filter
-from builtins import str
-
-import collections
-import logging
 import json
-import sys
-import threading
 import uuid
+import logging
+import threading
+import collections
+
+from django.utils.translation import gettext as _
 
 from beeswax.design import hql_query
+from beeswax.management.commands import create_table_query_data
 from beeswax.server import dbms
 from beeswax.server.dbms import get_query_server_config
-from beeswax.management.commands import create_table_query_data
-
-from desktop.lib.exceptions_renderable import raise_popup_exception, PopupException
 from desktop.lib import django_mako
-
+from desktop.lib.exceptions_renderable import PopupException, raise_popup_exception
 from useradmin.models import install_sample_user
 
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
-
-
 LOG = logging.getLogger()
 
 QUERY_HISTORY_CACHE_MAX_USER_COUNT = 10
@@ -50,8 +40,8 @@ HAS_CREATED_TABLE = False
 
 class QueryHistory(object):
   def __init__(self, max_user=10, max_history_per_user=25):
-    self.max_user=max_user
-    self.max_history_per_user=max_history_per_user
+    self.max_user = max_user
+    self.max_history_per_user = max_history_per_user
     self.by_user = collections.OrderedDict()
     self.no_user_key = str(uuid.uuid4())
     self.lock = threading.Lock()
@@ -131,16 +121,19 @@ class QueryHistory(object):
       self.lock.acquire()
       by_user = self.by_user.get(request_user)
       if by_user and by_user['filters'] == filters:
-        del self.by_user[request_user] # Moving request_user to head of queue
+        del self.by_user[request_user]  # Moving request_user to head of queue
         self.by_user[request_user] = by_user
         return by_user
       return None
     finally:
       self.lock.release()
 
+
 QUERY_HISTORY = QueryHistory(max_user=QUERY_HISTORY_CACHE_MAX_USER_COUNT, max_history_per_user=QUERY_HISTORY_CACHE_MAX_LENGTH_PER_USER)
 
-# If fresh user get from _get_query_history_latest else get _get_query_history_from. if results set from _get_query_history_from less than limit merge results with cache else call _get_query_history_latest
+
+# If fresh user get from _get_query_history_latest else get _get_query_history_from.
+# if results set from _get_query_history_from less than limit merge results with cache else call _get_query_history_latest
 def get_query_history(request_user=None, start_date=None, start_time=None, query_id=None, status=None, limit=None):
   _init_table()
 
@@ -151,7 +144,7 @@ def get_query_history(request_user=None, start_date=None, start_time=None, query
     last = history['max']
     data = _get_query_history_from(request_user=request_user,
                                    start_date=last['date'],
-                                   start_time=last['time']+1,
+                                   start_time=last['time'] + 1,
                                    query_id=query_id,
                                    status=status,
                                    limit=limit)
@@ -161,10 +154,19 @@ def get_query_history(request_user=None, start_date=None, start_time=None, query
       cached = _n_filter(filter_list, cached)[:limit]
       return {'data': cached}
 
-  data = _get_query_history_latest(request_user=request_user, start_date=start_date, start_time=start_time, query_id=query_id, status=status, limit=limit, force_refresh=True)
+  data = _get_query_history_latest(
+    request_user=request_user,
+    start_date=start_date,
+    start_time=start_time,
+    query_id=query_id,
+    status=status,
+    limit=limit,
+    force_refresh=True
+  )
   QUERY_HISTORY.set(request_user, data['data'], filters)
   return data
 
+
 # If id in cache return cache else _get_query_history_from
 def get_query_by_id(request_user=None, query_id=None):
   _init_table()
@@ -173,10 +175,11 @@ def get_query_by_id(request_user=None, query_id=None):
   if datum:
     return {'data': [datum]}
   else:
-    data = _get_query_history_from(request_user=request_user, query_id=query_id) # force_refresh?
+    data = _get_query_history_from(request_user=request_user, query_id=query_id)  # force_refresh?
     cached = _groupby({'by_id': {}}, data['data'])
     return {'data': cached}
 
+
 def _init_table():
   global HAS_CREATED_TABLE
   if not HAS_CREATED_TABLE:
@@ -185,8 +188,21 @@ def _init_table():
   if not HAS_CREATED_TABLE:
     raise PopupException(_('Could not initialize query history table.'))
 
-def _get_query_history_latest(request_user=None, query_id=None, start_date=None, start_time=None, status=None, limit=25, force_refresh=False):
-  proposed_query = django_mako.render_to_string("select_table_query_data_latest.mako", {'table': {'name': 'query_data', 'request_user': request_user, 'query_id': query_id, 'start_date': start_date, 'start_time': start_time, 'status': status, 'limit': limit, 'force_refresh': force_refresh}})
+
+def _get_query_history_latest(
+    request_user=None, query_id=None, start_date=None, start_time=None, status=None, limit=25, force_refresh=False):
+  proposed_query = django_mako.render_to_string(
+    "select_table_query_data_latest.mako",
+    {'table': {
+      'name': 'query_data',
+      'request_user': request_user,
+      'query_id': query_id,
+      'start_date': start_date,
+      'start_time': start_time,
+      'status': status,
+      'limit': limit,
+      'force_refresh': force_refresh
+    }})
   data = _execute_query(proposed_query, limit)
   for row in data['data']:
     if row[1]:
@@ -197,6 +213,7 @@ def _get_query_history_latest(request_user=None, query_id=None, start_date=None,
       row[8] = json.loads(row[8])
   return data
 
+
 def _get_query_history_from(request_user=None, start_date=None, start_time=None, status=None, query_id=None, limit=25):
   proposed_query = django_mako.render_to_string("select_table_query_data_from.mako",
                                                 {'table':
@@ -217,6 +234,7 @@ def _get_query_history_from(request_user=None, start_date=None, start_time=None,
       row[8] = [row[8]]
   return data
 
+
 def _execute_query(proposed_query, limit):
   user = install_sample_user()
   query_server = get_query_server_config('beeswax')
@@ -243,6 +261,7 @@ def _execute_query(proposed_query, limit):
     except Exception as ex:
       raise_popup_exception(_('Error fetching query history.'))
 
+
 def _get_filter_list(filters):
   filter_list = []
   if filters.get("states"):
@@ -250,14 +269,17 @@ def _get_filter_list(filters):
 
   return filter_list
 
+
 def _get_status(row):
   return 'completed' if len(row[1]) >= 2 else 'running'
 
+
 def _n_filter(filters, tuples):
   for f in filters:
     tuples = list(filter(f, tuples))
   return tuples
 
+
 def _groupby(by_user, data):
   results = []
   for row in data:
@@ -270,7 +292,7 @@ def _groupby(by_user, data):
       results.append(row)
     else:
       item = by_user['by_id'][row[0]]
-      if row[8][0] in item[8]: # we have dup
+      if row[8][0] in item[8]:  # we have dup
         continue
       if row[1]:
         item[1] += row[1]

+ 0 - 2
apps/beeswax/src/beeswax/server/dbms.py

@@ -16,7 +16,6 @@
 # limitations under the License.
 
 import re
-import sys
 import json
 import time
 import logging
@@ -72,7 +71,6 @@ from desktop.lib.django_util import format_preserving_redirect
 from desktop.lib.exceptions_renderable import PopupException
 from desktop.lib.parameterization import substitute_variables
 from desktop.lib.view_util import location_to_url
-from desktop.models import Cluster
 from desktop.settings import CACHES_HIVE_DISCOVERY_KEY
 from indexer.file_format import HiveFormat
 from libzookeeper import conf as libzookeeper_conf

+ 0 - 1
apps/beeswax/src/beeswax/server/dbms_tests.py

@@ -16,7 +16,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import sys
 import logging
 from unittest.mock import Mock, patch
 

+ 8 - 33
apps/beeswax/src/beeswax/server/hive_metastore_server.py

@@ -15,32 +15,21 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from builtins import object
 import logging
-import re
-import sys
-import thrift
 
-from django.utils.encoding import smart_str, force_unicode
-
-import hadoop.cluster
-
-from desktop.lib import thrift_util
-from desktop.conf import KERBEROS
+from django.utils.encoding import force_unicode, smart_str
+from django.utils.translation import gettext as _
 from hive_metastore import ThriftHiveMetastore
 from TCLIService.ttypes import TOperationState
 
+import hadoop.cluster
 from beeswax import hive_site
 from beeswax.conf import SERVER_CONN_TIMEOUT
-from beeswax.server.hive_server2_lib import ResultCompatible
 from beeswax.models import HiveServerQueryHandle, QueryHistory
-from beeswax.server.dbms import Table, DataTable
-
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
-
+from beeswax.server.dbms import DataTable, Table
+from beeswax.server.hive_server2_lib import ResultCompatible
+from desktop.conf import KERBEROS
+from desktop.lib import thrift_util
 
 LOG = logging.getLogger()
 
@@ -105,7 +94,6 @@ class HiveDataTable(DataTable):
       yield parse_result_row(row)
 
 
-
 class HiveMetastoreClient(object):
 
   def __init__(self, query_server, user):
@@ -113,15 +101,12 @@ class HiveMetastoreClient(object):
     self.query_server = query_server
     self.meta_client = self.meta_client()
 
-
   def get_databases(self, *args, **kwargs):
     return self.meta_client.get_all_databases()
 
-
   def get_tables(self, *args, **kwargs):
     return self.meta_client.get_tables(*args, **kwargs)
 
-
   def get_tables_meta(self, *args, **kwargs):
     meta_tables = self.meta_client.get_table_meta(*args, **kwargs)
     return [
@@ -140,41 +125,32 @@ class HiveMetastoreClient(object):
 
     return table
 
-
   def get_partitions(self, db_name, tbl_name, max_parts):
     if max_parts is None:
       max_parts = -1
     return self.meta_client.get_partitions(db_name, tbl_name, max_parts)
 
-
   def use(self, query):
     pass
 
-
   def query(self, query, statement=0):
     return HiveServerQueryHandle(secret='mock', guid='mock')
 
-
   def get_state(self, handle):
     return QueryHistory.STATE.available
 
-
   def close(self, handle):
     pass
 
-
   def get_operation_status(self, handle):
     return MockFinishedOperation()
 
-
   def get_default_configuration(self, *args, **kwargs):
     return []
 
-
   def fetch(self, handle, start_over=False, max_rows=None):
     return EmptyResultCompatible()
 
-
   @classmethod
   def get_security(cls, query_server=None):
     cluster_conf = hadoop.cluster.get_cluster_conf_for_job_submission()
@@ -193,7 +169,6 @@ class HiveMetastoreClient(object):
 
     return use_sasl, kerberos_principal_short_name
 
-
   def meta_client(self):
     """Get the Thrift client to talk to the metastore"""
 
@@ -281,7 +256,7 @@ class HiveMetastoreClient(object):
         self._encode_partition(new_part)
         return self._client.alter_partition(db_name, tbl_name, new_part)
 
-    use_sasl, kerberos_principal_short_name = HiveMetastoreClient.get_security() # TODO Reuse from HiveServer2 lib
+    use_sasl, kerberos_principal_short_name = HiveMetastoreClient.get_security()  # TODO Reuse from HiveServer2 lib
 
     client = thrift_util.get_client(
         ThriftHiveMetastore.Client,

+ 4 - 20
apps/beeswax/src/beeswax/server/hive_server2_lib.py

@@ -16,12 +16,11 @@
 # limitations under the License.
 
 import re
-import sys
 import json
 import logging
-from builtins import filter, map, next, object
 from operator import itemgetter
 
+from django.utils.translation import gettext as _
 from TCLIService import TCLIService
 from TCLIService.ttypes import (
   TCancelOperationReq,
@@ -55,12 +54,6 @@ from desktop.conf import DEFAULT_USER, ENABLE_X_CSRF_TOKEN_FOR_HIVE_IMPALA, ENAB
 from desktop.lib import python_util, thrift_util
 from notebook.connectors.base import get_interpreter
 
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
-
-
 LOG = logging.getLogger()
 IMPALA_RESULTSET_CACHE_SIZE = 'impala.resultset.cache.size'
 DEFAULT_USER = DEFAULT_USER.get()
@@ -358,7 +351,7 @@ class HiveServerTColumnValue2(object):
 
   @classmethod
   def mark_nulls(cls, values, bytestring):
-    if sys.version_info[0] < 3 or isinstance(bytestring, bytes):
+    if isinstance(bytestring, bytes):
       mask = bytearray(bytestring)
     else:
       bitstring = python_util.from_string_to_bits(bytestring)
@@ -379,7 +372,7 @@ class HiveServerTColumnValue2(object):
   def set_nulls(cls, values, nulls):
     can_decode = True
     bytestring = nulls
-    if sys.version_info[0] == 3 and isinstance(bytestring, bytes):
+    if isinstance(bytestring, bytes):
       try:
         bytestring = bytestring.decode('utf-8')
       except Exception:
@@ -421,10 +414,7 @@ class HiveServerDataTable(DataTable):
       try:
         yield row.fields()
       except StopIteration as e:
-        if sys.version_info[0] > 2:
-          return  # pep-0479: expected Py3.8 generator raised StopIteration
-        else:
-          raise e
+        return  # pep-0479: expected Py3.8 generator raised StopIteration
 
 
 class HiveServerTTableSchema(object):
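The bare `return` above is PEP 479 compliance: from Python 3.7 a StopIteration escaping a generator body is converted into RuntimeError, so the Py2 re-raise branch had to go. A standalone illustration (not Hue code):

    def rows(it):
        while True:
            try:
                yield next(it)
            except StopIteration:
                return  # re-raising here would surface as RuntimeError on Py3.7+

    assert list(rows(iter([1, 2]))) == [1, 2]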
@@ -1041,9 +1031,6 @@ class HiveServerClient(object):
     if self.query_server.get('dialect') == 'impala' and self.query_server['QUERY_TIMEOUT_S'] > 0:
       configuration['QUERY_TIMEOUT_S'] = str(self.query_server['QUERY_TIMEOUT_S'])
 
-    if sys.version_info[0] == 2:
-      statement = statement.encode('utf-8')
-
     req = TExecuteStatementReq(statement=statement, confOverlay=configuration)
     (res, session) = self.call(self._client.ExecuteStatement, req, session=session)
 
@@ -1061,9 +1048,6 @@ class HiveServerClient(object):
     if self.query_server.get('dialect') == 'impala' and self.query_server['QUERY_TIMEOUT_S'] > 0:
       conf_overlay['QUERY_TIMEOUT_S'] = str(self.query_server['QUERY_TIMEOUT_S'])
 
-    if sys.version_info[0] == 2:
-      statement = statement.encode('utf-8')
-
     (res, session) = self.call_return_result_and_session(thrift_function, thrift_request, session=session)
 
     return HiveServerQueryHandle(

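On the mark_nulls/set_nulls hunks above: with Py2 gone, the nulls mask from HiveServer2 arrives as bytes and can feed bytearray directly. A simplified sketch of the bitmap semantics only (bit i of the little-endian mask flags values[i] as NULL; the real classmethod operates on Thrift column values):

    def mark_nulls(values, nulls):
        mask = bytearray(nulls)
        for n, byte in enumerate(mask):
            for bit in range(8):
                index = n * 8 + bit
                if byte & (1 << bit) and index < len(values):
                    values[index] = None
        return values

    assert mark_nulls([1, 2, 3, 4], b'\x05') == [None, 2, None, 4]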
+ 31 - 50
apps/beeswax/src/beeswax/server/hive_server2_lib_tests.py

@@ -16,26 +16,21 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import sys
 import logging
+from unittest.mock import MagicMock, Mock, patch
+
 import pytest
-import sys
 from TCLIService.ttypes import TStatusCode
 
+from beeswax.conf import CLOSE_SESSIONS, MAX_NUMBER_OF_SESSIONS
+from beeswax.models import HiveServerQueryHandle, Session
+from beeswax.server.dbms import QueryServerException, get_query_server_config
+from beeswax.server.hive_server2_lib import HiveServerClient, HiveServerClientCompatible, HiveServerTable
 from desktop.auth.backend import rewrite_user
 from desktop.lib.django_test_util import make_logged_in_client
 from useradmin.models import User
 
-from beeswax.conf import MAX_NUMBER_OF_SESSIONS, CLOSE_SESSIONS
-from beeswax.models import HiveServerQueryHandle, Session
-from beeswax.server.dbms import get_query_server_config, QueryServerException
-from beeswax.server.hive_server2_lib import HiveServerTable, HiveServerClient, HiveServerClientCompatible
-
-if sys.version_info[0] > 2:
-  from unittest.mock import patch, Mock, MagicMock
-else:
-  from mock import patch, Mock, MagicMock
-
-
 LOG = logging.getLogger()
 
 
@@ -112,7 +107,6 @@ class TestHiveServerClient():
         original_guid ==
         handle.sessionId.guid)
 
-
   def test_get_configuration(self):
 
     with patch('beeswax.server.hive_server2_lib.HiveServerClient.execute_query_statement') as execute_query_statement:
@@ -229,16 +223,13 @@ class TestHiveServerClient():
       client.get_databases(query)
 
       assert (
-        None !=
-        client.call.call_args[0][1].schemaName), client.call.call_args.args
+        None is not client.call.call_args[0][1].schemaName), client.call.call_args.args
 
       with patch.dict(self.query_server, {'dialect': 'impala'}, clear=True):
         client.get_databases(query)
 
         assert (
-          None == # Should be empty and not '*' with Impala
-          client.call.call_args[0][1].schemaName), client.call.call_args.args
-
+          None is client.call.call_args[0][1].schemaName), client.call.call_args.args  # Should be empty and not '*' with Impala
 
   def test_get_table_with_error(self):
     query = Mock(
@@ -309,18 +300,15 @@ class TestHiveServerClient():
       try:
         client.get_table(database='database', table_name='table_name')
       except QueryServerException as e:
-        if sys.version_info[0] > 2:
-          req_string = ("TGetTablesReq(sessionHandle=TSessionHandle(sessionId=THandleIdentifier(guid=%s, secret=%s)), "
-            "catalogName=None, schemaName='database', tableName='table_name', tableTypes=None)")\
-            % (str(original_guid), str(original_secret))
-        else:
-          req_string = ("TGetTablesReq(schemaName='database', sessionHandle=TSessionHandle(sessionId=THandleIdentifier"
-            "(secret='%s', guid='%s')), tableName='table_name', tableTypes=None, catalogName=None)")\
-            % ('s\\xb6\\x0ePP\\xbdL\\x17\\xa3\\x0f\\\\\\xf7K\\xe8Y\\x1d',
-               '\\xd9\\xe0hT\\xd6wO\\xe1\\xa3S\\xfb\\x04\\xca\\x93V\\x01') # manually adding '\'
+        req_string = ("TGetTablesReq(sessionHandle=TSessionHandle(sessionId=THandleIdentifier(guid=%s, secret=%s)), "
+          "catalogName=None, schemaName='database', tableName='table_name', tableTypes=None)")\
+          % (str(original_guid), str(original_secret))
+
         assert (
           "Bad status for request %s:\n%s" % (req_string, get_tables_res) ==
-          str(e))
+          str(e)
+        )
+
 
 class TestHiveServerTable():
 
@@ -353,7 +341,7 @@ class TestHiveServerTable():
         Mock(stringVal=Mock(
           values=[
             'comment', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL',
-            'NULL', 'NULL', '{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"code\":\"true\",\"description\":\"true\",\"salary\":\"true\",'\
+            'NULL', 'NULL', '{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"code\":\"true\",\"description\":\"true\",\"salary\":\"true\",'
             '\"total_emp\":\"true\"}}', '2', '1', '822', '3288', '48445', 'true', 'insert_only', '1572882268', 'NULL', 'NULL', 'NULL',
             'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', '1',
           ],
@@ -381,7 +369,6 @@ class TestHiveServerTable():
     assert table.cols[2] == {'col_name': 'total_emp', 'data_type': 'int', 'comment': 'NULL'}
     assert table.cols[3] == {'col_name': 'salary', 'data_type': 'int', 'comment': 'NULL'}
 
-
   def test_cols_hive_tez(self):
 
     table_results = Mock()
@@ -413,7 +400,7 @@ class TestHiveServerTable():
           nulls='')),
         Mock(stringVal=Mock(
           values=[
-            '', '', '', '', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', '{\"BASIC_STATS\":'\
+            '', '', '', '', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', '{\"BASIC_STATS\":'
             '\"true\",\"COLUMN_STATS\":{\"code\":\"true\",\"description\":\"true\",\"salary\":\"true\",\"total_emp\":\"true\"}}', '2',
             '1', '822', '3288', '48445', 'TRUE', 'insert_only         ', '1572882268', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL',
             'NULL', 'NULL', 'NULL', 'NULL', '1',
@@ -442,7 +429,6 @@ class TestHiveServerTable():
     assert table.cols[2] == {'col_name': 'total_emp', 'data_type': 'int', 'comment': ''}
     assert table.cols[3] == {'col_name': 'salary', 'data_type': 'int', 'comment': ''}
 
-
   def test_cols_hive_llap_upstream(self):
 
     table_results = Mock()
@@ -471,7 +457,7 @@ class TestHiveServerTable():
         Mock(stringVal=Mock(
           values=[
             'comment', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL',
-            'NULL', '{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"code\":\"true\",\"description\":\"true\",\"salary\":\"true\",'\
+            'NULL', '{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"code\":\"true\",\"description\":\"true\",\"salary\":\"true\",'
             '\"total_emp\":\"true\"}}', '2', '1', '822', '3288', '48445', 'true', 'insert_only', '1572882268', 'NULL', 'NULL', 'NULL',
             'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', '1',
           ],
@@ -499,7 +485,6 @@ class TestHiveServerTable():
     assert table.cols[2] == {'col_name': 'total_emp', 'data_type': 'int', 'comment': 'NULL'}
     assert table.cols[3] == {'col_name': 'salary', 'data_type': 'int', 'comment': 'NULL'}
 
-
   def test_partition_keys_impala(self):
 
     table_results = Mock()
@@ -530,7 +515,7 @@ class TestHiveServerTable():
           nulls='')),
         Mock(stringVal=Mock(
           values=['comment', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'comment', 'NULL', 'NULL',
-            'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', '{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":'\
+            'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', '{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":'
             '{\"code\":\"true\",\"description\":\"true\",\"salary\":\"true\",\"total_emp\":\"true\"}}', '2', '1', '822', '3288', '48445',
             'true', 'insert_only', '1572882268', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', '1',
           ],
@@ -557,7 +542,6 @@ class TestHiveServerTable():
     assert table.partition_keys[0].type == 'string'
     assert table.partition_keys[0].comment == 'NULL'
 
-
   def test_partition_keys_hive(self):
 
     table_results = Mock()
@@ -590,7 +574,7 @@ class TestHiveServerTable():
         Mock(stringVal=Mock(
           values=[
             'comment', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'comment', '', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL',
-            'NULL', 'NULL', 'NULL', 'NULL', 'NULL', '{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"code\":\"true\",\"description\":'\
+            'NULL', 'NULL', 'NULL', 'NULL', 'NULL', '{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"code\":\"true\",\"description\":'
             '\"true\",\"salary\":\"true\",\"total_emp\":\"true\"}}', '2', '1', '822', '3288', '48445', 'true', 'insert_only',
             '1572882268', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', '1',
           ],
@@ -617,7 +601,6 @@ class TestHiveServerTable():
     assert table.partition_keys[0].type == 'string'
     assert table.partition_keys[0].comment == ''
 
-
   def test_single_primary_key_hive(self):
 
     table_results = Mock()
@@ -650,7 +633,7 @@ class TestHiveServerTable():
         Mock(stringVal=Mock(
           values=[
             'comment', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'comment', '', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL',
-            'NULL', 'NULL', 'NULL', 'NULL', 'NULL', '{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"code\":\"true\",\"description\":'\
+            'NULL', 'NULL', 'NULL', 'NULL', 'NULL', '{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"code\":\"true\",\"description\":'
             '\"true\",\"salary\":\"true\",\"total_emp\":\"true\"}}', '2', '1', '822', '3288', '48445', 'true', 'insert_only',
             '1572882268', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', '1', 'NULL', 'NULL', 'NULL',
             'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL'
@@ -678,7 +661,6 @@ class TestHiveServerTable():
     assert table.primary_keys[0].type == 'NULL'
     assert table.primary_keys[0].comment == 'NULL'
 
-
   def test_multi_primary_keys_hive(self):
 
     table_results = Mock()
@@ -712,7 +694,7 @@ class TestHiveServerTable():
         Mock(stringVal=Mock(
           values=[
             'comment', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'comment', '', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL',
-            'NULL', 'NULL', 'NULL', 'NULL', 'NULL', '{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"code\":\"true\",\"description\":'\
+            'NULL', 'NULL', 'NULL', 'NULL', 'NULL', '{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"code\":\"true\",\"description\":'
             '\"true\",\"salary\":\"true\",\"total_emp\":\"true\"}}', '2', '1', '822', '3288', '48445', 'true', 'insert_only',
             '1572882268', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', '1', 'NULL', 'NULL', 'NULL',
             'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL'
@@ -744,7 +726,6 @@ class TestHiveServerTable():
     assert table.primary_keys[1].type == 'NULL'
     assert table.primary_keys[1].comment == 'NULL'
 
-
   def test_foreign_keys_hive(self):
 
     table_results = Mock()
@@ -787,7 +768,7 @@ class TestHiveServerTable():
           stringVal=Mock(
             values=[
               'comment', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'comment', '', 'NULL', 'NULL', 'NULL', 'NULL',
-              'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', '{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"code\":\"true\",'\
+              'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', '{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"code\":\"true\",'
               '\"description\":\"true\",\"salary\":\"true\",\"total_emp\":\"true\"}}', '2', '1', '822', '3288', '48445', 'true',
               'insert_only', '1572882268', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', '1',
               'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'NULL', 'Key Sequence:1',
@@ -849,13 +830,13 @@ class TestSessionManagement():
 
             # Reuse session from argument
             (res, session2) = client.call(fn, req, status=None, session=session1)
-            open_session.assert_called_once() # open_session should not be called again, because we're reusing session
+            open_session.assert_called_once()  # open_session should not be called again, because we're reusing session
             assert session1 == session2
 
             # Reuse session from get_session
             get_session.return_value = session1
             (res, session3) = client.call(fn, req, status=None)
-            open_session.assert_called_once() # open_session should not be called again, because we're reusing session
+            open_session.assert_called_once()  # open_session should not be called again, because we're reusing session
             assert session1 == session3
     finally:
       for f in finish:
@@ -884,13 +865,13 @@ class TestSessionManagement():
 
             # Reuse session from argument
             (res, session2) = client.call(fn, req, status=None, session=session1)
-            open_session.assert_called_once() # open_session should not be called again, because we're reusing session
+            open_session.assert_called_once()  # open_session should not be called again, because we're reusing session
             assert session1 == session2
 
             # Reuse session from get_session
             get_session.return_value = session1
             (res, session3) = client.call(fn, req, status=None)
-            open_session.assert_called_once() # open_session should not be called again, because we're reusing session
+            open_session.assert_called_once()  # open_session should not be called again, because we're reusing session
             assert session1 == session3
     finally:
       for f in finish:
@@ -941,7 +922,7 @@ class TestSessionManagement():
 
           # Reuse session from argument
           (res, session2) = client.call(fn, req, status=None, session=session1)
-          open_session.assert_called_once() # open_session should not be called again, because we're reusing session
+          open_session.assert_called_once()  # open_session should not be called again, because we're reusing session
           assert session1 == session2
 
           # Create new session
@@ -999,7 +980,7 @@ class TestSessionManagement():
               assert open_session.call_count == 6
               assert close_session.call_count == 6
 
-              res = client.get_partitions(MagicMock(), MagicMock()) # get_partitions does 2 requests with 1 session each
+              res = client.get_partitions(MagicMock(), MagicMock())  # get_partitions does 2 requests with 1 session each
               assert open_session.call_count == 8
               assert close_session.call_count == 8
     finally:
@@ -1033,8 +1014,8 @@ class TestSessionManagement():
       for f in finish:
         f()
 
-class TestHiveServerClientCompatible():
 
+class TestHiveServerClientCompatible():
 
   def test_get_tables_meta(self):
     client = Mock(

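The session-reuse assertions in TestSessionManagement above rely on unittest.mock call accounting. The pattern in miniature, with placeholder names rather than Hue's API:

    from unittest.mock import Mock

    open_session = Mock(return_value='session-1')

    def call(session=None):
        return session or open_session()

    session1 = call()
    session2 = call(session=session1)  # reused, so no second open_session()
    open_session.assert_called_once()
    assert session1 == session2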
+ 65 - 68
apps/beeswax/src/beeswax/tests.py

@@ -16,79 +16,83 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from future import standard_library
-from builtins import next, map, str, chr, range, object
+import os
+import re
+import sys
 import gzip
 import json
-import logging
-import os
 import random
-import re
 import shutil
 import socket
 import string
-import sys
+import logging
 import tempfile
 import threading
-import pytest
-import hadoop
+from io import BytesIO as string_io
+from unittest.mock import patch
 
+import pytest
+from django.db import transaction
 from django.test import TestCase
+from django.urls import reverse
 from django.utils.encoding import smart_str
 from django.utils.html import escape
-from django.urls import reverse
-from django.db import transaction
-
-from desktop.lib.exceptions_renderable import PopupException
-from desktop.conf import AUTH_USERNAME as DEFAULT_AUTH_USERNAME, AUTH_PASSWORD as DEFAULT_AUTH_PASSWORD, \
-  AUTH_PASSWORD_SCRIPT as DEFAULT_AUTH_PASSWORD_SCRIPT, LDAP_USERNAME, LDAP_PASSWORD, USE_NEW_EDITOR
-from desktop import redaction
-from desktop.redaction import logfilter
-from desktop.redaction.engine import RedactionPolicy, RedactionRule
-from desktop.lib.django_test_util import make_logged_in_client, assert_equal_mod_whitespace
-from desktop.lib.parameterization import substitute_variables
-from desktop.lib.python_util import from_string_to_bits, get_bytes_from_bits
-from desktop.lib.test_utils import grant_access, add_to_group
-from desktop.lib.security_util import get_localhost_name
-from desktop.lib.export_csvxls_tests import _read_xls_sheet_data
-from hadoop.fs.hadoopfs import Hdfs
-from useradmin.models import User
-
-from hadoop import ssl_client_site
-from hadoop.pseudo_hdfs4 import is_live_cluster
 
+import hadoop
 import desktop.conf as desktop_conf
-
-import beeswax.create_table
-import beeswax.hive_site
-import beeswax.models
 import beeswax.views
-
+import beeswax.models
+import beeswax.hive_site
+import beeswax.create_table
 from beeswax import conf, hive_site
 from beeswax.common import apply_natural_sort
-from beeswax.conf import HIVE_SERVER_HOST, AUTH_USERNAME, AUTH_PASSWORD, AUTH_PASSWORD_SCRIPT
-from beeswax.views import collapse_whitespace, _save_design, parse_out_jobs, parse_out_queries
-from beeswax.test_base import make_query, wait_for_query_to_finish, verify_history, get_query_server_config, fetch_query_result_data
+from beeswax.conf import AUTH_PASSWORD, AUTH_PASSWORD_SCRIPT, AUTH_USERNAME, HIVE_SERVER_HOST
+from beeswax.data_export import download, upload
 from beeswax.design import hql_query
-from beeswax.data_export import upload, download
-from beeswax.models import SavedQuery, QueryHistory, HQL, HIVE_SERVER2
+from beeswax.hive_site import get_metastore, hiveserver2_jdbc_url
+from beeswax.models import HIVE_SERVER2, HQL, QueryHistory, SavedQuery
 from beeswax.server import dbms
 from beeswax.server.dbms import QueryServerException
-from beeswax.server.hive_server2_lib import HiveServerClient, PartitionKeyCompatible, PartitionValueCompatible, HiveServerTable, \
-    HiveServerTColumnValue2
-from beeswax.test_base import BeeswaxSampleProvider, is_hive_on_spark, get_available_execution_engines
-from beeswax.hive_site import get_metastore, hiveserver2_jdbc_url
-
-standard_library.install_aliases()
-
-if sys.version_info[0] > 2:
-  from unittest.mock import patch, Mock
-  from io import BytesIO as string_io
-  open_file = open
-else:
-  from mock import patch, Mock
-  from cStringIO import StringIO as string_io
-  open_file = file
+from beeswax.server.hive_server2_lib import (
+  HiveServerClient,
+  HiveServerTable,
+  HiveServerTColumnValue2,
+  PartitionKeyCompatible,
+  PartitionValueCompatible,
+)
+from beeswax.test_base import (
+  BeeswaxSampleProvider,
+  fetch_query_result_data,
+  get_available_execution_engines,
+  get_query_server_config,
+  is_hive_on_spark,
+  make_query,
+  verify_history,
+  wait_for_query_to_finish,
+)
+from beeswax.views import _save_design, collapse_whitespace, parse_out_jobs, parse_out_queries
+from desktop import redaction
+from desktop.conf import (
+  AUTH_PASSWORD as DEFAULT_AUTH_PASSWORD,
+  AUTH_PASSWORD_SCRIPT as DEFAULT_AUTH_PASSWORD_SCRIPT,
+  AUTH_USERNAME as DEFAULT_AUTH_USERNAME,
+  LDAP_PASSWORD,
+  LDAP_USERNAME,
+  USE_NEW_EDITOR,
+)
+from desktop.lib.django_test_util import assert_equal_mod_whitespace, make_logged_in_client
+from desktop.lib.exceptions_renderable import PopupException
+from desktop.lib.export_csvxls_tests import _read_xls_sheet_data
+from desktop.lib.parameterization import substitute_variables
+from desktop.lib.python_util import from_string_to_bits, get_bytes_from_bits
+from desktop.lib.security_util import get_localhost_name
+from desktop.lib.test_utils import add_to_group, grant_access
+from desktop.redaction import logfilter
+from desktop.redaction.engine import RedactionPolicy, RedactionRule
+from hadoop import ssl_client_site
+from hadoop.fs.hadoopfs import Hdfs
+from hadoop.pseudo_hdfs4 import is_live_cluster
+from useradmin.models import User
 
 LOG = logging.getLogger()
 
@@ -433,7 +437,7 @@ for x in sys.stdin:
     # BeeswaxTest.jar is gone
     pytest.skip("Skipping Test")
 
-    src = open_file(os.path.join(os.path.dirname(__file__), "..", "..", "java-lib", "BeeswaxTest.jar"))
+    src = open(os.path.join(os.path.dirname(__file__), "..", "..", "java-lib", "BeeswaxTest.jar"))
     udf = self.cluster.fs_prefix + "hive1157.jar"
     dest = self.cluster.fs.open(udf, "w")
     shutil.copyfileobj(src, dest)
@@ -2102,10 +2106,7 @@ for x in sys.stdin:
 def test_import_gzip_reader():
   """Test the gzip reader in create table"""
   # Make gzipped data
-  if sys.version_info[0] > 2:
-    data = open(__file__, encoding='utf-8').read()
-  else:
-    data = file(__file__).read()
+  data = open(__file__, encoding='utf-8').read()
   data_gz_sio = string_io()
   gz = gzip.GzipFile(fileobj=data_gz_sio, mode='wb')
   gz_data = data
@@ -2247,7 +2248,7 @@ def test_hive_site():
         return tmpdir
 
     xml = hive_site_xml(is_local=True, use_sasl=False)
-    open_file(os.path.join(tmpdir, 'hive-site.xml'), 'w').write(xml)
+    open(os.path.join(tmpdir, 'hive-site.xml'), 'w').write(xml)
 
     beeswax.hive_site.reset()
     saved = beeswax.conf.HIVE_CONF_DIR
@@ -2278,7 +2279,7 @@ def test_hive_site_host_pattern_local_host():
       is_local=False, use_sasl=False, thrift_uris=thrift_uris, kerberos_principal='test/_HOST@TEST.COM',
       hs2_kerberos_principal='test/_HOST@TEST.COM'
     )
-    open_file(os.path.join(tmpdir, 'hive-site.xml'), 'w').write(xml)
+    open(os.path.join(tmpdir, 'hive-site.xml'), 'w').write(xml)
 
     beeswax.hive_site.reset()
     saved = beeswax.conf.HIVE_CONF_DIR
@@ -2309,7 +2310,7 @@ def test_hive_site_null_hs2krb():
         return tmpdir
 
     xml = hive_site_xml(is_local=True, use_sasl=False, hs2_kerberos_principal=None)
-    open_file(os.path.join(tmpdir, 'hive-site.xml'), 'w').write(xml)
+    open(os.path.join(tmpdir, 'hive-site.xml'), 'w').write(xml)
 
     beeswax.hive_site.reset()
     saved = beeswax.conf.HIVE_CONF_DIR
@@ -2876,12 +2877,8 @@ class TestWithMockedServer(object):
 
   def test_get_history_xss(self):
     sql = 'SELECT count(sample_07.salary) FROM sample_07;"><iFrAME>src="javascript:alert(\'Hue has an xss\');"></iFraME>'
-    if sys.version_info[0] < 3:
-      sql_escaped = b'SELECT count(sample_07.salary) FROM sample_07;&quot;&gt;&lt;iFrAME&gt;'\
-        b'src=&quot;javascript:alert(&#39;Hue has an xss&#39;);&quot;&gt;&lt;/iFraME&gt;'
-    else:
-      sql_escaped = b'SELECT count(sample_07.salary) FROM sample_07;&quot;&gt;&lt;iFrAME&gt;'\
-        b'src=&quot;javascript:alert(&#x27;Hue has an xss&#x27;);&quot;&gt;&lt;/iFraME&gt;'
+    sql_escaped = b'SELECT count(sample_07.salary) FROM sample_07;&quot;&gt;&lt;iFrAME&gt;'\
+      b'src=&quot;javascript:alert(&#x27;Hue has an xss&#x27;);&quot;&gt;&lt;/iFraME&gt;'
 
     response = _make_query(self.client, sql, submission_type='Save', name='My Name 1', desc='My Description')
     content = json.loads(response.content)
@@ -3157,7 +3154,7 @@ def test_metastore_security():
         return tmpdir
 
     xml = hive_site_xml(is_local=False, use_sasl=True, kerberos_principal='hive/_HOST@test.com')
-    open_file(os.path.join(tmpdir, 'hive-site.xml'), 'w').write(xml)
+    open(os.path.join(tmpdir, 'hive-site.xml'), 'w').write(xml)
 
     beeswax.hive_site.reset()
     saved = beeswax.conf.HIVE_CONF_DIR

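test_import_gzip_reader above now builds its gzipped fixture with io.BytesIO as the single in-memory buffer type. The round-trip in isolation:

    import gzip
    from io import BytesIO

    buf = BytesIO()
    with gzip.GzipFile(fileobj=buf, mode='wb') as gz:
        gz.write(b'sample data')

    buf.seek(0)
    assert gzip.GzipFile(fileobj=buf, mode='rb').read() == b'sample data'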
+ 8 - 11
apps/beeswax/src/beeswax/urls.py

@@ -15,17 +15,14 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import sys
-
-from beeswax import views as beeswax_views
-from beeswax import create_database as beeswax_create_database
-from beeswax import create_table as beeswax_create_table
-from beeswax import api as beeswax_api
-
-if sys.version_info[0] > 2:
-  from django.urls import re_path
-else:
-  from django.conf.urls import url as re_path
+from django.urls import re_path
+
+from beeswax import (
+    api as beeswax_api,
+    create_database as beeswax_create_database,
+    create_table as beeswax_create_table,
+    views as beeswax_views,
+)
 
 urlpatterns = [
   re_path(r'^$', beeswax_views.index, name='index'),

+ 40 - 36
apps/beeswax/src/beeswax/views.py

@@ -15,70 +15,66 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from builtins import next, str
-import json
-import logging
 import re
 import sys
+import json
 import time
+import logging
 
 from django import forms
-from django.core.paginator import Paginator, EmptyPage, InvalidPage
 from django.contrib import messages
+from django.core.paginator import EmptyPage, Paginator
 from django.db.models import Q
 from django.http import HttpResponse, QueryDict
 from django.shortcuts import redirect
-from django.utils.html import escape
 from django.urls import reverse
+from django.utils.html import escape
+from django.utils.translation import gettext as _
 
+import beeswax.forms
+import beeswax.design
+from beeswax import common, data_export, models
+from beeswax.management.commands import beeswax_install_examples
+from beeswax.models import QueryHistory, SavedQuery, Session
+from beeswax.server import dbms
+from beeswax.server.dbms import QueryServerException, expand_exception, get_query_server_config
 from desktop.appmanager import get_apps_dict
+from desktop.auth.backend import is_admin
 from desktop.conf import ENABLE_DOWNLOAD, REDIRECT_WHITELIST
 from desktop.context_processors import get_app_name
-
-from desktop.lib.django_util import JsonResponse
-from desktop.lib.django_util import copy_query_dict, format_preserving_redirect, render
-from desktop.lib.django_util import login_notrequired, get_desktop_uri_prefix
+from desktop.lib.django_util import (
+  JsonResponse,
+  copy_query_dict,
+  format_preserving_redirect,
+  get_desktop_uri_prefix,
+  login_notrequired,
+  render,
+)
 from desktop.lib.exceptions_renderable import PopupException
-from desktop.models import Document, _get_apps
 from desktop.lib.parameterization import find_variables
+from desktop.models import Document, _get_apps
 from desktop.views import serve_403_error
 from notebook.models import escape_rows
 from useradmin.models import User
 
-import beeswax.forms
-import beeswax.design
-
-from beeswax import common, data_export, models
-from beeswax.management.commands import beeswax_install_examples
-from beeswax.models import QueryHistory, SavedQuery, Session
-from beeswax.server import dbms
-from beeswax.server.dbms import expand_exception, get_query_server_config, QueryServerException
-
-from desktop.auth.backend import is_admin
-
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
-
-
 LOG = logging.getLogger()
 
 # For scraping Job IDs from logs
 HADOOP_JOBS_RE = re.compile("Starting Job = ([a-z0-9_]+?),")
-SPARK_APPLICATION_RE = re.compile("Running with YARN Application = (?P<application_id>application_\d+_\d+)")
-TEZ_APPLICATION_RE = re.compile("Executing on YARN cluster with App id ([a-z0-9_]+?)\)")
-TEZ_QUERY_RE = re.compile("\(queryId=([a-z0-9_-]+?)\)")
-
+SPARK_APPLICATION_RE = re.compile(r"Running with YARN Application = (?P<application_id>application_\d+_\d+)")
+TEZ_APPLICATION_RE = re.compile(r"Executing on YARN cluster with App id ([a-z0-9_]+?)\)")
+TEZ_QUERY_RE = re.compile(r"\(queryId=([a-z0-9_-]+?)\)")
 
 
 def index(request):
   return execute_query(request)
 
+
 """
 Design views
 """
 
+
 def save_design(request, form, type_, design, explicit_save):
   """
   save_design(request, form, type_, design, explicit_save) -> SavedQuery
@@ -95,7 +91,7 @@ def save_design(request, form, type_, design, explicit_save):
   """
   authorized_get_design(request, design.id)
   assert form.saveform.is_valid()
-  sub_design_form = form # Beeswax/Impala case
+  sub_design_form = form  # Beeswax/Impala case
 
   if type_ == models.HQL:
     design_cls = beeswax.design.HQLdesign
@@ -371,7 +367,6 @@ def list_query_history(request):
     }
     return JsonResponse(resp)
 
-
   return render('list_history.mako', request, {
     'request': request,
     'page': page,
@@ -414,10 +409,12 @@ def download(request, id, format, user_agent=None):
       message = e.message
     raise PopupException(message, detail='')
 
+
 """
 Queries Views
 """
 
+
 def execute_query(request, design_id=None, query_history_id=None):
   """
   View function for executing an arbitrary query.
@@ -462,7 +459,7 @@ def execute_query(request, design_id=None, query_history_id=None):
   context = {
     'design': design,
     'apps': apps_list,
-    'query': query_history, # Backward
+    'query': query_history,  # Backward
     'query_history': query_history,
     'autocomplete_base_url': reverse(get_app_name(request) + ':api_autocomplete_databases', kwargs={}),
     'autocomplete_base_url_hive': reverse('beeswax:api_autocomplete_databases', kwargs={}),
@@ -616,6 +613,7 @@ def configuration(request):
 Other views
 """
 
+
 def install_examples(request):
   response = {'status': -1, 'message': ''}
 
@@ -686,6 +684,8 @@ def query_done_cb(request, server_id):
 """
 Utils
 """
+
+
 def massage_columns_for_json(cols):
   massaged_cols = []
   for column in cols:
@@ -885,7 +885,7 @@ def _list_designs(user, querydict, page_size, prefix="", is_trashed=False):
     sort_dir, sort_attr = DEFAULT_SORT
   db_queryset = db_queryset.order_by(sort_dir + SORT_ATTR_TRANSLATION[sort_attr])
 
-  designs = [job.content_object for job in db_queryset.all() if job.content_object and job.content_object.is_auto == False]
+  designs = [job.content_object for job in db_queryset.all() if job.content_object and job.content_object.is_auto is False]
 
   pagenum = int(querydict.get(prefix + 'page', 1))
   paginator = Paginator(designs, page_size, allow_empty_first_page=True)
@@ -972,6 +972,7 @@ def parse_out_jobs(log, engine='mr', with_state=False):
 
   return ret
 
+
 def parse_out_queries(log, engine=None, with_state=False):
   """
   Ideally, Hive would tell us what jobs it has run directly from the Thrift interface.
@@ -1013,6 +1014,7 @@ def parse_out_queries(log, engine=None, with_state=False):
 
   return ret
 
+
 def _copy_prefix(prefix, base_dict):
   """Copy keys starting with ``prefix``"""
   querydict = QueryDict(None, mutable=True)
@@ -1154,6 +1156,8 @@ def get_db_choices(request):
   return [(db, db) for db in dbs]
 
 
-WHITESPACE = re.compile("\s+", re.MULTILINE)
+WHITESPACE = re.compile(r"\s+", re.MULTILINE)
+
+
 def collapse_whitespace(s):
   return WHITESPACE.sub(" ", s).strip()

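The r'' prefixes added to SPARK_APPLICATION_RE, TEZ_APPLICATION_RE, TEZ_QUERY_RE and WHITESPACE above silence invalid-escape warnings (\d, \( and \s in plain string literals are deprecated escapes on Py3) without changing what the patterns match. For instance:

    import re

    TEZ_QUERY_RE = re.compile(r"\(queryId=([a-z0-9_-]+?)\)")

    log = "INFO : Executing on YARN (queryId=hive_20240101_abc-1)"
    assert TEZ_QUERY_RE.search(log).group(1) == "hive_20240101_abc-1"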
+ 4 - 10
apps/beeswax/src/beeswax/views_tests.py

@@ -16,22 +16,17 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import logging
-import json
 import sys
+import json
+import logging
+from unittest.mock import patch
 
-from django.urls import reverse
 import pytest
+from django.urls import reverse
 
 from desktop.lib.django_test_util import make_logged_in_client
 from useradmin.models import User
 
-if sys.version_info[0] > 2:
-  from unittest.mock import patch, Mock, MagicMock
-else:
-  from mock import patch, Mock, MagicMock
-
-
 LOG = logging.getLogger()
 
 
@@ -42,7 +37,6 @@ class TestInstallExamples():
     self.client = make_logged_in_client(username="test", groupname="default", recreate=True, is_superuser=True, is_admin=True)
     self.user = User.objects.get(username="test")
 
-
   def test_install_via_insert_mysql(self):
 
     with patch('beeswax.views.beeswax_install_examples.SampleTable') as SampleTable:

+ 2 - 2
apps/filebrowser/src/filebrowser/api.py

@@ -28,7 +28,7 @@ from desktop.lib import fsmanager
 from desktop.lib.django_util import JsonResponse
 from desktop.lib.fs.gc.gs import get_gs_home_directory
 from desktop.lib.fs.ozone.ofs import get_ofs_home_directory
-from desktop.lib.i18n import smart_unicode
+from desktop.lib.i18n import smart_str
 from filebrowser.views import _normalize_path
 
 LOG = logging.getLogger()
@@ -42,7 +42,7 @@ def error_handler(view_fn):
     except Exception as e:
       LOG.exception('Error running %s' % view_fn)
       response['status'] = -1
-      response['message'] = smart_unicode(e)
+      response['message'] = smart_str(e)
     return JsonResponse(response)
 
   return decorator

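The smart_unicode to smart_str swap above happens inside filebrowser's JSON error decorator. A stdlib-only sketch of that decorator shape, using a plain dict in place of JsonResponse so it runs anywhere:

    import logging

    LOG = logging.getLogger(__name__)

    def error_handler(view_fn):
        def decorator(*args, **kwargs):
            response = {'status': 0}
            try:
                response.update(view_fn(*args, **kwargs))
            except Exception as e:
                LOG.exception('Error running %s' % view_fn)
                response['status'] = -1
                response['message'] = str(e)  # smart_str(e) in the real code
            return response
        return decorator

    @error_handler
    def boom():
        raise ValueError('nope')

    assert boom() == {'status': -1, 'message': 'nope'}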
+ 1 - 7
apps/filebrowser/src/filebrowser/conf.py

@@ -15,17 +15,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import os
-import sys
+from django.utils.translation import gettext_lazy as _
 
 from desktop.conf import ENABLE_DOWNLOAD, is_oozie_enabled
 from desktop.lib.conf import Config, coerce_bool
 
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext_lazy as _
-else:
-  from django.utils.translation import ugettext_lazy as _
-
 MAX_SNAPPY_DECOMPRESSION_SIZE = Config(
   key="max_snappy_decompression_size",
   help=_("Max snappy decompression size in bytes."),

+ 37 - 21
apps/filebrowser/src/filebrowser/forms.py

@@ -15,35 +15,24 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from future import standard_library
-standard_library.install_aliases()
-from builtins import zip
-from builtins import range
 import logging
-import sys
-import urllib.request, urllib.error
+import urllib.error
+import urllib.request
+from urllib.parse import unquote as urllib_unquote
 
 from django import forms
-from django.forms import FileField, CharField, BooleanField, Textarea
-from django.forms.formsets import formset_factory, BaseFormSet
+from django.forms import BooleanField, CharField, FileField, Textarea
+from django.forms.formsets import BaseFormSet, formset_factory
+from django.utils.translation import gettext_lazy as _
 
 from aws.s3 import S3A_ROOT, normpath as s3_normpath
 from azure.abfs.__init__ import ABFS_ROOT, normpath as abfs_normpath
-from desktop.lib.fs.ozone import OFS_ROOT, normpath as ofs_normpath
-from desktop.lib.fs.gc import GS_ROOT, normpath as gs_normpath
 from desktop.lib import i18n
-from hadoop.fs import normpath
-from useradmin.models import User, Group
-
+from desktop.lib.fs.gc import GS_ROOT, normpath as gs_normpath
+from desktop.lib.fs.ozone import OFS_ROOT, normpath as ofs_normpath
 from filebrowser.lib import rwx
-
-if sys.version_info[0] > 2:
-  from urllib.parse import unquote as urllib_unquote
-  from django.utils.translation import gettext_lazy as _
-else:
-  from urllib import unquote as urllib_unquote
-  from django.utils.translation import ugettext_lazy as _
-
+from hadoop.fs import normpath
+from useradmin.models import Group, User
 
 logger = logging.getLogger()
 
@@ -108,31 +97,39 @@ class EditorForm(forms.Form):
       return i18n.get_site_encoding()
     return encoding
 
+
 class RenameForm(forms.Form):
   op = "rename"
   src_path = CharField(label=_("File to rename"), help_text=_("The file to rename."))
   dest_path = CharField(label=_("New name"), help_text=_("Rename the file to:"))
 
+
 class BaseRenameFormSet(FormSet):
   op = "rename"
 
+
 RenameFormSet = formset_factory(RenameForm, formset=BaseRenameFormSet, extra=0)
 
+
 class CopyForm(forms.Form):
   op = "copy"
   src_path = CharField(label=_("File to copy"), help_text=_("The file to copy."))
   dest_path = CharField(label=_("Destination location"), help_text=_("Copy the file to:"))
 
+
 class BaseCopyFormSet(FormSet):
   op = "copy"
 
+
 CopyFormSet = formset_factory(CopyForm, formset=BaseCopyFormSet, extra=0)
 
+
 class SetReplicationFactorForm(forms.Form):
   op = "setreplication"
   src_path = CharField(label=_("File to set replication factor"), help_text=_("The file to set replication factor."))
   replication_factor = CharField(label=_("Value of replication factor"), help_text=_("The value of replication factor."))
 
+
 class UploadFileForm(forms.Form):
   op = "upload"
   # The "hdfs" prefix in "hdfs_file" triggers the HDFSfileUploadHandler
@@ -140,54 +137,68 @@ class UploadFileForm(forms.Form):
   dest = PathField(label=_("Destination Path"), help_text=_("Filename or directory to upload to."), required=False)  # Used actually?
   extract_archive = BooleanField(required=False)
 
+
 class UploadLocalFileForm(forms.Form):
   op = "upload"
   file = FileField(label=_("File to Upload"))
 
+
 class UploadArchiveForm(forms.Form):
   op = "upload"
   archive = FileField(label=_("Archive to Upload"))
   dest = PathField(label=_("Destination Path"), help_text=_("Archive to upload to."))
 
+
 class RemoveForm(forms.Form):
   op = "remove"
   path = PathField(label=_("File to remove"))
 
+
 class RmDirForm(forms.Form):
   op = "rmdir"
   path = PathField(label=_("Directory to remove"))
 
+
 class RmTreeForm(forms.Form):
   op = "rmtree"
   path = PathField(label=_("Directory to remove (recursively)"))
 
+
 class BaseRmTreeFormset(FormSet):
   op = "rmtree"
 
+
 RmTreeFormSet = formset_factory(RmTreeForm, formset=BaseRmTreeFormset, extra=0)
 
+
 class RestoreForm(forms.Form):
   op = "rmtree"
   path = PathField(label=_("Path to restore"))
 
+
 class BaseRestoreFormset(FormSet):
   op = "restore"
 
+
 RestoreFormSet = formset_factory(RestoreForm, formset=BaseRestoreFormset, extra=0)
 
+
 class TrashPurgeForm(forms.Form):
   op = "purge_trash"
 
+
 class MkDirForm(forms.Form):
   op = "mkdir"
   path = PathField(label=_("Path in which to create the directory"))
   name = PathField(label=_("Directory Name"))
 
+
 class TouchForm(forms.Form):
   op = "touch"
   path = PathField(label=_("Path in which to create the file"))
   name = PathField(label=_("File Name"))
 
+
 class ChownForm(forms.Form):
   op = "chown"
   path = PathField(label=_("Path to change user/group ownership"))
@@ -205,11 +216,14 @@ class ChownForm(forms.Form):
     self.all_groups = [group.name for group in Group.objects.all()]
     self.all_users = [user.username for user in User.objects.all()]
 
+
 class BaseChownFormSet(FormSet):
   op = "chown"
 
+
 ChownFormSet = formset_factory(ChownForm, formset=BaseChownFormSet, extra=0)
 
+
 class ChmodForm(forms.Form):
   op = "chmod"
   path = PathField(label=_("Path to change permissions"))
@@ -252,7 +266,9 @@ class ChmodForm(forms.Form):
     if hasattr(self, "cleaned_data"):
       self.cleaned_data["mode"] = rwx.compress_mode([self.cleaned_data[name] for name in self.names])
 
+
 class BaseChmodFormSet(FormSet):
   op = "chmod"
 
+
 ChmodFormSet = formset_factory(ChmodForm, formset=BaseChmodFormSet, extra=0)

+ 11 - 16
apps/filebrowser/src/filebrowser/lib/archives.py

@@ -17,24 +17,19 @@
 #
 # Utilities for dealing with file modes.
 
-from past.builtins import basestring
-from builtins import object
-import bz2
 import os
-import posixpath
-import sys
+import bz2
 import tarfile
 import tempfile
-
-from desktop.lib.exceptions_renderable import PopupException
-from filebrowser.conf import ARCHIVE_UPLOAD_TEMPDIR
+import posixpath
+from builtins import object
 from zipfile import ZipFile
 
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
+from django.utils.translation import gettext as _
+from past.builtins import basestring
 
+from desktop.lib.exceptions_renderable import PopupException
+from filebrowser.conf import ARCHIVE_UPLOAD_TEMPDIR
 
 __all__ = ['archive_factory']
 
@@ -65,6 +60,7 @@ class Archive(object):
       except OSError:
         pass
 
+
 class ZipArchive(Archive):
   """
   Acts on a zip file in memory or in a temporary location.
@@ -72,10 +68,8 @@ class ZipArchive(Archive):
   """
 
   def __init__(self, file):
-    if sys.version_info[0] > 2:
-      self.file = isinstance(file, basestring) and file
-    else:
-      self.file = isinstance(file, basestring) and open(file) or file
+    self.file = file  # ZipFile accepts a path or a file-like object
+
     self.zfh = ZipFile(self.file)
 
   def extract(self):
@@ -248,6 +242,7 @@ def archive_factory(path, archive_type='zip'):
   elif archive_type == 'bz2' or archive_type == 'bzip2':
     return BZ2Archive(path)
 
+
 class IllegalPathException(PopupException):
 
   def __init__(self):

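On the ZipArchive.__init__ hunk above: Py3's zipfile.ZipFile accepts a path or any file-like object directly, which is what makes the Py2 open() branch removable (and why the basestring truthiness check is redundant once both input kinds are passed straight through). A self-contained sketch against an in-memory buffer:

    import zipfile
    from io import BytesIO

    buf = BytesIO()
    with zipfile.ZipFile(buf, 'w') as zf:
        zf.writestr('a.txt', 'hello')

    buf.seek(0)
    with zipfile.ZipFile(buf) as zf:  # file-like object, no open() needed
        assert zf.read('a.txt') == b'hello'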
+ 11 - 18
apps/filebrowser/src/filebrowser/lib/xxd_test.py

@@ -15,37 +15,29 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from __future__ import absolute_import
-from future import standard_library
-standard_library.install_aliases()
-from builtins import range
-import unittest
-import logging
-import pytest
 import random
-import sys
+import logging
+import unittest
 import subprocess
+from io import StringIO as string_io
+from subprocess import PIPE, Popen
 
-from filebrowser.lib import xxd
-
+import pytest
 from django.test import TestCase
-from subprocess import Popen, PIPE
 
-if sys.version_info[0] > 2:
-  from io import StringIO as string_io
-else:
-  from cStringIO import StringIO as string_io
+from filebrowser.lib import xxd
 
 LOG = logging.getLogger()
 
-LENGTH = 1024*10 # 10KB
+LENGTH = 1024 * 10  # 10KB
+
 
 class XxdTest(TestCase):
   def test_mask_not_alphanumeric(self):
-    assert  (1, ". X") == xxd.mask_not_alphanumeric("\n X")
+    assert (1, ". X") == xxd.mask_not_alphanumeric("\n X")
 
   def test_mask_not_printable(self):
-    assert  (2, "..@") == xxd.mask_not_alphanumeric("\xff\x90\x40")
+    assert (2, "..@") == xxd.mask_not_alphanumeric("\xff\x90\x40")
 
   def _get_offset_width(self, line):
     offset, match, _ = line.partition(":")
@@ -103,5 +95,6 @@ class XxdTest(TestCase):
     xxd.main(string_io(random_text), output)
     self._verify_content(stdin, output.getvalue())
 
+
 if __name__ == "__main__":
   unittest.main()

+ 1 - 6
apps/filebrowser/src/filebrowser/urls.py

@@ -15,15 +15,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import sys
+from django.urls import re_path
 
 from filebrowser import api as filebrowser_api, utils as filebrowser_utils, views as filebrowser_views
 
-if sys.version_info[0] > 2:
-  from django.urls import re_path
-else:
-  from django.conf.urls import url as re_path
-
 urlpatterns = [
   # Base view
   re_path(r'^$', filebrowser_views.index, name='index'),

+ 17 - 52
apps/filebrowser/src/filebrowser/views.py

@@ -15,6 +15,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import io
 import os
 import re
 import sys
@@ -26,11 +27,15 @@ import mimetypes
 import posixpath
 import urllib.error
 import urllib.request
-from builtins import object
+from builtins import object, str as new_str
 from bz2 import decompress
 from datetime import datetime
 from functools import partial
+from gzip import decompress as decompress_gzip
+from io import StringIO as string_io
+from urllib.parse import quote as urllib_quote, unquote as urllib_unquote, urlparse as lib_urlparse
 
+from avro import datafile, io
 from django.core.files.uploadhandler import FileUploadHandler, StopFutureHandlers, StopUpload
 from django.core.paginator import EmptyPage, InvalidPage, Page, Paginator
 from django.http import Http404, HttpResponse, HttpResponseForbidden, HttpResponseNotModified, HttpResponseRedirect, StreamingHttpResponse
@@ -39,6 +44,7 @@ from django.template.defaultfilters import filesizeformat, stringformat
 from django.urls import reverse
 from django.utils.html import escape
 from django.utils.http import http_date
+from django.utils.translation import gettext as _
 from django.views.decorators.csrf import csrf_exempt
 from django.views.decorators.http import require_http_methods
 from django.views.static import was_modified_since
@@ -100,28 +106,6 @@ from hadoop.fs.hadoopfs import Hdfs
 from hadoop.fs.upload import HDFSFineUploaderChunkedUpload, LocalFineUploaderChunkedUpload
 from useradmin.models import Group, User
 
-if sys.version_info[0] > 2:
-  import io
-  from builtins import str as new_str
-  from gzip import decompress as decompress_gzip
-  from io import StringIO as string_io
-  from urllib.parse import quote as urllib_quote, unquote as urllib_unquote, urlparse as lib_urlparse
-
-  from avro import datafile, io
-  from django.utils.translation import gettext as _
-else:
-  from urllib import quote as urllib_quote, unquote as urllib_unquote
-
-  from cStringIO import StringIO as string_io
-  from urlparse import urlparse as lib_urlparse
-  new_str = unicode
-  from gzip import GzipFile
-
-  import parquet
-  from avro import datafile, io
-  from django.utils.translation import ugettext as _
-
-
 DEFAULT_CHUNK_SIZE_BYTES = 1024 * 4  # 4KB
 MAX_CHUNK_SIZE_BYTES = 1024 * 1024  # 1MB
 
@@ -825,24 +809,17 @@ def display(request, path):
   # Get contents as string for text mode, or at least try
   uni_contents = None
   if not mode or mode == 'text':
-    if sys.version_info[0] > 2:
-      if not isinstance(contents, str):
-        uni_contents = new_str(contents, encoding, errors='replace')
-        is_binary = uni_contents.find(i18n.REPLACEMENT_CHAR) != -1
-        # Auto-detect mode
-        if not mode:
-          mode = is_binary and 'binary' or 'text'
-      else:
-        # We already have a string.
-        uni_contents = contents
-        is_binary = False
-        mode = 'text'
-    else:
+    if not isinstance(contents, str):
       uni_contents = new_str(contents, encoding, errors='replace')
       is_binary = uni_contents.find(i18n.REPLACEMENT_CHAR) != -1
       # Auto-detect mode
       if not mode:
         mode = is_binary and 'binary' or 'text'
+    else:
+      # We already have a string.
+      uni_contents = contents
+      is_binary = False
+      mode = 'text'
 
   # Get contents as bytes
   if mode == "binary":
@@ -1019,10 +996,7 @@ def _read_gzip(fhandle, path, offset, length, stats):
   if offset and offset != 0:
     raise PopupException(_("Offsets are not supported with Gzip compression."))
   try:
-    if sys.version_info[0] > 2:
-      contents = decompress_gzip(fhandle.read())
-    else:
-      contents = GzipFile('', 'r', 0, string_io(fhandle.read())).read(length)
+    contents = decompress_gzip(fhandle.read())
   except Exception as e:
     logging.exception('Could not decompress file at "%s": %s' % (path, e))
     raise PopupException(_("Failed to decompress file."))
@@ -1052,27 +1026,18 @@ def _read_simple(fhandle, path, offset, length, stats):
 
 def detect_gzip(contents):
   '''This is a silly small function which checks to see if the file is Gzip'''
-  if sys.version_info[0] > 2:
-    return contents[:2] == b'\x1f\x8b'
-  else:
-    return contents[:2] == '\x1f\x8b'
+  return contents[:2] == b'\x1f\x8b'
 
 
 def detect_bz2(contents):
   '''This is a silly small function which checks to see if the file is Bz2'''
-  if sys.version_info[0] > 2:
-    return contents[:3] == b'BZh'
-  else:
-    return contents[:3] == 'BZh'
+  return contents[:3] == b'BZh'
 
 
 def detect_avro(contents):
   '''This is a silly small function which checks to see if the file is Avro'''
   # Check if the first three bytes are 'O', 'b' and 'j'
-  if sys.version_info[0] > 2:
-    return contents[:3] == b'\x4F\x62\x6A'
-  else:
-    return contents[:3] == '\x4F\x62\x6A'
+  return contents[:3] == b'\x4F\x62\x6A'
 
 
 def detect_snappy(contents):

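The detect_* helpers above now compare against bytes literals only, since Py3 binary-mode reads always yield bytes. They check out against real headers:

    import bz2
    import gzip

    def detect_gzip(contents):
        return contents[:2] == b'\x1f\x8b'

    def detect_bz2(contents):
        return contents[:3] == b'BZh'

    assert detect_gzip(gzip.compress(b'x'))
    assert detect_bz2(bz2.compress(b'x'))
    assert not detect_gzip(b'plain text')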
+ 20 - 22
apps/hbase/src/hbase/api.py

@@ -15,27 +15,25 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from builtins import range
-from builtins import object
-import json
-import logging
 import re
 import csv
-import sys
+import json
+import logging
 
 from django.utils.encoding import smart_str
+from django.utils.translation import gettext as _
 
 from desktop.lib import thrift_util
 from desktop.lib.exceptions_renderable import PopupException
-
 from hbase import conf
-from hbase.hbase_site import get_server_principal, get_server_authentication, is_using_thrift_ssl, is_using_thrift_http, get_thrift_transport
-from hbase.server.hbase_lib import get_thrift_type, get_client_type
-
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
+from hbase.hbase_site import (
+  get_server_authentication,
+  get_server_principal,
+  get_thrift_transport,
+  is_using_thrift_http,
+  is_using_thrift_ssl,
+)
+from hbase.server.hbase_lib import get_client_type, get_thrift_type
 
 LOG = logging.getLogger()
 
@@ -68,12 +66,12 @@ class HbaseApi(object):
     clusters = []
     try:
       full_config = json.loads(conf.HBASE_CLUSTERS.get().replace("'", "\""))
-    except:
+    except Exception:
       LOG.debug('Failed to read HBase cluster configuration as JSON, falling back to raw configuration.')
-      full_config = [conf.HBASE_CLUSTERS.get()] #hack cause get() is weird
+      full_config = [conf.HBASE_CLUSTERS.get()]  # hack cause get() is weird
 
     for config in full_config:
-      match = re.match('\((?P<name>[^\(\)\|]+)\|(?P<host>.+):(?P<port>[0-9]+)\)', config)
+      match = re.match(r'\((?P<name>[^\(\)\|]+)\|(?P<host>.+):(?P<port>[0-9]+)\)', config)
       if match:
         clusters += [{
           'name': match.group('name'),
@@ -90,7 +88,7 @@ class HbaseApi(object):
       for cluster in clusters:
         if cluster["name"] == name:
           return cluster
-    except:
+    except Exception:
       LOG.exception('failed to get the cluster %s' % name)
     raise PopupException(_("Cluster by the name of %s does not exist in configuration.") % name)
 
@@ -149,7 +147,7 @@ class HbaseApi(object):
 
   def getRows(self, cluster, tableName, columns, startRowKey, numRows, prefix=False):
     client = self.connectCluster(cluster)
-    if prefix == False:
+    if prefix is False:
       scanner = client.scannerOpen(tableName, smart_str(startRowKey), columns, None, doas=self.user.username)
     else:
       scanner = client.scannerOpenWithPrefix(tableName, smart_str(startRowKey), columns, None, doas=self.user.username)
@@ -193,7 +191,7 @@ class HbaseApi(object):
   def deleteColumns(self, cluster, tableName, row, columns):
     client = self.connectCluster(cluster)
     Mutation = get_thrift_type('Mutation')
-    mutations = [Mutation(isDelete = True, column=smart_str(column)) for column in columns]
+    mutations = [Mutation(isDelete=True, column=smart_str(column)) for column in columns]
     return client.mutateRow(tableName, smart_str(row), mutations, None, doas=self.user.username)
 
   def deleteColumn(self, cluster, tableName, row, column):
@@ -209,7 +207,7 @@ class HbaseApi(object):
     Mutation = get_thrift_type('Mutation')
     for column in list(data.keys()):
       value = smart_str(data[column]) if data[column] is not None else None
-      mutations.append(Mutation(column=smart_str(column), value=value)) # must use str for API, does thrift coerce by itself?
+      mutations.append(Mutation(column=smart_str(column), value=value))  # must use str for API, does thrift coerce by itself?
     return client.mutateRow(tableName, smart_str(row), mutations, None, doas=self.user.username)
 
   def putColumn(self, cluster, tableName, row, column, value=None):
@@ -225,8 +223,8 @@ class HbaseApi(object):
     aggregate_data = []
     limit = conf.TRUNCATE_LIMIT.get()
     if not isinstance(queries, list):
-      queries=json.loads(queries)
-    queries = sorted(queries, key=lambda query: query['scan_length']) #sort by scan length
+      queries = json.loads(queries)
+    queries = sorted(queries, key=lambda query: query['scan_length'])  # sort by scan length
     for query in queries:
       scan_length = int(query['scan_length'])
       if query['row_key'] == "null":

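The now-raw cluster pattern in getClusters() above parses '(name|host:port)' entries out of the HBASE_CLUSTERS setting. In isolation:

    import re

    CLUSTER_RE = re.compile(r'\((?P<name>[^\(\)\|]+)\|(?P<host>.+):(?P<port>[0-9]+)\)')

    match = CLUSTER_RE.match('(Cluster|localhost:9090)')
    assert match.group('name') == 'Cluster'
    assert match.group('host') == 'localhost'
    assert int(match.group('port')) == 9090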
+ 9 - 13
apps/hbase/src/hbase/conf.py

@@ -16,19 +16,17 @@
 # limitations under the License.
 
 from __future__ import print_function
-import logging
+
 import os
 import sys
+import logging
+
+from django.utils.translation import gettext as _, gettext_lazy as _t
 
 from desktop.conf import default_ssl_validate
-from desktop.lib.conf import Config, validate_thrift_transport, coerce_bool
+from desktop.lib.conf import Config, coerce_bool, validate_thrift_transport
 from hbase.hbase_site import get_thrift_transport
 
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext_lazy as _t, gettext as _
-else:
-  from django.utils.translation import ugettext_lazy as _t, ugettext as _
-
 LOG = logging.getLogger()
 
 
@@ -50,7 +48,7 @@ TRUNCATE_LIMIT = Config(
 THRIFT_TRANSPORT = Config(
   key="thrift_transport",
   default="buffered",
-  help=_t("Should come from hbase-site.xml, do not set. 'framed' is used to chunk up responses, used with the nonblocking server in Thrift but is not supported in Hue."
+  help=_t("Should come from hbase-site.xml, do not set. 'framed' is used to chunk up responses, used with the nonblocking server in Thrift but is not supported in Hue."  # noqa: E501
        "'buffered' used to be the default of the HBase Thrift Server. Default is buffered when not set in hbase-site.xml."),
   type=str
 )
@@ -64,7 +62,7 @@ HBASE_CONF_DIR = Config(
 # Hidden, just for making patching of older version of Hue easier. To remove in Hue 4.
 USE_DOAS = Config(
   key='use_doas',
-  help=_t('Should come from hbase-site.xml, do not set. Force Hue to use Http Thrift mode with doas impersonation, regarless of hbase-site.xml properties.'),
+  help=_t('Should come from hbase-site.xml, do not set. Force Hue to use Http Thrift mode with doas impersonation, regardless of hbase-site.xml properties.'),  # noqa: E501
   default=False,
   type=coerce_bool
 )
@@ -84,9 +82,9 @@ def config_validator(user):
   from hbase.settings import NICE_NAME
 
   try:
-    if not 'test' in sys.argv: # Avoid tests hanging
+    if 'test' not in sys.argv:  # Avoid tests hanging
       api = HbaseApi(user=user)
-      cluster_name = api.getClusters()[0]['name'] # Currently pick first configured cluster
+      cluster_name = api.getClusters()[0]['name']  # Currently pick first configured cluster
       # Check connectivity
       api.connectCluster(cluster_name)
       api.getTableList(cluster_name)
@@ -104,8 +102,6 @@ def config_validator(user):
     LOG.exception(msg)
     res.append((NICE_NAME, _(msg)))
 
-
-
   res.extend(validate_thrift_transport(THRIFT_TRANSPORT))
 
   return res

+ 10 - 14
apps/hbase/src/hbase/hbase_site.py

@@ -18,15 +18,9 @@
 import errno
 import logging
 import os.path
-import sys
 
-from hadoop import confparse
 from desktop.lib.security_util import get_components
-
-if sys.version_info[0] > 2:
-  open_file = open
-else:
-  open_file = file
+from hadoop import confparse
 
 LOG = logging.getLogger()
 
@@ -44,7 +38,6 @@ _CNF_HBASE_USE_THRIFT_HTTP = 'hbase.regionserver.thrift.http'
 _CNF_HBASE_USE_THRIFT_SSL = 'hbase.thrift.ssl.enabled'
 
 
-
 def reset():
   global SITE_DICT
   SITE_DICT = None
@@ -67,6 +60,7 @@ def get_server_principal():
 def get_server_authentication():
   return get_conf().get(_CNF_HBASE_AUTHENTICATION, 'NOSASL').upper()
 
+
 def get_thrift_transport():
   use_framed = get_conf().get(_CNF_HBASE_REGIONSERVER_THRIFT_FRAMED)
   if use_framed is not None:
@@ -75,20 +69,23 @@ def get_thrift_transport():
     else:
       return "buffered"
   else:
-    #Avoid circular import
+    # Avoid circular import
     from hbase.conf import THRIFT_TRANSPORT
     return THRIFT_TRANSPORT.get()
 
+
 def is_impersonation_enabled():
-  #Avoid circular import
+  # Avoid circular import
   from hbase.conf import USE_DOAS
   return get_conf().get(_CNF_HBASE_IMPERSONATION_ENABLED, 'FALSE').upper() == 'TRUE' or USE_DOAS.get()
 
+
 def is_using_thrift_http():
-  #Avoid circular import
+  # Avoid circular import
   from hbase.conf import USE_DOAS
   return get_conf().get(_CNF_HBASE_USE_THRIFT_HTTP, 'FALSE').upper() == 'TRUE' or USE_DOAS.get()
 
+
 def is_using_thrift_ssl():
   return get_conf().get(_CNF_HBASE_USE_THRIFT_SSL, 'FALSE').upper() == 'TRUE'
 
@@ -97,11 +94,11 @@ def _parse_site():
   global SITE_DICT
   global SITE_PATH
 
-  #Avoid circular import
+  # Avoid circular import
   from hbase.conf import HBASE_CONF_DIR
   SITE_PATH = os.path.join(HBASE_CONF_DIR.get(), 'hbase-site.xml')
   try:
-    data = open_file(SITE_PATH, 'r').read()
+    data = open(SITE_PATH, 'r').read()
   except IOError as err:
     if err.errno != errno.ENOENT:
       LOG.error('Cannot read from "%s": %s' % (SITE_PATH, err))
@@ -109,4 +106,3 @@ def _parse_site():
     data = ""
 
   SITE_DICT = confparse.ConfParse(data)
-
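
Side note on the streamlined read in `_parse_site()`: the one-shot `open(SITE_PATH, 'r').read()` leaves closing the handle to the garbage collector. A behavior-equivalent sketch using a context manager (the `read_site_xml` wrapper is hypothetical; the `errno` handling mirrors the hunk):

```python
import errno
import logging

LOG = logging.getLogger()


def read_site_xml(path):
  """Hypothetical wrapper: the _parse_site() read with a deterministic close."""
  try:
    with open(path, 'r') as f:
      return f.read()
  except IOError as err:
    if err.errno != errno.ENOENT:  # a missing file is expected; log anything else
      LOG.error('Cannot read from "%s": %s' % (path, err))
    return ""
```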

+ 6 - 13
apps/hbase/src/hbase/management/commands/hbase_setup.py

@@ -15,24 +15,18 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import logging
 import os
 import sys
-
+import logging
 from datetime import datetime, timedelta
 
 from django.core.management.base import BaseCommand
+from django.utils.translation import gettext as _
+from hbased.ttypes import AlreadyExists
 
 from desktop.lib.paths import get_apps_root
-from useradmin.models import install_sample_user, User
-
-from hbased.ttypes import AlreadyExists
 from hbase.api import HbaseApi
-
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
+from useradmin.models import User, install_sample_user
 
 LOG = logging.getLogger()
 
@@ -50,7 +44,7 @@ class Command(BaseCommand):
       user = install_sample_user()
 
     api = HbaseApi(user=user)
-    cluster_name = api.getClusters()[0]['name'] # Currently pick first configured cluster
+    cluster_name = api.getClusters()[0]['name']  # Currently pick first configured cluster
 
     # Check connectivity
     api.connectCluster(cluster_name)
@@ -61,10 +55,9 @@ class Command(BaseCommand):
     self.create_binary_table(api, cluster_name)
     self.load_binary_table(api, cluster_name)
 
-
   def create_analytics_table(self, api, cluster_name):
     try:
-      api.createTable(cluster_name, 'analytics_demo', [{'properties': {'name': 'hour'}}, {'properties': {'name': 'day'}}, {'properties': {'name': 'total'}}])
+      api.createTable(cluster_name, 'analytics_demo', [{'properties': {'name': 'hour'}}, {'properties': {'name': 'day'}}, {'properties': {'name': 'total'}}])  # noqa: E501
     except AlreadyExists:
       pass
 

+ 26 - 30
apps/hbase/src/hbase/tests.py

@@ -15,30 +15,31 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from builtins import object
-import json
 import os
-import shutil
 import sys
+import json
+import shutil
 import tempfile
-import pytest
+from builtins import object
 
-from django.urls import reverse
+import pytest
 from django.test import TestCase
+from django.urls import reverse
 
 from desktop.lib.django_test_util import make_logged_in_client
-from desktop.lib.test_utils import grant_access, add_to_group
+from desktop.lib.test_utils import add_to_group, grant_access
 from hadoop.pseudo_hdfs4 import is_live_cluster
-from useradmin.models import User
-
 from hbase.api import HbaseApi
 from hbase.conf import HBASE_CONF_DIR
-from hbase.hbase_site import get_server_authentication, get_server_principal, get_conf, reset, _CNF_HBASE_IMPERSONATION_ENABLED, is_impersonation_enabled
-
-if sys.version_info[0] > 2:
-  open_file = open
-else:
-  open_file = file
+from hbase.hbase_site import (
+  _CNF_HBASE_IMPERSONATION_ENABLED,
+  get_conf,
+  get_server_authentication,
+  get_server_principal,
+  is_impersonation_enabled,
+  reset,
+)
+from useradmin.models import User
 
 
 def test_security_plain():
@@ -47,7 +48,7 @@ def test_security_plain():
 
   try:
     xml = hbase_site_xml()
-    open_file(os.path.join(tmpdir, 'hbase-site.xml'), 'w').write(xml)
+    open(os.path.join(tmpdir, 'hbase-site.xml'), 'w').write(xml)
     reset()
 
     assert 'NOSASL' == get_server_authentication()
@@ -56,7 +57,7 @@ def test_security_plain():
     security = HbaseApi._get_security()
 
     assert 'test' == security['kerberos_principal_short_name']
-    assert False == security['use_sasl']
+    assert False is security['use_sasl']
   finally:
     reset()
     finish()
@@ -69,7 +70,7 @@ def test_security_kerberos():
 
   try:
     xml = hbase_site_xml(authentication='kerberos')
-    open_file(os.path.join(tmpdir, 'hbase-site.xml'), 'w').write(xml)
+    open(os.path.join(tmpdir, 'hbase-site.xml'), 'w').write(xml)
     reset()
 
     assert 'KERBEROS' == get_server_authentication()
@@ -78,17 +79,14 @@ def test_security_kerberos():
     security = HbaseApi._get_security()
 
     assert 'test' == security['kerberos_principal_short_name']
-    assert True == security['use_sasl']
+    assert True is security['use_sasl']
   finally:
     reset()
     finish()
     shutil.rmtree(tmpdir)
 
 
-def hbase_site_xml(
-    kerberos_principal='test/test.com@TEST.COM',
-    authentication='NOSASL'):
-
+def hbase_site_xml(kerberos_principal='test/test.com@TEST.COM', authentication='NOSASL'):
   return """
     <configuration>
 
@@ -113,6 +111,7 @@ def test_impersonation_is_decorator_is_there():
   # Decorator is still there
   from hbased.Hbase import do_as
 
+
 @pytest.mark.django_db
 def test_impersonation():
   from hbased import Hbase as thrift_hbase
@@ -130,24 +129,22 @@ def test_impersonation():
   try:
     client.getTableNames(doas=user.username)
   except AttributeError:
-    pass # We don't mock everything
+    pass  # We don't mock everything
   finally:
     get_conf()[_CNF_HBASE_IMPERSONATION_ENABLED] = impersonation_enabled
 
   assert {} == proto.get_headers()
 
-
   get_conf()[_CNF_HBASE_IMPERSONATION_ENABLED] = 'TRUE'
 
   try:
     client.getTableNames(doas=user.username)
   except AttributeError:
-    pass # We don't mock everything
+    pass  # We don't mock everything
   finally:
     get_conf()[_CNF_HBASE_IMPERSONATION_ENABLED] = impersonation_enabled
 
-  assert {'doAs': u'test_hbase'} == proto.get_headers()
-
+  assert {'doAs': 'test_hbase'} == proto.get_headers()
 
 
 class MockHttpClient(object):
@@ -157,10 +154,12 @@ class MockHttpClient(object):
   def setCustomHeaders(self, headers):
     self.headers = headers
 
+
 class MockTransport(object):
   def __init__(self):
     self._TBufferedTransport__trans = MockHttpClient()
 
+
 class MockProtocol(object):
   def __init__(self):
     self.trans = MockTransport()
@@ -174,10 +173,8 @@ class MockProtocol(object):
 
 @pytest.mark.integration
 class TestIntegrationWithHBase(TestCase):
-
   @classmethod
   def setup_class(cls):
-
     if not is_live_cluster():
       pytest.skip('These tests can only run on a live cluster')
 
@@ -186,7 +183,6 @@ class TestIntegrationWithHBase(TestCase):
     add_to_group('test')
     grant_access("test", "test", "indexer")
 
-
   def test_list_tables(self):
     if not is_live_cluster():
       pytest.skip('HUE-2910: Skipping because test is not reentrant')

+ 2 - 5
apps/hbase/src/hbase/urls.py

@@ -17,12 +17,9 @@
 
 import sys
 
-from hbase import views as hbase_views
+from django.urls import re_path
 
-if sys.version_info[0] > 2:
-  from django.urls import re_path
-else:
-  from django.conf.urls import url as re_path
+from hbase import views as hbase_views
 
 urlpatterns = [
   re_path(r'^$', hbase_views.app, name='index'),
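
One leftover worth noting here, and again in the help, hive, and impala `urls.py` hunks below: `import sys` survives even though its only consumer, the `sys.version_info` branch, is now gone. Assuming nothing outside the visible hunk uses it, the fully cleaned module would shrink to:

```python
# Hypothetical fully-cleaned apps/hbase/src/hbase/urls.py
# (assumes sys has no remaining use outside the removed version check).
from django.urls import re_path

from hbase import views as hbase_views

urlpatterns = [
  re_path(r'^$', hbase_views.app, name='index'),
]
```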

+ 25 - 30
apps/hbase/src/hbase/views.py

@@ -15,33 +15,25 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from future import standard_library
-standard_library.install_aliases()
-import base64
+import re
 import json
+import base64
 import logging
-import re
-import sys
-import urllib.request, urllib.parse, urllib.error
+import urllib.error
+import urllib.parse
+import urllib.request
+from io import StringIO as string_io
+
+from django.utils.translation import gettext as _
 
 from desktop.auth.backend import is_admin
 from desktop.lib.django_util import JsonResponse, render
-
 from hbase import conf
-from hbase.hbase_site import is_impersonation_enabled
-from hbase.settings import DJANGO_APPS
 from hbase.api import HbaseApi
+from hbase.hbase_site import is_impersonation_enabled
 from hbase.management.commands import hbase_setup
 from hbase.server.hbase_lib import get_thrift_type
-
-if sys.version_info[0] > 2:
-  from io import StringIO as string_io
-  from django.utils.translation import gettext as _
-else:
-  from cStringIO import StringIO as string_io
-  from avro import datafile, io
-  from django.utils.translation import ugettext as _
-
+from hbase.settings import DJANGO_APPS
 
 LOG = logging.getLogger()
 
@@ -49,42 +41,45 @@ LOG = logging.getLogger()
 def has_write_access(user):
   return is_admin(user) or user.has_hue_permission(action="write", app=DJANGO_APPS[0]) or is_impersonation_enabled()
 
+
 def app(request):
   return render('app.mako', request, {
     'can_write': has_write_access(request.user),
     'is_embeddable': request.GET.get('is_embeddable', False),
   })
 
+
 # action/cluster/arg1/arg2/arg3...
-def api_router(request, url): # On split, deserialize anything
+def api_router(request, url):  # On split, deserialize anything
 
   def safe_json_load(raw):
     try:
       return json.loads(re.sub(r'(?:\")([0-9]+)(?:\")', r'\1', str(raw)))
-    except:
+    except Exception:
       LOG.debug('Failed to parse input as JSON, falling back to raw input.')
       return raw
 
   def deserialize(data):
-    if type(data) == dict:
+    if type(data) is dict:
       special_type = get_thrift_type(data.pop('hue-thrift-type', ''))
       if special_type:
         return special_type(data)
 
     if hasattr(data, "__iter__"):
       for i, item in enumerate(data):
-        data[i] = deserialize(item) # Sets local binding, needs to set in data
+        data[i] = deserialize(item)  # Sets local binding, needs to set in data
     return data
 
   decoded_url_params = [urllib.parse.unquote(arg) for arg in re.split(r'(?<!\\)/', url.strip('/'))]
   url_params = [safe_json_load((arg, request.POST.get(arg[0:16], arg))[arg[0:15] == 'hbase-post-key-'])
-                for arg in decoded_url_params] # Deserialize later
+                for arg in decoded_url_params]  # Deserialize later
 
   if request.POST.get('dest', False):
     url_params += [request.FILES.get(request.GET.get('dest'))]
 
   return api_dump(HbaseApi(request.user).query(*url_params))
 
+
 def api_dump(response):
   ignored_fields = ('thrift_spec', '__.+__')
   trunc_limit = conf.TRUNCATE_LIMIT.get()
@@ -93,21 +88,21 @@ def api_dump(response):
     try:
       json.dumps(data)
       return data
-    except:
+    except Exception:
       LOG.debug('Failed to dump data as JSON, falling back to raw data.')
       cleaned = {}
       lim = [0]
-      if isinstance(data, str): # Not JSON dumpable, meaning some sort of bytestring or byte data
-        #detect if avro file
-        if(data[:3] == '\x4F\x62\x6A'):
-          #write data to file in memory
+      if isinstance(data, str):  # Not JSON dumpable, meaning some sort of bytestring or byte data
+        # detect if avro file
+        if (data[:3] == '\x4F\x62\x6A'):
+          # write data to file in memory
           try:
             output = io.StringIO()
-          except:
+          except Exception:
             output = string_io()
           output.write(data)
 
-          #read and parse avro
+          # read and parse avro
           rec_reader = io.DatumReader()
           df_reader = datafile.DataFileReader(output, rec_reader)
           return json.dumps(clean([record for record in df_reader]))
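
Worth flagging in this views.py hunk: the Avro fallback still calls `io.DatumReader()` and `datafile.DataFileReader(...)`, but the only import binding those names lived in the deleted Py2 branch, so within what the diff shows that path would now raise `NameError`. A guarded sketch of the same decode (the `_records_from_avro` helper and the `latin-1` round-trip are assumptions; Python 3 Avro readers operate on binary streams):

```python
from io import BytesIO


def _records_from_avro(data):
  """Hypothetical guard: decode an Avro payload without module-level avro imports."""
  from avro import datafile, io as avro_io  # local import; keeps module load NameError-free

  buf = BytesIO(data.encode('latin-1'))  # assumption: the str payload mirrors raw bytes
  reader = datafile.DataFileReader(buf, avro_io.DatumReader())
  try:
    return [record for record in reader]
  finally:
    reader.close()
```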

+ 2 - 5
apps/help/src/help/urls.py

@@ -17,12 +17,9 @@
 
 import sys
 
-from help import views as help_views
+from django.urls import re_path
 
-if sys.version_info[0] > 2:
-  from django.urls import re_path
-else:
-  from django.conf.urls import url as re_path
+from help import views as help_views
 
 urlpatterns = [
   re_path(r'^$', help_views.view, {"app": "desktop", "path": "/index.html"}),

+ 13 - 16
apps/help/src/help/views.py

@@ -15,30 +15,26 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from future import standard_library
-standard_library.install_aliases()
-from builtins import str
-from desktop.lib.django_util import render
-from desktop.lib.exceptions_renderable import PopupException
-from desktop import appmanager
-from hadoop.fs import LocalSubFileSystem
+import os
+import urllib.error
+import urllib.parse
+import urllib.request
 
 import markdown
-import urllib.request, urllib.parse, urllib.error
-import os
-import sys
 
-if sys.version_info[0] > 2:
-  open_file = open
-else:
-  open_file = file
+from desktop import appmanager
+from desktop.lib.django_util import render
+from desktop.lib.exceptions_renderable import PopupException
+from hadoop.fs import LocalSubFileSystem
 
 INDEX_FILENAMES = ("index.md", "index.html", "index.txt")
 
+
 def _unquote_path(path):
   """Normalizes paths."""
   return urllib.parse.unquote(path)
 
+
 def get_help_fs(app_name):
   """
   Creates a local file system for a given app's help directory.
@@ -51,6 +47,7 @@ def get_help_fs(app_name):
   else:
     raise PopupException("App '%s' is not loaded, so no help is available for it!" % app_name)
 
+
 def view(request, app, path):
   """
   Views and renders a file at a given path.
@@ -88,8 +85,8 @@ def view(request, app, path):
 
   data = {
     'content': content,
-    'apps': sorted([ x for x in appmanager.DESKTOP_MODULES if x.help_dir ],
-      key = lambda app: app.menu_index),
+    'apps': sorted([x for x in appmanager.DESKTOP_MODULES if x.help_dir],
+      key=lambda app: app.menu_index),
     'title': appmanager.get_desktop_module(app).nice_name,
     'current': app,
     'is_embeddable': request.GET.get('is_embeddable', False),

+ 7 - 11
apps/hive/src/hive/conf.py

@@ -15,19 +15,15 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import logging
 import sys
+import logging
 
-import beeswax.hive_site
+from django.utils.translation import gettext as _, gettext_lazy as _t
 
+import beeswax.hive_site
+from beeswax.settings import NICE_NAME
 from desktop.conf import has_connectors
 from desktop.lib.exceptions import StructuredThriftTransportException
-from beeswax.settings import NICE_NAME
-
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext_lazy as _t, gettext as _
-else:
-  from django.utils.translation import ugettext_lazy as _t, ugettext as _
 
 LOG = logging.getLogger()
 
@@ -42,7 +38,7 @@ def config_validator(user):
   v1
   All the configuration happens in apps/beeswax.
   '''
-  from beeswax.design import hql_query # dbms is dependent on beeswax.conf, import in method to avoid circular dependency
+  from beeswax.design import hql_query  # dbms is dependent on beeswax.conf, import in method to avoid circular dependency
   from beeswax.server import dbms
 
   res = []
@@ -52,7 +48,7 @@ def config_validator(user):
 
   try:
     try:
-      if not 'test' in sys.argv:  # Avoid tests hanging
+      if 'test' not in sys.argv:  # Avoid tests hanging
         server = dbms.get(user)
         query = hql_query("SELECT 'Hello World!';")
         handle = server.execute_and_wait(query, timeout_sec=10.0)
@@ -73,9 +69,9 @@ def config_validator(user):
     res.append((NICE_NAME, _(msg)))
 
   try:
-    from desktop.lib.fsmanager import get_filesystem
     from aws.conf import is_enabled as is_s3_enabled
     from azure.conf import is_abfs_enabled
+    from desktop.lib.fsmanager import get_filesystem
     warehouse = beeswax.hive_site.get_metastore_warehouse_dir()
     fs = get_filesystem()
     fs_scheme = fs._get_scheme(warehouse)
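
For reviewers skimming the validator hunks here and in `impala/conf.py` below, the connectivity check they both reformat reduces to the same few calls; a condensed sketch (the `smoke_test_hive` wrapper is hypothetical, and the `close()` call is an assumption about the dbms handle API):

```python
import sys

from beeswax.design import hql_query
from beeswax.server import dbms


def smoke_test_hive(user):
  """Hypothetical condensation of config_validator's connectivity check."""
  if 'test' in sys.argv:  # Avoid tests hanging against a real cluster
    return
  server = dbms.get(user)
  query = hql_query("SELECT 'Hello World!';")
  handle = server.execute_and_wait(query, timeout_sec=10.0)
  if handle:
    server.close(handle)  # assumption: releases the operation handle
```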

+ 5 - 6
apps/hive/src/hive/tests.py

@@ -15,12 +15,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import aws
+from unittest.mock import Mock, patch
+
 import pytest
-import sys
 
+import aws
 from desktop.lib.django_test_util import make_logged_in_client
-from unittest.mock import patch, Mock
 
 
 @pytest.mark.django_db
@@ -32,7 +32,7 @@ def test_config_check():
             'default': {
                 'region': 'us-east-1',
                 'access_key_id': 'access_key_id',
-                'secret_access_key':'secret_access_key'
+                'secret_access_key': 'secret_access_key'
             }
         }),
         warehouse = 's3a://yingsdx0602/data1/warehouse/tablespace/managed/hive'
@@ -61,8 +61,7 @@ def test_config_check():
           err_msg = 'Failed to access Hive warehouse: %s' % warehouse
           if not isinstance(err_msg, bytes):
             err_msg = err_msg.encode('utf-8')
-          assert not err_msg in resp.content, resp
+          assert err_msg not in resp.content, resp
         finally:
           for old_conf in reset:
             old_conf()
-

+ 1 - 5
apps/hive/src/hive/urls.py

@@ -17,11 +17,7 @@
 
 import sys
 
-if sys.version_info[0] > 2:
-  from django.urls import re_path
-else:
-  from django.conf.urls import url as re_path
-
+from django.urls import re_path
 
 urlpatterns = [
 ]

+ 29 - 21
apps/impala/src/impala/api.py

@@ -15,41 +15,32 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-## Main views are inherited from Beeswax.
+# Main views are inherited from Beeswax.
 
-import base64
-import logging
+import sys
 import json
+import base64
 import struct
-import sys
+import logging
 
+from django.utils.translation import gettext as _
 from django.views.decorators.http import require_POST
 
 from beeswax.api import error_handler
 from beeswax.models import Session
 from beeswax.server import dbms as beeswax_dbms
 from beeswax.views import authorized_get_query_history
-
 from desktop.lib.django_util import JsonResponse
 from desktop.lib.thrift_util import unpack_guid
 from desktop.models import Document2
-
-from jobbrowser.apis.query_api import _get_api
 from impala import dbms
-from impala.server import get_api as get_impalad_api, _get_impala_server_url
-
+from impala.server import _get_impala_server_url, get_api as get_impalad_api
+from jobbrowser.apis.query_api import _get_api
 from libanalyze import analyze as analyzer, rules
-
 from notebook.models import make_notebook
 
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
-
-
 LOG = logging.getLogger()
-ANALYZER = rules.TopDownAnalysis() # We need to parse some files so save as global
+ANALYZER = rules.TopDownAnalysis()  # We need to parse some files so save as global
 
 
 @require_POST
@@ -60,7 +51,7 @@ def invalidate(request):
   table = request.POST.get('table', None)
   flush_all = request.POST.get('flush_all', 'false').lower() == 'true'
 
-  query_server = dbms.get_query_server_config(connector=None) # TODO: connector support
+  query_server = dbms.get_query_server_config(connector=None)  # TODO: connector support
   db = beeswax_dbms.get(request.user, query_server=query_server)
 
   response = {'status': 0, 'message': ''}
@@ -128,6 +119,7 @@ def get_runtime_profile(request, query_history_id):
 
   return JsonResponse(response)
 
+
 @require_POST
 @error_handler
 def alanize(request):
@@ -150,15 +142,30 @@ def alanize(request):
 
     heatmap = {}
     summary = analyzer.summary(profile)
-    heatmapMetrics = ['AverageThreadTokens', 'BloomFilterBytes', 'PeakMemoryUsage', 'PerHostPeakMemUsage', 'PrepareTime', 'RowsProduced', 'TotalCpuTime', 'TotalNetworkReceiveTime', 'TotalNetworkSendTime', 'TotalStorageWaitTime', 'TotalTime']
+    heatmapMetrics = [
+      'AverageThreadTokens',
+      'BloomFilterBytes',
+      'PeakMemoryUsage',
+      'PerHostPeakMemUsage',
+      'PrepareTime',
+      'RowsProduced',
+      'TotalCpuTime',
+      'TotalNetworkReceiveTime',
+      'TotalNetworkSendTime',
+      'TotalStorageWaitTime',
+      'TotalTime',
+    ]
     for key in heatmapMetrics:
       metrics = analyzer.heatmap_by_host(profile, key)
       if metrics['data']:
         heatmap[key] = metrics
-    response['data'] = { 'query': { 'healthChecks' : result[0]['result'], 'summary': summary, 'heatmap': heatmap, 'heatmapMetrics': sorted(list(heatmap.keys())) } }
+    response['data'] = {
+      'query': {'healthChecks': result[0]['result'], 'summary': summary, 'heatmap': heatmap, 'heatmapMetrics': sorted(list(heatmap.keys()))}
+    }
     response['status'] = 0
   return JsonResponse(response)
 
+
 def alanize_metrics(request):
   response = {'status': -1}
   cluster = json.loads(request.POST.get('cluster', '{}'))
@@ -176,6 +183,7 @@ def alanize_metrics(request):
     response['status'] = 0
   return JsonResponse(response)
 
+
 @require_POST
 @error_handler
 def alanize_fix(request):
@@ -193,7 +201,7 @@ def alanize_fix(request):
       is_task=True,
       compute=cluster
     )
-    response['details'] = { 'task': notebook.execute(request, batch=True) }
+    response['details'] = {'task': notebook.execute(request, batch=True)}
     response['status'] = 0
 
   return JsonResponse(response)

+ 6 - 10
apps/impala/src/impala/api_tests.py

@@ -15,23 +15,19 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from builtins import object
+import sys
 import json
 import logging
-import pytest
-import sys
+from builtins import object
+from unittest.mock import Mock, patch
 
-from django.urls import reverse
+import pytest
 from django.test import TestCase
+from django.urls import reverse
 
 from desktop.lib.django_test_util import make_logged_in_client
 from impala import conf
 
-if sys.version_info[0] > 2:
-  from unittest.mock import patch, Mock
-else:
-  from mock import patch, Mock
-
 LOG = logging.getLogger()
 
 
@@ -50,7 +46,7 @@ class TestImpala(object):
 
       response = self.client.post(reverse("impala:invalidate"), {
           'flush_all': False,
-          'cluster': json.dumps({"credentials":{},"type":"direct","id":"default","name":"default"}),
+          'cluster': json.dumps({"credentials": {}, "type": "direct", "id": "default", "name": "default"}),
           'database': 'default',
           'table': 'k8s_logs'
         }

+ 24 - 13
apps/impala/src/impala/conf.py

@@ -15,25 +15,26 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import logging
 import os
-import socket
 import sys
+import socket
+import logging
+
+from django.utils.translation import gettext as _, gettext_lazy as _t
 
-from desktop.conf import default_ssl_cacerts, default_ssl_validate, AUTH_USERNAME as DEFAULT_AUTH_USERNAME, \
-    AUTH_PASSWORD as DEFAULT_AUTH_PASSWORD, has_connectors
-from desktop.lib.conf import ConfigSection, Config, coerce_bool, coerce_csv, coerce_password_from_script
+from desktop.conf import (
+  AUTH_PASSWORD as DEFAULT_AUTH_PASSWORD,
+  AUTH_USERNAME as DEFAULT_AUTH_USERNAME,
+  default_ssl_cacerts,
+  default_ssl_validate,
+  has_connectors,
+)
+from desktop.lib.conf import Config, ConfigSection, coerce_bool, coerce_csv, coerce_password_from_script
 from desktop.lib.exceptions import StructuredThriftTransportException
 from desktop.lib.paths import get_desktop_root
-
 from impala.impala_flags import get_max_result_cache_size, is_impersonation_enabled, is_kerberos_enabled, is_webserver_spnego_enabled
 from impala.settings import NICE_NAME
 
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext_lazy as _t, gettext as _
-else:
-  from django.utils.translation import ugettext_lazy as _t, ugettext as _
-
 LOG = logging.getLogger()
 
 
@@ -177,16 +178,19 @@ SSL = ConfigSection(
   )
 )
 
+
 def get_auth_username():
   """Get from top level default from desktop"""
   return DEFAULT_AUTH_USERNAME.get()
 
+
 AUTH_USERNAME = Config(
   key="auth_username",
   help=_t("Auth username of the hue user used for authentications."),
   private=True,
   dynamic_default=get_auth_username)
 
+
 def get_auth_password():
   """Get from script or backward compatibility"""
   password = AUTH_PASSWORD_SCRIPT.get()
@@ -195,6 +199,7 @@ def get_auth_password():
 
   return DEFAULT_AUTH_PASSWORD.get()
 
+
 AUTH_PASSWORD = Config(
   key="auth_password",
   help=_t("LDAP/PAM/.. password of the hue user used for authentications."),
@@ -210,6 +215,7 @@ AUTH_PASSWORD_SCRIPT = Config(
   default=None
 )
 
+
 def get_daemon_config(key):
   from metadata.conf import MANAGER
   from metadata.manager_client import ManagerApi
@@ -219,18 +225,21 @@ def get_daemon_config(key):
 
   return None
 
+
 def get_daemon_api_username():
   """
     Try to get daemon_api_username from Cloudera Manager API
   """
   return get_daemon_config('webserver_htpassword_user')
 
+
 def get_daemon_api_password():
   """
     Try to get daemon_api_password from Cloudera Manager API
   """
   return get_daemon_config('webserver_htpassword_password')
 
+
 DAEMON_API_PASSWORD = Config(
   key="daemon_api_password",
   help=_t("Password for Impala Daemon when username/password authentication is enabled for the Impala Daemon UI."),
@@ -262,9 +271,11 @@ DAEMON_API_AUTH_SCHEME = Config(
   default="digest"
 )
 
+
 def get_use_sasl_default():
   """kerberos enabled or password is specified"""
-  return is_kerberos_enabled() or AUTH_PASSWORD.get() is not None # Maps closely to legacy behavior
+  return is_kerberos_enabled() or AUTH_PASSWORD.get() is not None  # Maps closely to legacy behavior
+
 
 USE_SASL = Config(
   key="use_sasl",
@@ -296,7 +307,7 @@ def config_validator(user):
 
   try:
     try:
-      if not 'test' in sys.argv: # Avoid tests hanging
+      if 'test' not in sys.argv:  # Avoid tests hanging
         query_server = get_query_server_config(name='impala')
         server = dbms.get(user, query_server)
         query = hql_query("SELECT 'Hello World!';")
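
The `dynamic_default` plumbing that keeps getting re-spaced in this file deserves one compact illustration: the callable is evaluated when `.get()` runs, not at import time, which is why `get_auth_password()` can consult `AUTH_PASSWORD_SCRIPT` first. A toy sketch (demo names are hypothetical; assumes the `desktop.lib.conf.Config` semantics used throughout these hunks):

```python
from desktop.lib.conf import Config


def _demo_default_username():
  # Called lazily on .get(), so it can read other configs or the environment.
  return 'hue'


DEMO_AUTH_USERNAME = Config(
  key='demo_auth_username',
  private=True,
  dynamic_default=_demo_default_username)

# DEMO_AUTH_USERNAME.get() -> 'hue' unless the key is set in hue.ini (assumption).
```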

+ 13 - 21
apps/impala/src/impala/dbms.py

@@ -15,28 +15,28 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import logging
 import sys
+import logging
+
+from django.utils.translation import gettext as _
 
+from beeswax.design import hql_query
+from beeswax.models import QUERY_TYPES
+from beeswax.server import dbms
+from beeswax.server.dbms import (
+  HiveServer2Dbms,
+  QueryServerException,
+  QueryServerTimeoutException,
+  get_query_server_config as beeswax_query_server_config,
+  get_query_server_config_via_connector,
+)
 from desktop.conf import CLUSTER_ID, has_connectors
 from desktop.lib.exceptions_renderable import PopupException
 from desktop.lib.i18n import smart_str
 from desktop.models import Cluster
-from beeswax.design import hql_query
-from beeswax.models import QUERY_TYPES
-from beeswax.server import dbms
-from beeswax.server.dbms import HiveServer2Dbms, QueryServerException, QueryServerTimeoutException, \
-  get_query_server_config as beeswax_query_server_config, get_query_server_config_via_connector
-
 from impala import conf
 from impala.impala_flags import get_hs2_http_port
 
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
-
-
 LOG = logging.getLogger()
 
 
@@ -102,13 +102,11 @@ class ImpalaDbms(HiveServer2Dbms):
     from_clause = '.'.join('`%s`' % token.strip('`') for token in from_tokens)
     return select_clause, from_clause
 
-
   @classmethod
   def get_histogram_query(cls, database, table, column, nested=None):
     select_clause, from_clause = cls.get_nested_select(database, table, column, nested)
     return 'SELECT histogram(%s) FROM %s' % (select_clause, from_clause)
 
-
   # Deprecated
   def invalidate(self, database=None, table=None, flush_all=False):
     handle = None
@@ -145,7 +143,6 @@ class ImpalaDbms(HiveServer2Dbms):
       if handle:
         self.close(handle)
 
-
   def refresh_table(self, database, table):
     handle = None
     try:
@@ -159,7 +156,6 @@ class ImpalaDbms(HiveServer2Dbms):
       if handle:
         self.close(handle)
 
-
   def get_histogram(self, database, table, column, nested=None):
     """
     Returns the results of an Impala SELECT histogram() FROM query for a given column or nested type.
@@ -185,15 +181,12 @@ class ImpalaDbms(HiveServer2Dbms):
 
     return results
 
-
   def get_exec_summary(self, query_handle, session_handle):
     return self.client._client.get_exec_summary(query_handle, session_handle)
 
-
   def get_runtime_profile(self, query_handle, session_handle):
     return self.client._client.get_runtime_profile(query_handle, session_handle)
 
-
   def _get_beeswax_tables(self, database):
     beeswax_query_server = dbms.get(
       user=self.client.user,
@@ -203,7 +196,6 @@ class ImpalaDbms(HiveServer2Dbms):
     )
     return beeswax_query_server.get_tables(database=database)
 
-
   def _get_different_tables(self, database):
     beeswax_tables = self._get_beeswax_tables(database)
     impala_tables = self.get_tables(database=database)

+ 5 - 12
apps/impala/src/impala/dbms_tests.py

@@ -15,35 +15,28 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import sys
 import json
 import logging
-import pytest
-import sys
+from unittest.mock import Mock, patch
 
+import pytest
 from django.urls import reverse
 
 import desktop.conf as desktop_conf
 from desktop.lib.django_test_util import make_logged_in_client
-from useradmin.models import User
-
-
 from impala.dbms import get_query_server_config
-
-if sys.version_info[0] > 2:
-  from unittest.mock import patch, Mock
-else:
-  from mock import patch, Mock
-
+from useradmin.models import User
 
 LOG = logging.getLogger()
 
+
 @pytest.mark.django_db
 class TestDbms():
 
   def setup_method(self):
     self.client = make_logged_in_client()
 
-
   def test_get_connector_config(self):
     connector = {
       'type': 'impala-compute',

+ 10 - 23
apps/impala/src/impala/server.py

@@ -15,27 +15,23 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from past.builtins import basestring
-from builtins import object
+import sys
 import json
 import logging
 import threading
-import sys
+from builtins import object
+
+from django.utils.translation import gettext as _
+from ImpalaService import ImpalaHiveServer2Service
+from past.builtins import basestring
 
+from beeswax.server.dbms import QueryServerException
+from beeswax.server.hive_server2_lib import HiveServerClient
 from desktop.lib.exceptions_renderable import PopupException
 from desktop.lib.rest.http_client import HttpClient
 from desktop.lib.rest.resource import Resource
-from beeswax.server.dbms import QueryServerException
-from beeswax.server.hive_server2_lib import HiveServerClient
-
-from ImpalaService import ImpalaHiveServer2Service
-from impala.impala_flags import get_webserver_certificate_file, is_webserver_spnego_enabled, is_kerberos_enabled
-from impala.conf import DAEMON_API_USERNAME, DAEMON_API_PASSWORD, DAEMON_API_PASSWORD_SCRIPT, DAEMON_API_AUTH_SCHEME, COORDINATOR_URL
-
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
+from impala.conf import COORDINATOR_URL, DAEMON_API_AUTH_SCHEME, DAEMON_API_PASSWORD, DAEMON_API_PASSWORD_SCRIPT, DAEMON_API_USERNAME
+from impala.impala_flags import get_webserver_certificate_file, is_kerberos_enabled, is_webserver_spnego_enabled
 
 LOG = logging.getLogger()
 
@@ -87,7 +83,6 @@ class ImpalaServerClient(HiveServerClient):
 
     return self._serialize_exec_summary(resp.summary)
 
-
   def get_runtime_profile(self, operation_handle, session_handle):
     """
     Calls Impala HS2 API's GetRuntimeProfile method on the given query handle
@@ -105,7 +100,6 @@ class ImpalaServerClient(HiveServerClient):
 
     return resp.profile
 
-
   def _serialize_exec_summary(self, summary):
     try:
       summary_dict = {
@@ -169,29 +163,24 @@ class ImpalaDaemonApi(object):
   def __str__(self):
     return "ImpalaDaemonApi at %s" % self._url
 
-
   @property
   def url(self):
     return self._url
 
-
   @property
   def security_enabled(self):
     return self._security_enabled
 
-
   @property
   def user(self):
     return self._thread_local.user
 
-
   def set_user(self, user):
     if hasattr(user, 'username'):
       self._thread_local.user = user.username
     else:
       self._thread_local.user = user
 
-
   def get_queries(self):
     params = {
       'json': 'true'
@@ -206,7 +195,6 @@ class ImpalaDaemonApi(object):
     except ValueError as e:
       raise ImpalaDaemonApiException('ImpalaDaemonApi did not return valid JSON: %s' % e)
 
-
   def get_query(self, query_id):
     params = {
       'query_id': query_id,
@@ -222,7 +210,6 @@ class ImpalaDaemonApi(object):
     except ValueError as e:
       raise ImpalaDaemonApiException('ImpalaDaemonApi did not return valid JSON: %s' % e)
 
-
   def get_query_profile(self, query_id):
     params = {
       'query_id': query_id,

+ 5 - 14
apps/impala/src/impala/server_tests.py

@@ -16,21 +16,16 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import sys
 import logging
+from unittest.mock import MagicMock, Mock, patch
+
 import pytest
-import sys
 
-from desktop.lib.exceptions_renderable import PopupException
 from desktop.lib.django_test_util import make_logged_in_client
-from useradmin.models import User
-
+from desktop.lib.exceptions_renderable import PopupException
 from impala.server import ImpalaDaemonApi, _get_impala_server_url
-
-if sys.version_info[0] > 2:
-  from unittest.mock import patch, Mock, MagicMock
-else:
-  from mock import patch, Mock, MagicMock
-
+from useradmin.models import User
 
 LOG = logging.getLogger()
 
@@ -46,7 +41,6 @@ class TestImpalaDaemonApi():
     with pytest.raises(PopupException):
       _get_impala_server_url(session=None)
 
-
   def test_digest_auth(self):
 
     with patch('impala.server.DAEMON_API_USERNAME.get') as DAEMON_API_USERNAME_get:
@@ -73,7 +67,6 @@ class TestImpalaDaemonApi():
           server._client.set_kerberos_auth.assert_not_called()
           server._client.set_basic_auth.assert_not_called()
 
-
   def test_basic_auth(self):
 
     with patch('impala.server.DAEMON_API_USERNAME.get') as DAEMON_API_USERNAME_get:
@@ -104,7 +97,6 @@ class TestImpalaDaemonApi():
             server._client.set_digest_auth.assert_not_called()
             server._client.set_kerberos_auth.assert_not_called()
 
-
   def test_kerberos_auth(self):
 
     with patch('impala.server.DAEMON_API_USERNAME.get') as DAEMON_API_USERNAME_get:
@@ -127,7 +119,6 @@ class TestImpalaDaemonApi():
         with patch('impala.server.HttpClient') as HttpClient:
           with patch('impala.server.is_webserver_spnego_enabled') as is_webserver_spnego_enabled:
 
-
             DAEMON_API_USERNAME_get.return_value = None
             DAEMON_API_PASSWORD_get.return_value = 'impala'
             is_webserver_spnego_enabled.return_value = False

+ 2 - 6
apps/impala/src/impala/test_impala_flags.py

@@ -15,16 +15,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import logging
 import os
-import sys
+import logging
 import tempfile
 
 from impala import conf, impala_flags
 
-
-open_file = open
-
 LOG = logging.getLogger()
 
 
@@ -47,7 +43,7 @@ def test_impala_flags():
       -max_result_cache_size=%d
       -authorized_proxy_user_config=hue=*
     """ % expected_rows
-    open_file(os.path.join(test_impala_conf_dir, 'impalad_flags'), 'w').write(flags)
+    open(os.path.join(test_impala_conf_dir, 'impalad_flags'), 'w').write(flags)
 
     resets.append(conf.IMPALA_CONF_DIR.set_for_testing(test_impala_conf_dir))
     impala_flags.reset()

+ 121 - 107
apps/impala/src/impala/tests.py

@@ -15,53 +15,46 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from builtins import object
-import json
-import logging
-import pytest
 import re
 import sys
+import json
+import logging
+from builtins import object
+from unittest.mock import Mock, patch
 
+import pytest
 from django.urls import reverse
 
 import desktop.conf as desktop_conf
-from desktop.lib.django_test_util import make_logged_in_client
-from desktop.lib.exceptions_renderable import PopupException
-from desktop.lib.test_utils import add_to_group
-from desktop.models import Document
 from beeswax import data_export
-from beeswax.design import hql_query
 from beeswax.data_export import download
-from beeswax.models import SavedQuery, QueryHistory
+from beeswax.design import hql_query
+from beeswax.models import QueryHistory, SavedQuery
 from beeswax.server import dbms
-from beeswax.test_base import get_query_server_config, wait_for_query_to_finish, fetch_query_result_data
+from beeswax.test_base import fetch_query_result_data, get_query_server_config, wait_for_query_to_finish
 from beeswax.tests import _make_query
+from desktop.lib.django_test_util import make_logged_in_client
+from desktop.lib.exceptions_renderable import PopupException
+from desktop.lib.test_utils import add_to_group
+from desktop.models import Document
 from hadoop.pseudo_hdfs4 import get_db_prefix, is_live_cluster
-from useradmin.models import User
-
 from impala import conf
 from impala.dbms import ImpalaDbms
-
-if sys.version_info[0] > 2:
-  from unittest.mock import patch, Mock
-else:
-  from mock import patch, Mock
-
+from useradmin.models import User
 
 LOG = logging.getLogger()
 
 
 class MockDbms(object):
-
   def get_databases(self):
     return ['db1', 'db2']
 
   def get_tables(self, database):
     return ['table1', 'table2']
 
+
 @pytest.mark.django_db
 class TestMockedImpala(object):
-
   def setup_method(self):
     self.client = make_logged_in_client()
 
@@ -116,29 +109,28 @@ class TestMockedImpala(object):
 
         get_hive_metastore_interpreters.return_value = []
         with pytest.raises(PopupException):
-          ddms.invalidate('default') # No hive/metastore configured
+          ddms.invalidate('default')  # No hive/metastore configured
 
         get_hive_metastore_interpreters.return_value = ['hive']
         ddms.invalidate('default')
         ddms.client.query.assert_called_once_with(ddms.client.query.call_args[0][0])
-        assert 'customers' in ddms.client.query.call_args[0][0].hql_query # diff of 1 table
+        assert 'customers' in ddms.client.query.call_args[0][0].hql_query  # diff of 1 table
 
         get_different_tables.return_value = ['customers', '', '', '', '', '', '', '', '', '', '']
         with pytest.raises(PopupException):
-          ddms.invalidate('default') # diff of 11 tables. Limit is 10.
+          ddms.invalidate('default')  # diff of 11 tables. Limit is 10.
 
         ddms.invalidate('default', 'customers')
-        assert ddms.client.query.call_count == 2 # Second call
-        assert 'customers' in ddms.client.query.call_args[0][0].hql_query # invalidate 1 table
+        assert ddms.client.query.call_count == 2  # Second call
+        assert 'customers' in ddms.client.query.call_args[0][0].hql_query  # invalidate 1 table
 
         ddms.invalidate()
-        assert ddms.client.query.call_count == 3 # Third call
-        assert 'customers' not in ddms.client.query.call_args[0][0].hql_query # Full invalidate
+        assert ddms.client.query.call_count == 3  # Third call
+        assert 'customers' not in ddms.client.query.call_args[0][0].hql_query  # Full invalidate
 
 
 @pytest.mark.integration
 class TestImpalaIntegration(object):
-
   @classmethod
   def setup_class(cls):
     cls.finish = []
@@ -155,7 +147,7 @@ class TestImpalaIntegration(object):
     queries = [
       'DROP TABLE IF EXISTS %(db)s.tweets;' % {'db': cls.DATABASE},
       'DROP DATABASE IF EXISTS %(db)s CASCADE;' % {'db': cls.DATABASE},
-      'CREATE DATABASE %(db)s;' % {'db': cls.DATABASE}
+      'CREATE DATABASE %(db)s;' % {'db': cls.DATABASE},
     ]
 
     for query in queries:
@@ -164,19 +156,26 @@ class TestImpalaIntegration(object):
       content = json.loads(resp.content)
       assert content['status'] == 0, resp.content
 
-    queries = ["""
+    queries = [
+      """
       CREATE TABLE tweets (row_num INTEGER, id_str STRING, text STRING) STORED AS PARQUET;
-    """, """
+    """,
+      """
       INSERT INTO TABLE tweets VALUES (1, "531091827395682000", "My dad looks younger than costa");
-    """, """
+    """,
+      """
       INSERT INTO TABLE tweets VALUES (2, "531091827781550000", "There is a thin line between your partner being vengeful and you reaping the consequences of your bad actions towards your partner.");
-    """, """
+    """,  # noqa: E501
+      """
       INSERT INTO TABLE tweets VALUES (3, "531091827768979000", "@Mustang_Sally83 and they need to get into you :))))");
-    """, """
+    """,
+      """
       INSERT INTO TABLE tweets VALUES (4, "531091827114668000", "@RachelZJohnson thank you rach!xxx");
-    """, """
+    """,
+      """
       INSERT INTO TABLE tweets VALUES (5, "531091827949309000", "i think @WWERollins was robbed of the IC title match this week on RAW also i wonder if he will get a rematch i hope so @WWE");
-    """]
+    """,  # noqa: E501
+    ]
 
     for query in queries:
       resp = _make_query(cls.client, query, database=cls.DATABASE, local=False, server_name='impala')
@@ -184,28 +183,31 @@ class TestImpalaIntegration(object):
       content = json.loads(resp.content)
       assert content['status'] == 0, resp.content
 
-
   @classmethod
   def teardown_class(cls):
     # We need to drop tables before dropping the database
-    queries = ["""
+    queries = [
+      """
       DROP TABLE IF EXISTS %(db)s.tweets;
-    """ % {'db': cls.DATABASE}, """
+    """
+      % {'db': cls.DATABASE},
+      """
       DROP DATABASE %(db)s CASCADE;
-    """ % {'db': cls.DATABASE}]
+    """
+      % {'db': cls.DATABASE},
+    ]
     for query in queries:
       resp = _make_query(cls.client, query, database='default', local=False, server_name='impala')
       resp = wait_for_query_to_finish(cls.client, resp, max=180.0)
 
     # Check the cleanup
     databases = cls.db.get_databases()
-    assert not cls.DATABASE in databases
-    assert not '%(db)s_other' % {'db': cls.DATABASE} in databases
+    assert cls.DATABASE not in databases
+    assert '%(db)s_other' % {'db': cls.DATABASE} not in databases
 
     for f in cls.finish:
       f()
 
-
   def test_basic_flow(self):
     dbs = self.db.get_databases()
     assert '_impala_builtins' in dbs, dbs
@@ -247,12 +249,11 @@ class TestImpalaIntegration(object):
     content = json.loads(resp.content)
     assert 0 == content['status']
 
-
   def test_data_download(self):
     hql = 'SELECT * FROM tweets %(limit)s'
 
     FETCH_SIZE = data_export.FETCH_SIZE
-    data_export.FETCH_SIZE = 2 # Decrease fetch size to validate last fetch logic
+    data_export.FETCH_SIZE = 2  # Decrease fetch size to validate last fetch logic
 
     try:
       query = hql_query(hql % {'limit': ''})
@@ -263,7 +264,6 @@ class TestImpalaIntegration(object):
       csv_content = ''.join(csv_resp.streaming_content)
       assert len(csv_content.strip().split('\n')) == 5 + 1
 
-
       query = hql_query(hql % {'limit': 'LIMIT 0'})
 
       handle = self.db.execute_and_wait(query)
@@ -287,7 +287,6 @@ class TestImpalaIntegration(object):
     finally:
       data_export.FETCH_SIZE = FETCH_SIZE
 
-
   def test_explain(self):
     QUERY = """
       SELECT * FROM tweets ORDER BY row_num;
@@ -297,17 +296,15 @@ class TestImpalaIntegration(object):
     assert 'MERGING-EXCHANGE' in json_response['explanation'], json_response
     assert 'SCAN HDFS' in json_response['explanation'], json_response
 
-
   def test_get_table_sample(self):
     client = make_logged_in_client()
 
     resp = client.get(reverse('impala:get_sample_data', kwargs={'database': self.DATABASE, 'table': 'tweets'}))
     data = json.loads(resp.content)
     assert 0 == data['status'], data
-    assert [u'row_num', u'id_str', u'text'] == data['headers'], data
+    assert ['row_num', 'id_str', 'text'] == data['headers'], data
     assert len(data['rows']), data
 
-
   def test_get_session(self):
     session = None
     try:
@@ -327,14 +324,12 @@ class TestImpalaIntegration(object):
         except Exception:
           pass
 
-
   def test_get_settings(self):
     resp = self.client.get(reverse("impala:get_settings"))
     json_resp = json.loads(resp.content)
     assert 0 == json_resp['status']
     assert 'QUERY_TIMEOUT_S' in json_resp['settings']
 
-
   def test_invalidate_tables(self):
     # Helper function to get Impala and Beeswax (HMS) tables
     def get_impala_beeswax_tables():
@@ -347,9 +342,10 @@ class TestImpalaIntegration(object):
       return impala_tables, beeswax_tables
 
     impala_tables, beeswax_tables = get_impala_beeswax_tables()
-    assert impala_tables == beeswax_tables, (
-      "\ntest_invalidate_tables: `%s`\nImpala Tables: %s\nBeeswax Tables: %s" 
-      % (self.DATABASE, ','.join(impala_tables), ','.join(beeswax_tables))
+    assert impala_tables == beeswax_tables, "\ntest_invalidate_tables: `%s`\nImpala Tables: %s\nBeeswax Tables: %s" % (
+      self.DATABASE,
+      ','.join(impala_tables),
+      ','.join(beeswax_tables),
     )
 
     hql = """
@@ -360,18 +356,18 @@ class TestImpalaIntegration(object):
     impala_tables, beeswax_tables = get_impala_beeswax_tables()
     # New table is not found by Impala
     assert 'new_table' in beeswax_tables, beeswax_tables
-    assert not 'new_table' in impala_tables, impala_tables
+    assert 'new_table' not in impala_tables, impala_tables
 
     resp = self.client.post(reverse('impala:invalidate'), {'database': self.DATABASE})
 
     impala_tables, beeswax_tables = get_impala_beeswax_tables()
     # Invalidate picks up new table
-    assert impala_tables == beeswax_tables, (
-      "\ntest_invalidate_tables: `%s`\nImpala Tables: %s\nBeeswax Tables: %s"
-      % (self.DATABASE, ','.join(impala_tables), ','.join(beeswax_tables))
+    assert impala_tables == beeswax_tables, "\ntest_invalidate_tables: `%s`\nImpala Tables: %s\nBeeswax Tables: %s" % (
+      self.DATABASE,
+      ','.join(impala_tables),
+      ','.join(beeswax_tables),
     )
 
-
   def test_refresh_table(self):
     # Helper function to get Impala and Beeswax (HMS) columns
     def get_impala_beeswax_columns():
@@ -382,8 +378,12 @@ class TestImpalaIntegration(object):
       return impala_columns, beeswax_columns
 
     impala_columns, beeswax_columns = get_impala_beeswax_columns()
-    assert impala_columns == beeswax_columns, ("\ntest_refresh_table: `%s`.`%s`\nImpala Columns: %s\nBeeswax Columns: %s"
-      % (self.DATABASE, 'tweets', ','.join(impala_columns), ','.join(beeswax_columns)))
+    assert impala_columns == beeswax_columns, "\ntest_refresh_table: `%s`.`%s`\nImpala Columns: %s\nBeeswax Columns: %s" % (
+      self.DATABASE,
+      'tweets',
+      ','.join(impala_columns),
+      ','.join(beeswax_columns),
+    )
 
     hql = """
       ALTER TABLE tweets ADD COLUMNS (new_column INT);
@@ -393,15 +393,18 @@ class TestImpalaIntegration(object):
     impala_columns, beeswax_columns = get_impala_beeswax_columns()
     # New column is not found by Impala
     assert 'new_column' in beeswax_columns, beeswax_columns
-    assert not 'new_column' in impala_columns, impala_columns
+    assert 'new_column' not in impala_columns, impala_columns
 
     resp = self.client.post(reverse('impala:refresh_table', kwargs={'database': self.DATABASE, 'table': 'tweets'}))
 
     impala_columns, beeswax_columns = get_impala_beeswax_columns()
     # Invalidate picks up new column
-    assert impala_columns == beeswax_columns, ("\ntest_refresh_table: `%s`.`%s`\nImpala Columns: %s\nBeeswax Columns: %s"
-      % (self.DATABASE, 'tweets', ','.join(impala_columns), ','.join(beeswax_columns)))
-
+    assert impala_columns == beeswax_columns, "\ntest_refresh_table: `%s`.`%s`\nImpala Columns: %s\nBeeswax Columns: %s" % (
+      self.DATABASE,
+      'tweets',
+      ','.join(impala_columns),
+      ','.join(beeswax_columns),
+    )
 
   def test_get_exec_summary(self):
     query = """
@@ -427,7 +430,6 @@ class TestImpalaIntegration(object):
     assert 'nodes' in data['summary'], data
     assert len(data['summary']['nodes']) > 0, data['summary']['nodes']
 
-
   def test_get_runtime_profile(self):
     query = """
       SELECT COUNT(1) FROM tweets;
@@ -462,20 +464,23 @@ def create_saved_query(app_name, owner):
 
 def test_ssl_cacerts():
   for desktop_kwargs, conf_kwargs, expected in [
-      ({'present': False}, {'present': False}, ''),
-      ({'present': False}, {'data': 'local-cacerts.pem'}, 'local-cacerts.pem'),
-
-      ({'data': 'global-cacerts.pem'}, {'present': False}, 'global-cacerts.pem'),
-      ({'data': 'global-cacerts.pem'}, {'data': 'local-cacerts.pem'}, 'local-cacerts.pem'),
-      ]:
+    ({'present': False}, {'present': False}, ''),
+    ({'present': False}, {'data': 'local-cacerts.pem'}, 'local-cacerts.pem'),
+    ({'data': 'global-cacerts.pem'}, {'present': False}, 'global-cacerts.pem'),
+    ({'data': 'global-cacerts.pem'}, {'data': 'local-cacerts.pem'}, 'local-cacerts.pem'),
+  ]:
     resets = [
       desktop_conf.SSL_CACERTS.set_for_testing(**desktop_kwargs),
       conf.SSL.CACERTS.set_for_testing(**conf_kwargs),
     ]
 
     try:
-      assert conf.SSL.CACERTS.get() == expected, ('desktop:%s conf:%s expected:%s got:%s' 
-        % (desktop_kwargs, conf_kwargs, expected, conf.SSL.CACERTS.get()))
+      assert conf.SSL.CACERTS.get() == expected, 'desktop:%s conf:%s expected:%s got:%s' % (
+        desktop_kwargs,
+        conf_kwargs,
+        expected,
+        conf.SSL.CACERTS.get(),
+      )
     finally:
       for reset in resets:
         reset()
@@ -483,25 +488,28 @@ def test_ssl_cacerts():
 
 def test_ssl_validate():
   for desktop_kwargs, conf_kwargs, expected in [
-      ({'present': False}, {'present': False}, True),
-      ({'present': False}, {'data': False}, False),
-      ({'present': False}, {'data': True}, True),
-
-      ({'data': False}, {'present': False}, False),
-      ({'data': False}, {'data': False}, False),
-      ({'data': False}, {'data': True}, True),
-
-      ({'data': True}, {'present': False}, True),
-      ({'data': True}, {'data': False}, False),
-      ({'data': True}, {'data': True}, True),
-      ]:
+    ({'present': False}, {'present': False}, True),
+    ({'present': False}, {'data': False}, False),
+    ({'present': False}, {'data': True}, True),
+    ({'data': False}, {'present': False}, False),
+    ({'data': False}, {'data': False}, False),
+    ({'data': False}, {'data': True}, True),
+    ({'data': True}, {'present': False}, True),
+    ({'data': True}, {'data': False}, False),
+    ({'data': True}, {'data': True}, True),
+  ]:
     resets = [
       desktop_conf.SSL_VALIDATE.set_for_testing(**desktop_kwargs),
       conf.SSL.VALIDATE.set_for_testing(**conf_kwargs),
     ]
 
     try:
-      assert conf.SSL.VALIDATE.get() == expected, 'desktop:%s conf:%s expected:%s got:%s' % (desktop_kwargs, conf_kwargs, expected, conf.SSL.VALIDATE.get())
+      assert conf.SSL.VALIDATE.get() == expected, 'desktop:%s conf:%s expected:%s got:%s' % (
+        desktop_kwargs,
+        conf_kwargs,
+        expected,
+        conf.SSL.VALIDATE.get(),
+      )
     finally:
       for reset in resets:
         reset()
@@ -509,9 +517,9 @@ def test_ssl_validate():
 
 def test_thrift_over_http_config():
   resets = [
-      conf.SERVER_HOST.set_for_testing('impalad_host'),
-      conf.SERVER_PORT.set_for_testing(21050),
-      conf.USE_THRIFT_HTTP.set_for_testing(True)
+    conf.SERVER_HOST.set_for_testing('impalad_host'),
+    conf.SERVER_PORT.set_for_testing(21050),
+    conf.USE_THRIFT_HTTP.set_for_testing(True),
   ]
   with patch('impala.dbms.get_hs2_http_port') as get_hs2_http_port:
     get_hs2_http_port.return_value = 30000
@@ -527,10 +535,10 @@ def test_thrift_over_http_config():
 
 def test_thrift_over_http_config_with_proxy_endpoint():
   resets = [
-      conf.SERVER_HOST.set_for_testing('impala_proxy'),
-      conf.SERVER_PORT.set_for_testing(36000),
-      conf.USE_THRIFT_HTTP.set_for_testing(True),
-      conf.PROXY_ENDPOINT.set_for_testing('/endpoint')
+    conf.SERVER_HOST.set_for_testing('impala_proxy'),
+    conf.SERVER_PORT.set_for_testing(36000),
+    conf.USE_THRIFT_HTTP.set_for_testing(True),
+    conf.PROXY_ENDPOINT.set_for_testing('/endpoint'),
   ]
   with patch('impala.dbms.get_hs2_http_port') as get_hs2_http_port:
     get_hs2_http_port.return_value = 30000
@@ -545,16 +553,22 @@ def test_thrift_over_http_config_with_proxy_endpoint():
 
 
 class TestImpalaDbms(object):
-
   def test_get_impala_nested_select(self):
     assert ImpalaDbms.get_nested_select('default', 'customers', 'id', None) == ('id', '`default`.`customers`')
-    assert (ImpalaDbms.get_nested_select('default', 'customers', 'email_preferences', 'categories/promos/') ==
-                 ('email_preferences.categories.promos', '`default`.`customers`'))
-    assert (ImpalaDbms.get_nested_select('default', 'customers', 'addresses', 'key') ==
-                 ('key', '`default`.`customers`.`addresses`'))
-    assert (ImpalaDbms.get_nested_select('default', 'customers', 'addresses', 'value/street_1/') ==
-                 ('street_1', '`default`.`customers`.`addresses`'))
-    assert (ImpalaDbms.get_nested_select('default', 'customers', 'orders', 'item/order_date') ==
-                 ('order_date', '`default`.`customers`.`orders`'))
-    assert (ImpalaDbms.get_nested_select('default', 'customers', 'orders', 'item/items/item/product_id') ==
-                 ('product_id', '`default`.`customers`.`orders`.`items`'))
+    assert ImpalaDbms.get_nested_select('default', 'customers', 'email_preferences', 'categories/promos/') == (
+      'email_preferences.categories.promos',
+      '`default`.`customers`',
+    )
+    assert ImpalaDbms.get_nested_select('default', 'customers', 'addresses', 'key') == ('key', '`default`.`customers`.`addresses`')
+    assert ImpalaDbms.get_nested_select('default', 'customers', 'addresses', 'value/street_1/') == (
+      'street_1',
+      '`default`.`customers`.`addresses`',
+    )
+    assert ImpalaDbms.get_nested_select('default', 'customers', 'orders', 'item/order_date') == (
+      'order_date',
+      '`default`.`customers`.`orders`',
+    )
+    assert ImpalaDbms.get_nested_select('default', 'customers', 'orders', 'item/items/item/product_id') == (
+      'product_id',
+      '`default`.`customers`.`orders`.`items`',
+    )

+ 2 - 5
apps/impala/src/impala/urls.py

@@ -17,14 +17,11 @@
 
 import sys
 
+from django.urls import re_path
+
 from beeswax.urls import urlpatterns as beeswax_urls
 from impala import api as impala_api
 
-if sys.version_info[0] > 2:
-  from django.urls import re_path
-else:
-  from django.conf.urls import url as re_path
-
 urlpatterns = [
   re_path(r'^api/invalidate$', impala_api.invalidate, name='invalidate'),
   re_path(r'^api/refresh/(?P<database>\w+)/(?P<table>\w+)$', impala_api.refresh_table, name='refresh_table'),

+ 16 - 25
apps/jobbrowser/src/jobbrowser/api.py

@@ -15,33 +15,26 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from builtins import object
-import logging
 import sys
-
+import logging
+from builtins import object
 from datetime import datetime, timedelta
-from django.core.paginator import Paginator
 
-from desktop.lib.exceptions_renderable import PopupException
-from desktop.lib.rest.http_client import RestException
-
-from hadoop.conf import YARN_CLUSTERS
-from hadoop.cluster import rm_ha
+from django.core.paginator import Paginator
+from django.utils.translation import gettext as _
 
-import hadoop.yarn.history_server_api as history_server_api
 import hadoop.yarn.mapreduce_api as mapreduce_api
 import hadoop.yarn.node_manager_api as node_manager_api
+import hadoop.yarn.history_server_api as history_server_api
 import hadoop.yarn.resource_manager_api as resource_manager_api
 import hadoop.yarn.spark_history_server_api as spark_history_server_api
-
-from jobbrowser.conf import SHARE_JOBS
-from jobbrowser.yarn_models import Application, YarnV2Job, Job as YarnJob, KilledJob as KilledYarnJob, Container, SparkJob
 from desktop.auth.backend import is_admin
-
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
+from desktop.lib.exceptions_renderable import PopupException
+from desktop.lib.rest.http_client import RestException
+from hadoop.cluster import rm_ha
+from hadoop.conf import YARN_CLUSTERS
+from jobbrowser.conf import SHARE_JOBS
+from jobbrowser.yarn_models import Application, Container, Job as YarnJob, KilledJob as KilledYarnJob, SparkJob, YarnV2Job
 
 LOG = logging.getLogger()
 
@@ -93,8 +86,8 @@ class YarnApi(JobBrowserApi):
 
   @rm_ha
   def get_jobs(self, user, **kwargs):
-    state_filters = {'running': 'UNDEFINED', 'completed': 'SUCCEEDED', 'failed': 'FAILED', 'killed': 'KILLED',}
-    states_filters = {'running': 'NEW,NEW_SAVING,SUBMITTED,ACCEPTED,RUNNING', 'completed': 'FINISHED', 'failed': 'FAILED,KILLED',}
+    state_filters = {'running': 'UNDEFINED', 'completed': 'SUCCEEDED', 'failed': 'FAILED', 'killed': 'KILLED', }
+    states_filters = {'running': 'NEW,NEW_SAVING,SUBMITTED,ACCEPTED,RUNNING', 'completed': 'FINISHED', 'failed': 'FAILED,KILLED', }
     filters = {}
 
     if kwargs['username']:
@@ -108,11 +101,11 @@ class YarnApi(JobBrowserApi):
     if kwargs.get('time_value'):
       filters['startedTimeBegin'] = self._get_started_time_begin(kwargs.get('time_value'), kwargs.get('time_unit'))
 
-    if self.resource_manager_api: # This happens when yarn is not configured, but we need jobbrowser for Impala
+    if self.resource_manager_api:  # This happens when yarn is not configured, but we need jobbrowser for Impala
       json = self.resource_manager_api.apps(**filters)
     else:
       json = {}
-    if type(json) == str and 'This is standby RM' in json:
+    if type(json) is str and 'This is standby RM' in json:
       raise Exception(json)
 
     if json.get('apps'):
@@ -147,12 +140,10 @@ class YarnApi(JobBrowserApi):
                   is_admin(user) or
                   job.user == user.username]
 
-
   def _get_job_from_history_server(self, job_id):
     resp = self.history_server_api.job(self.user, job_id)
     return YarnJob(self.history_server_api, resp['job'])
 
-
   @rm_ha
   def get_job(self, jobid):
     job_id = jobid.replace('application', 'job')
@@ -237,5 +228,5 @@ class ApplicationNotRunning(Exception):
 class JobExpired(Exception):
 
   def __init__(self, job):
-    super(JobExpired, self).__init__('JobExpired: %s' %job)
+    super(JobExpired, self).__init__('JobExpired: %s' % job)
     self.job = job

+ 5 - 12
apps/jobbrowser/src/jobbrowser/api2.py

@@ -15,28 +15,21 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import sys
 import json
 import logging
-import sys
-
 from urllib.request import Request, urlopen
 
 from django.http import HttpResponse
+from django.utils.translation import gettext as _
 
-from desktop.lib.i18n import smart_unicode
 from desktop.lib.django_util import JsonResponse
+from desktop.lib.i18n import smart_str
 from desktop.views import serve_403_error
-
 from jobbrowser.apis.base_api import get_api
 from jobbrowser.apis.query_store import query_store_proxy, stream_download_bundle
-
 from jobbrowser.conf import DISABLE_KILLING_JOBS, USE_PROXY
 
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
-
 LOG = logging.getLogger()
 
 
@@ -49,7 +42,7 @@ def api_error_handler(func):
     except Exception as e:
       LOG.exception('Error running %s' % func)
       response['status'] = -1
-      response['message'] = smart_unicode(e)
+      response['message'] = smart_str(e)
     finally:
       if response:
         return JsonResponse(response)
@@ -167,7 +160,7 @@ def profile(request):
   ])
 
   api = get_api(request.user, interface, cluster=cluster)
-  api._set_request(request) # For YARN
+  api._set_request(request)  # For YARN
 
   resp = api.profile(app_id, app_type, app_property, app_filters)
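
The `smart_unicode` to `smart_str` swap above sits inside the API error-handler decorator. A minimal self-contained sketch of that decorator shape, assuming Hue's `desktop.lib` modules are importable (the `@wraps` is an addition of this sketch, not of the diff):

```python
import logging
from functools import wraps

from desktop.lib.django_util import JsonResponse
from desktop.lib.i18n import smart_str  # replaces the removed smart_unicode

LOG = logging.getLogger()


def api_error_handler(func):
  @wraps(func)
  def decorator(*args, **kwargs):
    response = {}
    try:
      return func(*args, **kwargs)  # success: response stays empty, return value propagates
    except Exception as e:
      LOG.exception('Error running %s' % func)
      response['status'] = -1
      response['message'] = smart_str(e)  # stringify the exception safely
    finally:
      if response:
        return JsonResponse(response)  # only on error
  return decorator
```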
 

+ 8 - 11
apps/jobbrowser/src/jobbrowser/apis/base_api.py

@@ -15,19 +15,16 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from builtins import object
-import logging
-import posixpath
 import re
 import sys
+import logging
+import posixpath
+from builtins import object
 
-from hadoop.fs.hadoopfs import Hdfs
-from desktop.lib.exceptions_renderable import PopupException
+from django.utils.translation import gettext as _
 
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
+from desktop.lib.exceptions_renderable import PopupException
+from hadoop.fs.hadoopfs import Hdfs
 
 LOG = logging.getLogger()
 
@@ -96,13 +93,13 @@ class Api(object):
 
   def apps(self, filters): return {'apps': [], 'total': 0}
 
-  def app(self, appid): return {} # Also contains progress (0-100) and status [RUNNING, SUCCEEDED, PAUSED, FAILED]
+  def app(self, appid): return {}  # Also contains progress (0-100) and status [RUNNING, SUCCEEDED, PAUSED, FAILED]
 
   def action(self, app_ids, operation): return {}
 
   def logs(self, appid, app_type, log_name, is_embeddable=False): return {'progress': 0, 'logs': ''}
 
-  def profile(self, appid, app_type, app_property, app_filters): return {} # Tasks, XML, counters...
+  def profile(self, appid, app_type, app_property, app_filters): return {}  # Tasks, XML, counters...
 
   def _set_request(self, request):
     self.request = request

+ 10 - 20
apps/jobbrowser/src/jobbrowser/apis/beat_api.py

@@ -15,21 +15,16 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import logging
 import sys
-
+import logging
 from datetime import datetime
 
 from dateutil import parser
-from desktop.lib.scheduler.lib.beat import CeleryBeatApi
+from django.utils.translation import gettext as _
 
+from desktop.lib.scheduler.lib.beat import CeleryBeatApi
 from jobbrowser.apis.base_api import Api
 
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
-
 LOG = logging.getLogger()
 
 
@@ -47,7 +42,7 @@ class BeatApi(Api):
           'status': self._massage_status(app),
           'apiStatus': self._api_status(self._massage_status(app)),
           'type': 'celery-beat',
-          'user': app['description'], # No user id available yet
+          'user': app['description'],  # No user id available yet
           'progress': 50,
           'queue': app['queue'],
           'canWrite': self.user.username == app['description'],
@@ -58,7 +53,6 @@ class BeatApi(Api):
       'total': len(tasks)
     }
 
-
   def app(self, appid):
     appid = appid.rsplit('-')[-1]
     api = CeleryBeatApi(user=self.user)
@@ -81,7 +75,6 @@ class BeatApi(Api):
       }
     }
 
-
   def action(self, app_ids, operation):
     api = CeleryBeatApi(user=self.user)
 
@@ -95,13 +88,15 @@ class BeatApi(Api):
       except Exception:
         LOG.exception('Could not stop job %s' % app_id)
 
-    return {'kills': operations, 'status': len(app_ids) - len(operations), 'message': _('%s signal sent to %s') % (operation['action'], operations)}
-
+    return {
+      'kills': operations,
+      'status': len(app_ids) - len(operations),
+      'message': _('%s signal sent to %s') % (operation['action'], operations),
+    }
 
   def logs(self, appid, app_type, log_name=None, is_embeddable=False):
     return {'logs': ''}
 
-
   def profile(self, appid, app_type, app_property, app_filters):
     appid = appid.rsplit('-')[-1]
 
@@ -112,14 +107,12 @@ class BeatApi(Api):
     else:
       return {}
 
-
   def _api_status(self, status):
     if status == 'RUNNING':
       return 'RUNNING'
     else:
       return 'PAUSED'
 
-
   def _massage_status(self, task):
     return 'RUNNING' if task['enabled'] else 'PAUSED'
 
@@ -170,15 +163,12 @@ class LivyJobApi(Api):
 
     return common
 
-
   def action(self, appid, action):
     return {}
 
-
   def logs(self, appid, app_type, log_name=None, is_embeddable=False):
     return {'logs': ''}
 
-
   def profile(self, appid, app_type, app_property):
     return {}
 
@@ -188,4 +178,4 @@ class LivyJobApi(Api):
     elif status in ['COMPLETED']:
       return 'SUCCEEDED'
     else:
-      return 'FAILED' # INTERRUPTED , KILLED, TERMINATED and FAILED
+      return 'FAILED'  # INTERRUPTED , KILLED, TERMINATED and FAILED

+ 19 - 25
apps/jobbrowser/src/jobbrowser/apis/beeswax_query_api.py

@@ -14,24 +14,18 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from builtins import filter
-
-import logging
 import re
 import sys
-
+import logging
+from builtins import filter
 from datetime import datetime
 
+from django.utils.translation import gettext as _
+
 from desktop.lib.exceptions_renderable import PopupException
 from desktop.lib.python_util import current_ms_from_utc
-
 from jobbrowser.apis.base_api import Api
 
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
-
 LOG = logging.getLogger()
 
 try:
@@ -43,13 +37,13 @@ except Exception as e:
 class BeeswaxQueryApi(Api):
 
   def __init__(self, user, cluster=None):
-    self.user=user
-    self.cluster=cluster
+    self.user = user
+    self.cluster = cluster
 
   def apps(self, filters):
     filter_map = self._get_filter_map(filters)
     limit = filters.get('pagination', {'limit': 25}).get('limit')
-    jobs = query_history.get_query_history(request_user=filter_map.get('effective_user'), start_date=filter_map.get('date'), start_time=filter_map.get('time'), query_id=filter_map.get('query_id'), status=filter_map.get('status'), limit=limit)
+    jobs = query_history.get_query_history(request_user=filter_map.get('effective_user'), start_date=filter_map.get('date'), start_time=filter_map.get('time'), query_id=filter_map.get('query_id'), status=filter_map.get('status'), limit=limit)  # noqa: E501
 
     current_time = current_ms_from_utc()
     apps = {
@@ -123,7 +117,7 @@ class BeeswaxQueryApi(Api):
   def action(self, appid, action):
     message = {'message': '', 'status': 0}
 
-    return message;
+    return message
 
   def logs(self, appid, app_type, log_name=None, is_embeddable=False):
     return {'logs': ''}
@@ -131,12 +125,12 @@ class BeeswaxQueryApi(Api):
   def profile(self, appid, app_type, app_property, app_filters):
     message = {'message': '', 'status': 0}
 
-    return message;
+    return message
 
   def profile_encoded(self, appid):
     message = {'message': '', 'status': 0}
 
-    return message;
+    return message
 
   def _get_status(self, job):
     return 'RUNNING' if len(job[1]) <= 1 else "FINISHED"
@@ -155,18 +149,18 @@ class BeeswaxQueryApi(Api):
     filter_map = {}
     if filters.get("text"):
       filter_names = {
-        'user':'effective_user',
-        'id':'query_id',
-        'name':'state',
-        'type':'stmt_type',
-        'status':'status'
+        'user': 'effective_user',
+        'id': 'query_id',
+        'name': 'state',
+        'type': 'stmt_type',
+        'status': 'status'
       }
 
       def make_lambda(name, value):
         return lambda app: app[name] == value
 
       for key, name in list(filter_names.items()):
-          text_filter = re.search(r"\s*("+key+")\s*:([^ ]+)", filters.get("text"))
+          text_filter = re.search(r"\s*(" + key + r")\s*:([^ ]+)", filters.get("text"))
           if text_filter and text_filter.group(1) == key:
             filter_map[name] = text_filter.group(2).strip()
 
@@ -191,10 +185,10 @@ class BeeswaxQueryApi(Api):
     elif period == 's':
       return float(time) * 1000
     elif period == 'm':
-      return float(time) * 60000 #1000*60
+      return float(time) * 60000  # 1000*60
     elif period == 'h':
-      return float(time) * 3600000 #1000*60*60
+      return float(time) * 3600000  # 1000*60*60
     elif period == 'd':
       return float(time) * 86400000  # 1000*60*60*24
     else:
-      return float(time)
+      return float(time)
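
The regex change above marks the concatenated fragment as a raw string; without `r''`, escapes like `\s` in a plain string raise W605 (invalid escape sequence) and only work because Python still tolerates them. A small illustration of the filter parsing, using a hypothetical `user:admin` free-text filter:

```python
import re

key = 'user'
text = 'user:admin duration>10'  # hypothetical Job Browser text filter

match = re.search(r"\s*(" + key + r")\s*:([^ ]+)", text)
if match:
  print(match.group(1))  # 'user'
  print(match.group(2))  # 'admin'
```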

+ 13 - 24
apps/jobbrowser/src/jobbrowser/apis/clusters.py

@@ -15,23 +15,16 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import logging
 import sys
-
+import logging
 from datetime import datetime
-from dateutil import parser
 
+from dateutil import parser
 from django.utils import timezone
-
-from notebook.connectors.altus import DataWarehouse2Api
+from django.utils.translation import gettext as _
 
 from jobbrowser.apis.base_api import Api
-
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
-
+from notebook.connectors.altus import DataWarehouse2Api
 
 LOG = logging.getLogger()
 
@@ -45,19 +38,18 @@ class ClusterApi(Api):
     super(ClusterApi, self).__init__(user)
 
     self.version = version
-    self.api = DataWarehouse2Api(self.user) 
-
+    self.api = DataWarehouse2Api(self.user)
 
   def apps(self, filters):
-    #jobs = self.api.list_clusters()
+    # jobs = self.api.list_clusters()
 
     return {
       u'status': 0,
       u'total': 3,
       u'apps': [
-        {u'status': u'ONLINE', u'name': u'Internal EDH', u'submitted': u'2018-10-04 08:34:39.128886', u'queue': u'group', u'user': u'jo0', u'canWrite': False, u'duration': 0, u'progress': u'100 / 100', u'type': u'GKE 100 nodes 100CPU 20TB', u'id': u'crn:altus:engine:k8s:12a0079b-1591-4ca0-b721-a446bda74e67:cluster:jo0/cbf7bbb1-f956-45e4-a269-d239efbc9996', u'apiStatus': u'RUNNING'},
-        {u'status': u'ONLINE', u'name': u'gke_gcp-eng-dsdw_us-west2-b_impala-demo', u'submitted': u'2018-10-04 08:34:39.128881', u'queue': u'group', u'user': u'r0', u'canWrite': False, u'duration': 0, u'progress': u'4 / 4', u'type': u'GKE 4 nodes 16CPU 64GB', u'id': u'crn:altus:engine:k8s:12a0079b-1591-4ca0-b721-a446bda74e67:cluster:r0/0da5e627-ee33-45c5-9179-cc6b95008d2e', u'apiStatus': u'RUNNING'},
-        {u'status': u'ONLINE', u'name': u'DW-fraud', u'submitted': u'2018-10-04 08:34:39.128881', u'queue': u'group', u'user': u'r0', u'canWrite': False, u'duration': 0, u'progress': u'50 / 50', u'type': u'OpenShift 50 nodes 30CPU 2TB', u'id': u'crn:altus:engine:k8s:12a0079b-1591-4ca0-b721-a446bda74e67:cluster:r0/0da5e627-ee33-45c5-9179-cc6b95008d2e', u'apiStatus': u'RUNNING'},
+        {u'status': u'ONLINE', u'name': u'Internal EDH', u'submitted': u'2018-10-04 08:34:39.128886', u'queue': u'group', u'user': u'jo0', u'canWrite': False, u'duration': 0, u'progress': u'100 / 100', u'type': u'GKE 100 nodes 100CPU 20TB', u'id': u'crn:altus:engine:k8s:12a0079b-1591-4ca0-b721-a446bda74e67:cluster:jo0/cbf7bbb1-f956-45e4-a269-d239efbc9996', u'apiStatus': u'RUNNING'},  # noqa: E501
+        {u'status': u'ONLINE', u'name': u'gke_gcp-eng-dsdw_us-west2-b_impala-demo', u'submitted': u'2018-10-04 08:34:39.128881', u'queue': u'group', u'user': u'r0', u'canWrite': False, u'duration': 0, u'progress': u'4 / 4', u'type': u'GKE 4 nodes 16CPU 64GB', u'id': u'crn:altus:engine:k8s:12a0079b-1591-4ca0-b721-a446bda74e67:cluster:r0/0da5e627-ee33-45c5-9179-cc6b95008d2e', u'apiStatus': u'RUNNING'},  # noqa: E501
+        {u'status': u'ONLINE', u'name': u'DW-fraud', u'submitted': u'2018-10-04 08:34:39.128881', u'queue': u'group', u'user': u'r0', u'canWrite': False, u'duration': 0, u'progress': u'50 / 50', u'type': u'OpenShift 50 nodes 30CPU 2TB', u'id': u'crn:altus:engine:k8s:12a0079b-1591-4ca0-b721-a446bda74e67:cluster:r0/0da5e627-ee33-45c5-9179-cc6b95008d2e', u'apiStatus': u'RUNNING'},  # noqa: E501
       ]
     }
 
@@ -71,14 +63,13 @@ class ClusterApi(Api):
         'user': app['clusterName'].split('-', 1)[0],
         'progress': app.get('progress', 100),
         'queue': 'group',
-        'duration': ((datetime.now() - parser.parse(app['creationDate']).replace(tzinfo=None)).seconds * 1000) if app['creationDate'] else 0,
+        'duration': ((datetime.now() - parser.parse(app['creationDate']).replace(tzinfo=None)).seconds * 1000) if app['creationDate'] else 0,  # noqa: E501
         'submitted': app['creationDate'],
         'canWrite': True
       } for app in sorted(jobs['clusters'], key=lambda a: a['creationDate'], reverse=True)],
       'total': len(jobs['clusters'])
     }
 
-
   def app(self, appid):
     handle = self.api.describe_cluster(cluster_id=appid)
 
@@ -116,20 +107,18 @@ class ClusterApi(Api):
         elif result.get('contents') and message.get('status') != -1:
           message['message'] = result.get('contents')
 
-    return message;
-
+    return message
 
   def logs(self, appid, app_type, log_name=None, is_embeddable=False):
     return {'logs': ''}
 
-
   def profile(self, appid, app_type, app_property):
     return {}
 
   def _api_status(self, status):
-    if status in ['CREATING', 'CREATED', 'ONLINE', 'SCALING_UP', 'SCALING_DOWN', 'STARTING']: # ONLINE ... are from K8s
+    if status in ['CREATING', 'CREATED', 'ONLINE', 'SCALING_UP', 'SCALING_DOWN', 'STARTING']:  # ONLINE ... are from K8s
       return 'RUNNING'
     elif status in ['ARCHIVING', 'COMPLETED', 'TERMINATING', 'STOPPED']:
       return 'SUCCEEDED'
     else:
-      return 'FAILED' # KILLED and FAILED
+      return 'FAILED'  # KILLED and FAILED
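
The `# noqa: E501` comments added to the fixture rows above suppress Ruff's line-length check per line rather than wrapping long literal data, which would hurt readability. The mechanism in isolation:

```python
# E501 flags overlong lines; a trailing noqa exempts just this one.
row = {'status': 'ONLINE', 'name': 'Internal EDH', 'user': 'jo0', 'type': 'GKE 100 nodes 100CPU 20TB', 'progress': '100 / 100'}  # noqa: E501
```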

+ 7 - 19
apps/jobbrowser/src/jobbrowser/apis/data_eng_api.py

@@ -15,19 +15,14 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import logging
 import sys
+import logging
+from datetime import datetime, timedelta
 
-from datetime import datetime,  timedelta
-
-from notebook.connectors.altus import DataEngApi, DATE_FORMAT
+from django.utils.translation import gettext as _
 
 from jobbrowser.apis.base_api import Api
-
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
+from notebook.connectors.altus import DATE_FORMAT, DataEngApi
 
 LOG = logging.getLogger()
 
@@ -59,11 +54,9 @@ class DataEngClusterApi(Api):
       'total': len(jobs)
     }
 
-
   def app(self, appid):
     return {}
 
-
   def action(self, appid, action):
     message = {'message': '', 'status': 0}
 
@@ -78,13 +71,11 @@ class DataEngClusterApi(Api):
         elif result.get('contents') and message.get('status') != -1:
           message['message'] = result.get('contents')
 
-    return message;
-
+    return message
 
   def logs(self, appid, app_type, log_name=None, is_embeddable=False):
     return {'logs': ''}
 
-
   def profile(self, appid, app_type, app_property):
     return {}
 
@@ -94,7 +85,7 @@ class DataEngClusterApi(Api):
     elif status in ['ARCHIVING', 'COMPLETED', 'TERMINATING']:
       return 'SUCCEEDED'
     else:
-      return 'FAILED' # KILLED and FAILED
+      return 'FAILED'  # KILLED and FAILED
 
 
 class DataEngJobApi(Api):
@@ -155,15 +146,12 @@ class DataEngJobApi(Api):
 
     return common
 
-
   def action(self, appid, action):
     return {}
 
-
   def logs(self, appid, app_type, log_name=None, is_embeddable=False):
     return {'logs': ''}
 
-
   def profile(self, appid, app_type, app_property):
     return {}
 
@@ -173,4 +161,4 @@ class DataEngJobApi(Api):
     elif status in ['COMPLETED']:
       return 'SUCCEEDED'
     else:
-      return 'FAILED' # INTERRUPTED , KILLED, TERMINATED and FAILED
+      return 'FAILED'  # INTERRUPTED , KILLED, TERMINATED and FAILED

+ 9 - 20
apps/jobbrowser/src/jobbrowser/apis/data_warehouse.py

@@ -15,23 +15,16 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import logging
 import sys
-
+import logging
 from datetime import datetime
-from dateutil import parser
 
+from dateutil import parser
 from django.utils import timezone
-
-from notebook.connectors.altus import AnalyticDbApi, DataWarehouse2Api
+from django.utils.translation import gettext as _
 
 from jobbrowser.apis.base_api import Api
-
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
-
+from notebook.connectors.altus import AnalyticDbApi, DataWarehouse2Api
 
 LOG = logging.getLogger()
 
@@ -45,8 +38,7 @@ class DataWarehouseClusterApi(Api):
     super(DataWarehouseClusterApi, self).__init__(user)
 
     self.version = version
-    self.api = DataWarehouse2Api(self.user) if version == 2 else AnalyticDbApi(self.user) 
-
+    self.api = DataWarehouse2Api(self.user) if version == 2 else AnalyticDbApi(self.user)
 
   def apps(self, filters):
     jobs = self.api.list_clusters()
@@ -57,18 +49,17 @@ class DataWarehouseClusterApi(Api):
         'name': '%(clusterName)s' % app,
         'status': app['status'],
         'apiStatus': self._api_status(app['status']),
-        'type': '%(instanceType)s' % app, #'Altus %(workersGroupSize)sX %(instanceType)s %(cdhVersion)s' % app,
+        'type': '%(instanceType)s' % app,  # 'Altus %(workersGroupSize)sX %(instanceType)s %(cdhVersion)s' % app,
         'user': app['clusterName'].split('-', 1)[0],
         'progress': app.get('progress', 100),
         'queue': 'group',
-        'duration': ((datetime.now() - parser.parse(app['creationDate']).replace(tzinfo=None)).seconds * 1000) if app['creationDate'] else 0,
+        'duration': ((datetime.now() - parser.parse(app['creationDate']).replace(tzinfo=None)).seconds * 1000) if app['creationDate'] else 0,  # noqa: E501
         'submitted': app['creationDate'],
         'canWrite': True
       } for app in sorted(jobs['clusters'], key=lambda a: a['creationDate'], reverse=True)],
       'total': len(jobs['clusters'])
     }
 
-
   def app(self, appid):
     handle = self.api.describe_cluster(cluster_id=appid)
 
@@ -104,13 +95,11 @@ class DataWarehouseClusterApi(Api):
         elif result.get('contents') and message.get('status') != -1:
           message['message'] = result.get('contents')
 
-    return message;
-
+    return message
 
   def logs(self, appid, app_type, log_name=None, is_embeddable=False):
     return {'logs': ''}
 
-
   def profile(self, app_id, app_type, app_property, app_filters):
     return {}
 
@@ -122,4 +111,4 @@ class DataWarehouseClusterApi(Api):
     elif status in ['ARCHIVING', 'COMPLETED', 'TERMINATING', 'TERMINATED']:
       return 'SUCCEEDED'
     else:
-      return 'FAILED' # KILLED and FAILED
+      return 'FAILED'  # KILLED and FAILED

+ 6 - 16
apps/jobbrowser/src/jobbrowser/apis/history.py

@@ -15,24 +15,18 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import logging
 import sys
-
+import logging
 from datetime import datetime
+
 from dateutil import parser
+from django.utils.translation import gettext as _
 
 from desktop.models import Document2
-from notebook.api import _get_statement
-from notebook.models import Notebook
-
 from jobbrowser.apis.base_api import Api
 from jobbrowser.conf import MAX_JOB_FETCH
-
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
-
+from notebook.api import _get_statement
+from notebook.models import Notebook
 
 LOG = logging.getLogger()
 
@@ -46,7 +40,7 @@ class HistoryApi(Api):
     for app in tasks:
       # Copied, Document class should have a get_history method (via method or inheritance)
       notebook = Notebook(document=app).get_data()
-      is_notification_manager = False # Supposed SQL Editor query only right now
+      is_notification_manager = False  # Supposed SQL Editor query only right now
       if 'snippets' in notebook:
         statement = notebook['description'] if is_notification_manager else _get_statement(notebook)
         history = {
@@ -86,7 +80,6 @@ class HistoryApi(Api):
       'total': len(tasks)
     }
 
-
   def app(self, appid):
     appid = appid.rsplit('-')[-1]
 
@@ -108,7 +101,6 @@ class HistoryApi(Api):
       }
     }
 
-
   def action(self, app_ids, operation):
     # Notebook API
     pass
@@ -116,13 +108,11 @@ class HistoryApi(Api):
   def logs(self, appid, app_type, log_name=None, is_embeddable=False):
     return {'logs': ''}
 
-
   def profile(self, appid, app_type, app_property, app_filters):
     appid = appid.rsplit('-')[-1]
 
     return {}
 
-
   def _api_status(self, task):
     if task['data']['status'] in ('expired', 'failed'):
       return 'FAILED'

+ 6 - 12
apps/jobbrowser/src/jobbrowser/apis/hive_query_api.py

@@ -14,31 +14,25 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from builtins import filter
-
+import sys
 import logging
+from builtins import filter
+from datetime import datetime
 from logging import exception
-import sys
 
-from datetime import datetime
+from django.utils.translation import gettext as _
 
 from beeswax.models import QueryHistory
 from desktop.lib.exceptions_renderable import PopupException
 from desktop.lib.python_util import current_ms_from_utc
 from desktop.lib.rest.http_client import HttpClient
 from desktop.lib.rest.resource import Resource
-from notebook.models import _get_notebook_api, make_notebook, MockRequest
-
 from jobbrowser.apis.base_api import Api
-
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
-
+from notebook.models import MockRequest, _get_notebook_api, make_notebook
 
 LOG = logging.getLogger()
 
+
 class HiveQueryApi(Api):
   HEADERS = {'X-Requested-By': 'das'}
 

+ 40 - 63
apps/jobbrowser/src/jobbrowser/apis/job_api.py

@@ -15,31 +15,26 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import sys
 import json
 import logging
-import sys
 
 from django.utils.encoding import smart_str
-from hadoop.yarn import resource_manager_api
+from django.utils.translation import gettext as _
 
 from desktop.lib.django_util import JsonResponse
 from desktop.lib.exceptions import MessageException
 from desktop.lib.exceptions_renderable import PopupException
-from jobbrowser.conf import MAX_JOB_FETCH, LOG_OFFSET
+from hadoop.yarn import resource_manager_api
+from jobbrowser.conf import LOG_OFFSET, MAX_JOB_FETCH
 from jobbrowser.views import job_executor_logs, job_single_logs
 
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
-
-
 LOG = logging.getLogger()
 LOG_OFFSET_BYTES = LOG_OFFSET.get()
 
 
 try:
-  from jobbrowser.api import YarnApi as NativeYarnApi, ApplicationNotRunning, JobExpired
+  from jobbrowser.api import ApplicationNotRunning, JobExpired, YarnApi as NativeYarnApi
   from jobbrowser.apis.base_api import Api, MockDjangoRequest, _extract_query_params
   from jobbrowser.views import job_attempt_logs_json, kill_job, massage_job_for_json
   from jobbrowser.yarn_models import Application
@@ -71,7 +66,7 @@ class JobApi(Api):
 
   def _get_api(self, appid):
     try:
-      if type(appid) == list:
+      if type(appid) is list:
         return self.yarn_api
       elif appid.startswith('task_'):
         return YarnMapReduceTaskApi(self.user, appid)
@@ -82,8 +77,8 @@ class JobApi(Api):
       elif appid.find('_executor_') > 0:
         return SparkExecutorApi(self.user, appid)
       else:
-        return self.yarn_api # application_
-    except:
+        return self.yarn_api  # application_
+    except Exception:
       raise PopupException("Job has most likely failed, so there is no attempt or appattempt information available")

 
   def _set_request(self, request):
@@ -134,7 +129,6 @@ class YarnApi(Api):
       'total': len(apps)
     }
 
-
   def app(self, appid):
     try:
       job = NativeYarnApi(self.user).get_job(jobid=appid)
@@ -151,7 +145,6 @@ class YarnApi(Api):
       LOG.exception(msg % appid)
       raise PopupException(_(msg) % appid, detail=e)
 
-
     app = massage_job_for_json(job, user=self.user)
 
     common = {
@@ -211,7 +204,6 @@ class YarnApi(Api):
 
     return common
 
-
   def action(self, operation, app_ids):
     if operation['action'] == 'kill':
       kills = []
@@ -226,7 +218,6 @@ class YarnApi(Api):
     else:
       return {}
 
-
   def logs(self, appid, app_type, log_name, is_embeddable=False):
     logs = ''
     logs_list = []
@@ -238,7 +229,7 @@ class YarnApi(Api):
           logs = parseResponse.get('logs')
           logs_list = parseResponse.get('logsList')
           if logs and len(logs) == 4:
-            if app_type == 'YarnV2' and logs[0]: #logs[0] is diagnostics
+            if app_type == 'YarnV2' and logs[0]:  # logs[0] is diagnostics
               logs = logs[0]
             else:
               logs = logs[1]
@@ -254,7 +245,6 @@ class YarnApi(Api):
       LOG.warning('No task attempt found for logs: %s' % smart_str(e))
     return {'logs': logs, 'logsList': logs_list}
 
-
   def profile(self, appid, app_type, app_property, app_filters):
     if app_type == 'MAPREDUCE':
       if app_property == 'tasks':
@@ -286,7 +276,8 @@ class YarnApi(Api):
     elif status == 'SUCCEEDED':
       return 'SUCCEEDED'
     else:
-      return 'FAILED' # FAILED, KILLED
+      return 'FAILED'  # FAILED, KILLED
+
 
 class YarnAttemptApi(Api):
 
@@ -297,7 +288,6 @@ class YarnAttemptApi(Api):
     self.task_id = '_'.join(app_id.replace(start, 'task_').split('_')[:5])
     self.attempt_id = app_id.split('_')[3]
 
-
   def apps(self):
     attempts = NativeYarnApi(self.user).get_task(jobid=self.app_id, task_id=self.task_id).attempts
 
@@ -306,7 +296,6 @@ class YarnAttemptApi(Api):
       'total': len(attempts)
     }
 
-
   def app(self, appid):
     task = NativeYarnApi(self.user).get_task(jobid=self.app_id, task_id=self.task_id).get_attempt(self.attempt_id)
 
@@ -319,7 +308,6 @@ class YarnAttemptApi(Api):
 
     return common
 
-
   def logs(self, appid, app_type, log_name, is_embeddable=False):
     if log_name == 'default':
       log_name = 'stdout'
@@ -329,46 +317,44 @@ class YarnAttemptApi(Api):
 
     return {'progress': 0, 'logs': syslog if log_name == 'syslog' else stderr if log_name == 'stderr' else stdout}
 
-
   def profile(self, appid, app_type, app_property, app_filters):
     if app_property == 'counters':
       return NativeYarnApi(self.user).get_task(jobid=self.app_id, task_id=self.task_id).get_attempt(self.attempt_id).counters
 
     return {}
 
-
   def _massage_task(self, task):
     return {
-        #"elapsedMergeTime" : task.elapsedMergeTime,
-        #"shuffleFinishTime" : task.shuffleFinishTime,
+        # "elapsedMergeTime" : task.elapsedMergeTime,
+        # "shuffleFinishTime" : task.shuffleFinishTime,
         'id': task.appAttemptId if hasattr(task, 'appAttemptId') else '',
         'appAttemptId': task.appAttemptId if hasattr(task, 'appAttemptId') else '',
         'blacklistedNodes': task.blacklistedNodes if hasattr(task, 'blacklistedNodes') else '',
-        'containerId' : task.containerId if hasattr(task, 'containerId') else '',
+        'containerId': task.containerId if hasattr(task, 'containerId') else '',
         'diagnostics': task.diagnostics if hasattr(task, 'diagnostics') else '',
-        "startTimeFormatted" : task.startTimeFormatted if hasattr(task, 'startTimeFormatted') else '',
-        "startTime" : int(task.startTime) if hasattr(task, 'startTime') else '',
-        "finishTime" : int(task.finishedTime) if hasattr(task, 'finishedTime') else '',
-        "finishTimeFormatted" : task.finishTimeFormatted if hasattr(task, 'finishTimeFormatted') else '',
-        "type" : task.type + '_ATTEMPT' if hasattr(task, 'type') else '',
+        "startTimeFormatted": task.startTimeFormatted if hasattr(task, 'startTimeFormatted') else '',
+        "startTime": int(task.startTime) if hasattr(task, 'startTime') else '',
+        "finishTime": int(task.finishedTime) if hasattr(task, 'finishedTime') else '',
+        "finishTimeFormatted": task.finishTimeFormatted if hasattr(task, 'finishTimeFormatted') else '',
+        "type": task.type + '_ATTEMPT' if hasattr(task, 'type') else '',
         'nodesBlacklistedBySystem': task.nodesBlacklistedBySystem if hasattr(task, 'nodesBlacklistedBySystem') else '',
         'nodeId': task.nodeId if hasattr(task, 'nodeId') else '',
         'nodeHttpAddress': task.nodeHttpAddress if hasattr(task, 'nodeHttpAddress') else '',
         'logsLink': task.logsLink if hasattr(task, 'logsLink') else '',
         "app_id": self.app_id,
         "task_id": self.task_id,
-        'duration' : task.duration if hasattr(task, 'duration') else '',
-        'durationFormatted' : task.duration if hasattr(task, 'durationFormatted') else '',
+        'duration': task.duration if hasattr(task, 'duration') else '',
+        'durationFormatted': task.duration if hasattr(task, 'durationFormatted') else '',
         'state': task.status if hasattr(task, 'status') else ''
     }
 
+
 class YarnMapReduceTaskApi(Api):
 
   def __init__(self, user, app_id):
     Api.__init__(self, user)
     self.app_id = '_'.join(app_id.replace('task_', 'application_').split('_')[:3])
 
-
   def apps(self, filters):
     filter_params = {
       'task_types': None,
@@ -403,7 +389,6 @@ class YarnMapReduceTaskApi(Api):
       'total': len(tasks)
     }
 
-
   def app(self, appid):
     task = NativeYarnApi(self.user).get_task(jobid=self.app_id, task_id=appid)
 
@@ -417,7 +402,6 @@ class YarnMapReduceTaskApi(Api):
 
     return common
 
-
   def logs(self, appid, app_type, log_name, is_embeddable=False):
     if log_name == 'default':
       log_name = 'stdout'
@@ -430,7 +414,6 @@ class YarnMapReduceTaskApi(Api):
       logs = ''
     return {'progress': 0, 'logs': logs}
 
-
   def profile(self, appid, app_type, app_property, app_filters):
     if app_property == 'attempts':
       return {
@@ -461,7 +444,7 @@ class YarnMapReduceTaskApi(Api):
     elif status == 'SUCCEEDED':
       return 'SUCCEEDED'
     else:
-      return 'FAILED' # FAILED, KILLED
+      return 'FAILED'  # FAILED, KILLED
 
 
 class YarnMapReduceTaskAttemptApi(Api):
@@ -473,7 +456,6 @@ class YarnMapReduceTaskAttemptApi(Api):
     self.task_id = '_'.join(app_id.replace(start, 'task_').split('_')[:5])
     self.attempt_id = app_id
 
-
   def apps(self):
     attempts = NativeYarnApi(self.user).get_task(jobid=self.app_id, task_id=self.task_id).attempts
 
@@ -482,7 +464,6 @@ class YarnMapReduceTaskAttemptApi(Api):
       'total': len(attempts)
     }
 
-
   def app(self, appid):
     task = NativeYarnApi(self.user).get_task(jobid=self.app_id, task_id=self.task_id).get_attempt(self.attempt_id)
 
@@ -495,7 +476,6 @@ class YarnMapReduceTaskAttemptApi(Api):
 
     return common
 
-
   def logs(self, appid, app_type, log_name, is_embeddable=False):
     if log_name == 'default':
       log_name = 'stdout'
@@ -505,43 +485,40 @@ class YarnMapReduceTaskAttemptApi(Api):
 
     return {'progress': 0, 'logs': syslog if log_name == 'syslog' else stderr if log_name == 'stderr' else stdout}
 
-
   def profile(self, appid, app_type, app_property, app_filters):
     if app_property == 'counters':
       return NativeYarnApi(self.user).get_task(jobid=self.app_id, task_id=self.task_id).get_attempt(self.attempt_id).counters
 
     return {}
 
-
   def _api_status(self, status):
     if status in ['NEW', 'SUBMITTED', 'ACCEPTED', 'RUNNING']:
       return 'RUNNING'
     elif status == 'SUCCEEDED':
       return 'SUCCEEDED'
     else:
-      return 'FAILED' # FAILED, KILLED
-
+      return 'FAILED'  # FAILED, KILLED
 
   def _massage_task(self, task):
     return {
-        #"elapsedMergeTime" : task.elapsedMergeTime,
-        #"shuffleFinishTime" : task.shuffleFinishTime,
-        "assignedContainerId" : task.assignedContainerId if hasattr(task, 'assignedContainerId') else task.amContainerId if hasattr(task, 'amContainerId') else '',
-        "progress" : task.progress if hasattr(task, 'progress') else '',
-        "elapsedTime" : task.elapsedTime if hasattr(task, 'elapsedTime') else '',
-        "state" : task.state if hasattr(task, 'state') else task.appAttemptState if hasattr(task, 'appAttemptState') else '',
-        #"elapsedShuffleTime" : task.elapsedShuffleTime,
-        #"mergeFinishTime" : task.mergeFinishTime,
-        "rack" : task.rack if hasattr(task, 'rack') else '',
-        #"elapsedReduceTime" : task.elapsedReduceTime,
-        "nodeHttpAddress" : task.nodeHttpAddress if hasattr(task, 'nodeHttpAddress') else '',
-        "type" : task.type + '_ATTEMPT' if hasattr(task, 'type') else '',
-        "startTime" : task.startTime if hasattr(task, 'startTime') else '',
-        "id" : task.id if hasattr(task, 'id') else task.appAttemptId if hasattr(task, 'appAttemptId') else '',
-        "finishTime" : task.finishTime if hasattr(task, 'finishTime') else int(task.finishedTime) if hasattr(task, 'finishedTime') else '',
+        # "elapsedMergeTime" : task.elapsedMergeTime,
+        # "shuffleFinishTime" : task.shuffleFinishTime,
+        "assignedContainerId": task.assignedContainerId if hasattr(task, 'assignedContainerId') else task.amContainerId if hasattr(task, 'amContainerId') else '',  # noqa: E501
+        "progress": task.progress if hasattr(task, 'progress') else '',
+        "elapsedTime": task.elapsedTime if hasattr(task, 'elapsedTime') else '',
+        "state": task.state if hasattr(task, 'state') else task.appAttemptState if hasattr(task, 'appAttemptState') else '',
+        # "elapsedShuffleTime" : task.elapsedShuffleTime,
+        # "mergeFinishTime" : task.mergeFinishTime,
+        "rack": task.rack if hasattr(task, 'rack') else '',
+        # "elapsedReduceTime" : task.elapsedReduceTime,
+        "nodeHttpAddress": task.nodeHttpAddress if hasattr(task, 'nodeHttpAddress') else '',
+        "type": task.type + '_ATTEMPT' if hasattr(task, 'type') else '',
+        "startTime": task.startTime if hasattr(task, 'startTime') else '',
+        "id": task.id if hasattr(task, 'id') else task.appAttemptId if hasattr(task, 'appAttemptId') else '',
+        "finishTime": task.finishTime if hasattr(task, 'finishTime') else int(task.finishedTime) if hasattr(task, 'finishedTime') else '',
         "app_id": self.app_id,
         "task_id": self.task_id,
-        'apiStatus': self._api_status(task.state) if hasattr(task, 'state') else self._api_status(task.appAttemptState) if hasattr(task, 'appAttemptState') else '',
+        'apiStatus': self._api_status(task.state) if hasattr(task, 'state') else self._api_status(task.appAttemptState) if hasattr(task, 'appAttemptState') else '',  # noqa: E501
         'host': task.host if hasattr(task, 'host') else '',
         'rpcPort': task.rpcPort if hasattr(task, 'rpcPort') else '',
         'diagnosticsInfo': task.diagnosticsInfo if hasattr(task, 'diagnosticsInfo') else ''
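
The `_massage_task()` methods above guard every field with a `hasattr()` conditional. An equivalent, more compact idiom is `getattr()` with a default; this is a sketch of an alternative, not part of the change:

```python
class Task(object):
  progress = '42'  # hypothetical task with only some attributes set

task = Task()

massaged = {
  'progress': getattr(task, 'progress', ''),  # '42'
  'rack': getattr(task, 'rack', ''),          # ''  -- attribute absent
  # '' short-circuits when 'type' is missing, matching the hasattr() fallback:
  'type': getattr(task, 'type', '') and task.type + '_ATTEMPT',
}
print(massaged)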

+ 5 - 17
apps/jobbrowser/src/jobbrowser/apis/livy_api.py

@@ -15,17 +15,13 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import logging
 import sys
+import logging
 
-from spark.livy_client import get_api
+from django.utils.translation import gettext as _
 
 from jobbrowser.apis.base_api import Api
-
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
+from spark.livy_client import get_api
 
 LOG = logging.getLogger()
 
@@ -53,7 +49,6 @@ class LivySessionsApi(Api):
       'total': jobs['total']
     }
 
-
   def app(self, appid):
     appid = appid.rsplit('-')[-1]
     api = get_api(self.user)
@@ -76,15 +71,12 @@ class LivySessionsApi(Api):
       }
     }
 
-
   def action(self, appid, action):
     return {}
 
-
   def logs(self, appid, app_type, log_name=None, is_embeddable=False):
     return {'logs': ''}
 
-
   def profile(self, appid, app_type, app_property, app_filters):
     appid = appid.rsplit('-')[-1]
 
@@ -95,14 +87,13 @@ class LivySessionsApi(Api):
     else:
       return {}
 
-
   def _api_status(self, status):
     if status in ['CREATING', 'CREATED', 'TERMINATING']:
       return 'RUNNING'
     elif status in ['ARCHIVING', 'COMPLETED']:
       return 'SUCCEEDED'
     else:
-      return 'FAILED' # KILLED and FAILED
+      return 'FAILED'  # KILLED and FAILED
 
 
 class LivyJobApi(Api):
@@ -151,15 +142,12 @@ class LivyJobApi(Api):
 
     return common
 
-
   def action(self, appid, action):
     return {}
 
-
   def logs(self, appid, app_type, log_name=None, is_embeddable=False):
     return {'logs': ''}
 
-
   def profile(self, appid, app_type, app_property):
     return {}
 
@@ -169,4 +157,4 @@ class LivyJobApi(Api):
     elif status in ['COMPLETED']:
       return 'SUCCEEDED'
     else:
-      return 'FAILED' # INTERRUPTED , KILLED, TERMINATED and FAILED
+      return 'FAILED'  # INTERRUPTED , KILLED, TERMINATED and FAILED

+ 27 - 30
apps/jobbrowser/src/jobbrowser/apis/query_api.py

@@ -15,38 +15,32 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from builtins import filter
-from builtins import range
-import itertools
-import logging
+import os
 import re
 import sys
 import time
+import logging
+import itertools
+from builtins import filter, range
 from datetime import datetime
+from urllib.parse import urlparse
+
 import pytz
 from babel import localtime
-import os
-
-from urllib.parse import urlparse
+from django.utils.translation import gettext as _
 
 from desktop.lib import export_csvxls
 from impala.conf import COORDINATOR_UI_SPNEGO
+from jobbrowser.apis.base_api import Api
 from libanalyze import analyze as analyzer, rules
 from notebook.conf import ENABLE_QUERY_ANALYSIS
 
-from jobbrowser.apis.base_api import Api
-
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
-
-ANALYZER = rules.TopDownAnalysis() # We need to parse some files so save as global
+ANALYZER = rules.TopDownAnalysis()  # We need to parse some files so save as global
 LOG = logging.getLogger()
 
 try:
-  from beeswax.models import Session, Compute
-  from impala.server import get_api as get_impalad_api, _get_impala_server_url
+  from beeswax.models import Compute, Session
+  from impala.server import _get_impala_server_url, get_api as get_impalad_api
 except ImportError as e:
   LOG.exception('Some application are not enabled: %s' % e)
 
@@ -65,7 +59,8 @@ def _get_api(user, cluster=None):
     server_url = _get_impala_server_url(session)
   return get_impalad_api(user=user, url=server_url)
 
-def _convert_to_6_digit_ms_local_time(start_time):  
+
+def _convert_to_6_digit_ms_local_time(start_time):
   if '.' in start_time:
     time, microseconds = start_time.split('.')
     if len(microseconds) > 6:
@@ -73,7 +68,7 @@ def _convert_to_6_digit_ms_local_time(start_time):
     start_time = '.'.join([time, microseconds])
   else:
     start_time = f'{start_time}.000000'
-  
+
   local_tz = pytz.timezone(os.environ.get('TZ', 'UTC'))
   # Convert to datetime object in UTC, convert to provided timezone, and then format back into a string
   return (datetime.strptime(start_time, "%Y-%m-%d %H:%M:%S.%f")
@@ -81,6 +76,7 @@ def _convert_to_6_digit_ms_local_time(start_time):
           .astimezone(local_tz)
           .strftime("%Y-%m-%d %H:%M:%S.%f"))
 
+
 class QueryApi(Api):
 
   def __init__(self, user, impala_api=None, cluster=None):
@@ -96,7 +92,7 @@ class QueryApi(Api):
 
     filter_list = self._get_filter_list(filters)
     jobs_iter = itertools.chain(jobs['in_flight_queries'], jobs['completed_queries'])
-    jobs_iter_filtered = self._n_filter(filter_list, jobs_iter)    
+    jobs_iter_filtered = self._n_filter(filter_list, jobs_iter)
 
     apps = {
       'apps': sorted([{
@@ -130,8 +126,8 @@ class QueryApi(Api):
   def _time_in_ms_groups(self, groups):
     time = 0
     for x in range(0, len(groups), 3):
-      if groups[x+1]:
-        time += self._time_in_ms(groups[x+1], groups[x+2])
+      if groups[x + 1]:
+        time += self._time_in_ms(groups[x + 1], groups[x + 2])
     return time
 
   def _time_in_ms(self, time, period):
@@ -142,9 +138,9 @@ class QueryApi(Api):
     elif period == 's':
       return float(time) * 1000
     elif period == 'm':
-      return float(time) * 60000 #1000*60
+      return float(time) * 60000  # 1000*60
     elif period == 'h':
-      return float(time) * 3600000 #1000*60*60
+      return float(time) * 3600000  # 1000*60*60
     elif period == 'd':
       return float(time) * 86400000  # 1000*60*60*24
     else:
@@ -164,8 +160,8 @@ class QueryApi(Api):
     parsed_api_url = urlparse(self.api.url)
 
     app.update({
-      'progress': float(progress_groups.group(1)) \
-          if progress_groups and progress_groups.group(1) else 100 \
+      'progress': float(progress_groups.group(1))
+          if progress_groups and progress_groups.group(1) else 100
             if self._api_status(app.get('status')) in ['SUCCEEDED', 'FAILED'] else 1,
       'type': 'queries',
       'doc_url': '%s/query_plan?query_id=%s' % (self.api.url, appid) if not COORDINATOR_UI_SPNEGO.get() else
@@ -195,7 +191,7 @@ class QueryApi(Api):
         elif result.get('contents') and message.get('status') != -1:
           message['message'] = result.get('contents')
 
-    return message;
+    return message
 
   def logs(self, appid, app_type, log_name=None, is_embeddable=False):
     return {'logs': ''}
@@ -214,12 +210,11 @@ class QueryApi(Api):
     else:
       return self._query(appid)
 
-
   def profile_encoded(self, appid):
     return self.api.get_query_profile_encoded(query_id=appid)
 
   def _memory(self, appid, app_type, app_property, app_filters):
-    return self.api.get_query_memory(query_id=appid);
+    return self.api.get_query_memory(query_id=appid)
 
   def _metrics(self, appid):
     query_profile = self.api.get_query_profile_encoded(appid)
@@ -253,6 +248,7 @@ class QueryApi(Api):
           return {'svg': 'hi-random'}
         else:
           return {'svg': 'hi-exchange'}
+
       def get_sigma_icon(o):
         if re.search(r'streaming', o['label_detail'], re.IGNORECASE):
           return {'svg': 'hi-sigma'}
@@ -275,6 +271,7 @@ class QueryApi(Api):
         'ANALYTIC': {'type': 'SINGULAR', 'icon': {'svg': 'hi-timeline'}},
         'UNION': {'type': 'UNION', 'icon': {'svg': 'hi-merge'}}
       }
+
       def process(node, mapping=mapping):
         node['id'], node['name'] = node['label'].split(':')
         details = mapping.get(node['name'])
@@ -335,7 +332,7 @@ class QueryApi(Api):
         return lambda app: app[name] == value
 
       for key, name in list(filter_names.items()):
-        text_filter = re.search(r"\s*("+key+")\s*:([^ ]+)", filters.get("text"))
+        text_filter = re.search(r"\s*(" + key + r")\s*:([^ ]+)", filters.get("text"))
         if text_filter and text_filter.group(1) == key:
           filter_list.append(make_lambda(name, text_filter.group(2).strip()))
     if filters.get("time"):

+ 9 - 15
apps/jobbrowser/src/jobbrowser/apis/query_api_tests.py

@@ -16,30 +16,24 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import json
-import logging
 import os
-import pytest
 import sys
+import json
+import logging
+from unittest.mock import Mock, patch
 
+import pytest
 from django.urls import reverse
 
 from desktop.auth.backend import rewrite_user
 from desktop.lib.django_test_util import make_logged_in_client
 from impala.conf import COORDINATOR_UI_SPNEGO
+from jobbrowser.apis.query_api import QueryApi, _convert_to_6_digit_ms_local_time
 from useradmin.models import User
 
-from jobbrowser.apis.query_api import QueryApi
-from jobbrowser.apis.query_api import _convert_to_6_digit_ms_local_time
-
-if sys.version_info[0] > 2:
-  from unittest.mock import patch, Mock
-else:
-  from mock import patch, Mock
-
-
 LOG = logging.getLogger()
 
+
 class TestConvertTo6DigitMsLocalTime():
   @patch.dict(os.environ, {'TZ': 'America/New_York'})
   def convert_6_digit(self):
@@ -60,7 +54,7 @@ class TestConvertTo6DigitMsLocalTime():
     expected_time = "2023-07-14 08:00:00.123000"
 
     assert expected_time == converted_time
-      
+
   @patch.dict(os.environ, {'TZ': 'America/New_York'})
   def convert_9_digit(self):
     start_time = "2023-07-14 12:00:00.123456789"
@@ -79,7 +73,8 @@ class TestConvertTo6DigitMsLocalTime():
     # America/New_York timezone is UTC-4
     expected_time = "2023-07-14 08:00:00.000000"
 
-    assert expected_time == converted_time    
+    assert expected_time == converted_time
+
 
 @pytest.mark.django_db
 class TestApi():
@@ -88,7 +83,6 @@ class TestApi():
     self.client = make_logged_in_client(username="test", groupname="default", recreate=True, is_superuser=False)
     self.user = rewrite_user(User.objects.get(username="test"))
 
-
   def test_download_profile(self):
     with patch('jobbrowser.apis.query_api._get_api') as _get_api:
       with patch('jobbrowser.apis.query_api.QueryApi._query_profile') as _query_profile:
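
The timezone tests above pin `TZ` with `patch.dict` so the conversion under test sees a deterministic environment; note that methods like `convert_6_digit` lack the `test_` prefix, so pytest will not collect them as tests. The environment-pinning mechanism in isolation:

```python
import os
from unittest.mock import patch


@patch.dict(os.environ, {'TZ': 'America/New_York'})
def test_tz_is_pinned():
  # Inside the decorated test, TZ is overridden; it is restored on exit.
  assert os.environ['TZ'] == 'America/New_York'
```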

+ 13 - 22
apps/jobbrowser/src/jobbrowser/apis/schedule_api.py

@@ -15,29 +15,24 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from builtins import object
-import logging
-import json
 import sys
+import json
+import logging
+from builtins import object
 
-from liboozie.oozie_api import get_oozie
-from liboozie.utils import format_time
+from django.utils.translation import gettext as _
 
 from jobbrowser.apis.base_api import Api, MockDjangoRequest
-from jobbrowser.apis.workflow_api import _manage_oozie_job, _filter_oozie_jobs
-
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
-
+from jobbrowser.apis.workflow_api import _filter_oozie_jobs, _manage_oozie_job
+from liboozie.oozie_api import get_oozie
+from liboozie.utils import format_time
 
 LOG = logging.getLogger()
 
 
 try:
   from oozie.conf import OOZIE_JOBS_COUNT
-  from oozie.views.dashboard import list_oozie_coordinator, get_oozie_job_log, massaged_oozie_jobs_for_json, has_job_edition_permission
+  from oozie.views.dashboard import get_oozie_job_log, has_job_edition_permission, list_oozie_coordinator, massaged_oozie_jobs_for_json
 except Exception as e:
   LOG.warning('Some application are not enabled: %s' % e)
 
@@ -71,7 +66,6 @@ class ScheduleApi(Api):
       'total': jobs.total
     }
 
-
   def app(self, appid, offset=1, filters={}):
     oozie_api = get_oozie(self.user)
     coordinator = oozie_api.get_coordinator(jobid=appid)
@@ -79,14 +73,14 @@ class ScheduleApi(Api):
     mock_get = MockGet()
     mock_get.update('offset', offset)
 
-    """ 
+    """
       The Oozie job api supports one or more "status" parameters. The valid status values are:
-      
+
       WAITING, READY, SUBMITTED, RUNNING, SUSPENDED, TIMEDOUT, SUCCEEDED, KILLED, FAILED, IGNORED, SKIPPED
-      
+
      The job browser UI has a generic filter mechanism that is re-used across all the different types of jobs; that
       parameter is called "states" and it only has three possible values: completed, running or failed
-      
+
       Here we adapt this to fit the API requirements, "state" becomes "status" and the values are translated
       based on how it's been done historically (for instance list_oozie_coordinator.mako around line 725).
     """
@@ -125,18 +119,15 @@ class ScheduleApi(Api):
 
     return common
 
-
   def action(self, app_ids, action):
     return _manage_oozie_job(self.user, action, app_ids)
 
-
   def logs(self, appid, app_type, log_name=None, is_embeddable=False):
     request = MockDjangoRequest(self.user)
     data = get_oozie_job_log(request, job_id=appid)
 
     return {'logs': json.loads(data.content)['log']}
 
-
   def profile(self, appid, app_type, app_property, app_filters):
     if app_property == 'xml':
       oozie_api = get_oozie(self.user)
@@ -199,7 +190,7 @@ class MockGet(object):
 
   @property
   def properties(self):
-    if self._prop == None:
+    if self._prop is None:
       self._prop = {}
     return self._prop
 

+ 3 - 14
apps/jobbrowser/src/jobbrowser/apis/schedule_hive.py

@@ -15,21 +15,16 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import logging
 import sys
-
+import logging
 from datetime import datetime
 
 from dateutil import parser
-from desktop.lib.scheduler.lib.hive import HiveSchedulerApi
+from django.utils.translation import gettext as _
 
+from desktop.lib.scheduler.lib.hive import HiveSchedulerApi
 from jobbrowser.apis.base_api import Api
 
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
-
 LOG = logging.getLogger()
 
 
@@ -58,7 +53,6 @@ class HiveScheduleApi(Api):
       'total': len(tasks)
     }
 
-
   def app(self, appid):
     appid = appid.rsplit('-')[-1]
     api = HiveSchedulerApi(user=self.user)
@@ -83,7 +77,6 @@ class HiveScheduleApi(Api):
         }
     }
 
-
   def action(self, app_ids, operation):
     api = HiveSchedulerApi(user=self.user)
 
@@ -103,11 +96,9 @@ class HiveScheduleApi(Api):
         'message': _('%s signal sent to %s') % (operation['action'], operations)
     }
 
-
   def logs(self, appid, app_type, log_name=None, is_embeddable=False):
     return {'logs': ''}
 
-
   def profile(self, appid, app_type, app_property, app_filters):
     appid = appid.rsplit('-')[-1]
 
@@ -124,13 +115,11 @@ class HiveScheduleApi(Api):
     else:
       return {}
 
-
   def _api_status(self, status):
     if status == 'RUNNING':
       return 'RUNNING'
     else:
       return 'PAUSED'
 
-
   def _massage_status(self, task):
     return 'RUNNING' if task['enabled'] else 'PAUSED'

+ 21 - 20
apps/jobbrowser/src/jobbrowser/apis/workflow_api.py

@@ -15,26 +15,30 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import logging
-import json
 import sys
+import json
+import logging
 
-from jobbrowser.apis.base_api import Api, MockDjangoRequest, _extract_query_params, is_linkable, hdfs_link_js
-from liboozie.oozie_api import get_oozie
+from django.utils.translation import gettext as _
 
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
+from jobbrowser.apis.base_api import Api, MockDjangoRequest, _extract_query_params, hdfs_link_js, is_linkable
+from liboozie.oozie_api import get_oozie
 
 LOG = logging.getLogger()
 
 
 try:
+  from oozie.conf import ENABLE_OOZIE_BACKEND_FILTERING, OOZIE_JOBS_COUNT
   from oozie.forms import ParameterForm
-  from oozie.conf import OOZIE_JOBS_COUNT, ENABLE_OOZIE_BACKEND_FILTERING
-  from oozie.views.dashboard import get_oozie_job_log, list_oozie_workflow, manage_oozie_jobs, bulk_manage_oozie_jobs, \
-      has_dashboard_jobs_access, massaged_oozie_jobs_for_json, has_job_edition_permission
+  from oozie.views.dashboard import (
+    bulk_manage_oozie_jobs,
+    get_oozie_job_log,
+    has_dashboard_jobs_access,
+    has_job_edition_permission,
+    list_oozie_workflow,
+    manage_oozie_jobs,
+    massaged_oozie_jobs_for_json,
+  )
   has_oozie_installed = True
   OOZIE_JOBS_COUNT_LIMIT = OOZIE_JOBS_COUNT.get()
 except Exception as e:
@@ -54,7 +58,7 @@ class WorkflowApi(Api):
     wf_list = oozie_api.get_workflows(**kwargs)
 
     return {
-      'apps':[{
+      'apps': [{
         'id': app['id'],
         'name': app['appName'],
         'status': app['status'],
@@ -70,7 +74,6 @@ class WorkflowApi(Api):
       'total': wf_list.total
     }
 
-
   def app(self, appid):
     if '@' in appid:
       return WorkflowActionApi(self.user).app(appid)
@@ -103,11 +106,9 @@ class WorkflowApi(Api):
 
     return common
 
-
   def action(self, app_ids, action):
     return _manage_oozie_job(self.user, action, app_ids)
 
-
   def logs(self, appid, app_type, log_name=None, is_embeddable=False):
     if '@' in appid:
       return WorkflowActionApi(self.user).logs(appid, app_type)
@@ -117,7 +118,6 @@ class WorkflowApi(Api):
 
     return {'logs': json.loads(data.content)['log']}
 
-
   def profile(self, appid, app_type, app_property, app_filters):
     if '@' in appid:
       return WorkflowActionApi(self.user).profile(appid, app_type, app_property)
@@ -133,7 +133,9 @@ class WorkflowApi(Api):
       workflow = oozie_api.get_job(jobid=appid)
       return {
         'properties': workflow.conf_dict,
-        'properties_display': [{'name': key, 'value': val, 'link': is_linkable(key, val) and hdfs_link_js(val)} for key, val in workflow.conf_dict.items()],
+        'properties_display': [
+          {'name': key, 'value': val, 'link': is_linkable(key, val) and hdfs_link_js(val)} for key, val in workflow.conf_dict.items()
+        ],
       }
 
     return {}
@@ -146,7 +148,7 @@ class WorkflowApi(Api):
     elif status == 'SUCCEEDED':
       return 'SUCCEEDED'
     else:
-      return 'FAILED' # KILLED and FAILED
+      return 'FAILED'  # KILLED and FAILED
 
   def _get_variables(self, workflow):
     parameters = []
@@ -185,7 +187,6 @@ class WorkflowActionApi(Api):
 
     return common
 
-
   def logs(self, appid, app_type, log_name=None):
     return {'progress': 0, 'logs': ''}
 
@@ -229,7 +230,7 @@ def _filter_oozie_jobs(user, filters, kwargs):
       kwargs['cnt'] = min(filters['pagination']['limit'], OOZIE_JOBS_COUNT_LIMIT)
 
     if filters.get('states'):
-      states_filters = {'running': ['RUNNING', 'PREP', 'SUSPENDED'], 'completed': ['SUCCEEDED'], 'failed': ['FAILED', 'KILLED'],}
+      states_filters = {'running': ['RUNNING', 'PREP', 'SUSPENDED'], 'completed': ['SUCCEEDED'], 'failed': ['FAILED', 'KILLED'], }
       for _state in filters.get('states'):
         for _status in states_filters[_state]:
           kwargs['filters'].extend([('status', _status)])

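The try/except around the `oozie` imports above is the standard guard for an optional Hue app: if the app is not installed, the API degrades instead of crashing at import time. A minimal sketch of the pattern, using a hypothetical module name in place of the oozie app:

```python
import logging

LOG = logging.getLogger()

try:
    # Hypothetical optional dependency standing in for the oozie app.
    from optional_app import jobs_api
    has_optional_app = True
except Exception as e:
    LOG.warning('optional_app not installed: %s', e)
    jobs_api = None
    has_optional_app = False


def list_jobs():
    if not has_optional_app:
        return []  # Degrade gracefully instead of raising at import time.
    return jobs_api.list()
```
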
+ 4 - 6
apps/jobbrowser/src/jobbrowser/conf.py

@@ -17,13 +17,9 @@
 
 import sys
 
-from desktop.lib.conf import Config, coerce_bool, ConfigSection
-
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext_lazy as _
-else:
-  from django.utils.translation import ugettext_lazy as _
+from django.utils.translation import gettext_lazy as _
 
+from desktop.lib.conf import Config, ConfigSection, coerce_bool
 
 SHARE_JOBS = Config(
   key='share_jobs',
@@ -84,10 +80,12 @@ ENABLE_HISTORY_V2 = Config(
   default=False
 )
 
+
 def is_query_store_url_set():
   """Check if query store url is configured"""
   return QUERY_STORE.SERVER_URL.get() != ''
 
+
 QUERY_STORE = ConfigSection(
   key="query_store",
   help=_("Configs for managing query store interface."),

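`is_query_store_url_set()` above reads its `Config` value through `.get()` at call time rather than at import time, so changes in hue.ini are picked up when the function runs. A sketch of the same shape, assuming only the constructor arguments visible elsewhere in this diff (`key`, `help`, `default`):

```python
# Sketch only: assumes desktop.lib.conf.Config accepts key/help/default
# as shown in the surrounding hunks.
from desktop.lib.conf import Config

SERVER_URL = Config(
    key='server_url',
    help='Base URL of the query store service.',
    default=''
)


def is_query_store_url_set():
    # .get() resolves the live value from hue.ini, falling back to default.
    return SERVER_URL.get() != ''
```
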
+ 13 - 16
apps/jobbrowser/src/jobbrowser/models.py

@@ -16,32 +16,26 @@
 # limitations under the License.
 
 from __future__ import division
-from builtins import str
-from builtins import object
-import datetime
-import logging
-import math
-import functools
+
 import re
 import sys
+import math
+import logging
+import datetime
+import functools
+from builtins import object, str
 
 from django.db import connection, models
 from django.urls import reverse
 from django.utils.html import escape
+from django.utils.translation import gettext as _
 
 from desktop.auth.backend import is_admin
 from desktop.conf import REST_CONN_TIMEOUT
 from desktop.lib import i18n
 from desktop.lib.view_util import format_duration_in_millis, location_to_url
-
 from jobbrowser.conf import DISABLE_KILLING_JOBS
 
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
-
-
 LOG = logging.getLogger()
 
 
@@ -49,21 +43,24 @@ def can_view_job(username, job):
   acl = get_acls(job).get('mapreduce.job.acl-view-job', '')
   return acl == '*' or username in acl.split(',')
 
+
 def can_modify_job(username, job):
   acl = get_acls(job).get('mapreduce.job.acl-modify-job', '')
   return acl == '*' or username in acl.split(',')
 
+
 def get_acls(job):
   if job.is_mr2:
     try:
       acls = job.acls
-    except:
+    except Exception:
       LOG.exception('failed to get acls')
       acls = {}
     return acls
   else:
     return job.full_job_conf
 
+
 def can_kill_job(self, user):
   if DISABLE_KILLING_JOBS.get():
     return False
@@ -102,7 +99,7 @@ class LinkJobLogs(object):
   def _replace_hdfs_link(self, is_embeddable=False, match=None):
     try:
       return '<a href="%s">%s</a>' % (location_to_url(match.group(0), strict=False, is_embeddable=is_embeddable), match.group(0))
-    except:
+    except Exception:
       LOG.exception('failed to replace hdfs links: %s' % (match.groups(),))
       return match.group(0)
 
@@ -110,7 +107,7 @@ class LinkJobLogs(object):
   def _replace_mr_link(self, match):
     try:
       return '<a href="/hue%s">%s</a>' % (reverse('jobbrowser:jobbrowser.views.single_job', kwargs={'job': match.group(0)}), match.group(0))
-    except:
+    except Exception:
       LOG.exception('failed to replace mr links: %s' % (match.groups(),))
       return match.group(0)
 

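The ACL helpers above implement the view/modify checks as a plain membership test on a comma-separated list, with `'*'` meaning everyone. Extracted as a standalone sketch; note the test is exact string membership, so a stray space around a name in the ACL would not match:

```python
def user_in_acl(username, acl):
    # '*' grants access to everyone; otherwise the ACL is a comma-separated
    # list of usernames, as in mapreduce.job.acl-view-job.
    return acl == '*' or username in acl.split(',')


assert user_in_acl('alice', '*')
assert user_in_acl('alice', 'alice,bob')
assert not user_in_acl('mallory', 'alice,bob')
```
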
+ 7 - 8
apps/jobbrowser/src/jobbrowser/templatetags/unix_ms_to_datetime.py

@@ -16,24 +16,23 @@
 # limitations under the License.
 
 from __future__ import division
-import datetime
-import django
-import math
+
 import sys
+import math
+import datetime
 
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
+import django
+from django.utils.translation import gettext as _
 
 register = django.template.Library()
 
+
 @register.filter(name='unix_ms_to_datetime')
 def unix_ms_to_datetime(unixtime):
   """unixtime is seconds since the epoch"""
   if unixtime:
     return datetime.datetime.fromtimestamp(math.floor(unixtime / 1000))
   return _("No time")
-unix_ms_to_datetime.is_safe = True
 
 
+unix_ms_to_datetime.is_safe = True

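For reference, the filter above floors milliseconds-since-epoch down to a whole second before building a `datetime`; in a template it would be applied as `{{ job.startTimeMs|unix_ms_to_datetime }}`. The core conversion, stripped of the Django registration:

```python
import math
import datetime


def unix_ms_to_datetime(unixtime_ms):
    # e.g. 1700000000123 ms -> floor to 1700000000 s -> local datetime.
    return datetime.datetime.fromtimestamp(math.floor(unixtime_ms / 1000))


print(unix_ms_to_datetime(1700000000123))
```
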
File diff suppressed because it is too large
+ 796 - 723
apps/jobbrowser/src/jobbrowser/tests.py


+ 2 - 6
apps/jobbrowser/src/jobbrowser/urls.py

@@ -17,13 +17,9 @@
 
 import sys
 
-from jobbrowser import views as jobbrowser_views
-from jobbrowser import api2 as jobbrowser_api2
+from django.urls import re_path
 
-if sys.version_info[0] > 2:
-  from django.urls import re_path
-else:
-  from django.conf.urls import url as re_path
+from jobbrowser import api2 as jobbrowser_api2, views as jobbrowser_views
 
 urlpatterns = [
   # "Default"

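Under Django 2+, `re_path` is a drop-in replacement for the removed `django.conf.urls.url`, which is why the conditional import above collapses to a single line. A route in the same shape, with a hypothetical pattern since the actual ones are truncated in this hunk:

```python
from django.urls import re_path

from jobbrowser import views as jobbrowser_views

# Hypothetical route; the real patterns are not shown in this hunk.
urlpatterns = [
    re_path(r'^$', jobbrowser_views.jobbrowser),
]
```
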
+ 46 - 46
apps/jobbrowser/src/jobbrowser/views.py

@@ -15,29 +15,27 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from past.builtins import cmp
-from future import standard_library
-standard_library.install_aliases()
-from builtins import filter
-from builtins import str
-import functools
-import logging
 import re
-import string
 import sys
 import time
-import urllib.request, urllib.error, urllib.parse
+import string
+import logging
+import functools
+import urllib.error
 import urllib.parse
-
-from lxml import html
+import urllib.request
+from builtins import filter, str
 from urllib.parse import quote_plus
 
 from django.http import HttpResponseRedirect
-from django.utils.functional import wraps
 from django.urls import reverse
+from django.utils.functional import wraps
+from django.utils.translation import gettext as _
+from lxml import html
+from past.builtins import cmp
 
 from desktop.auth.backend import is_admin
-from desktop.lib.django_util import JsonResponse, render_json, render, copy_query_dict
+from desktop.lib.django_util import JsonResponse, copy_query_dict, render, render_json
 from desktop.lib.exceptions import MessageException
 from desktop.lib.exceptions_renderable import PopupException
 from desktop.lib.json_utils import JSONEncoderForHTML
@@ -45,30 +43,23 @@ from desktop.lib.rest.http_client import RestException
 from desktop.lib.rest.resource import Resource
 from desktop.log.access import access_log_level
 from desktop.views import register_status_bar_view
-
 from hadoop import cluster
-from hadoop.yarn.clients import get_log_client
 from hadoop.yarn import resource_manager_api as resource_manager_api
-
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
+from hadoop.yarn.clients import get_log_client
+from jobbrowser.api import ApplicationNotRunning, JobExpired, get_api
+from jobbrowser.conf import LOG_OFFSET, SHARE_JOBS
+from jobbrowser.models import LinkJobLogs, can_kill_job, can_view_job
+from jobbrowser.yarn_models import Application
 
 LOG = logging.getLogger()
 
 
 try:
   from beeswax.hive_site import hiveserver2_impersonation_enabled
-except:
+except Exception:
   LOG.warning('Hive is not enabled')
   def hiveserver2_impersonation_enabled(): return True
 
-from jobbrowser.conf import LOG_OFFSET, SHARE_JOBS
-from jobbrowser.api import get_api, ApplicationNotRunning, JobExpired
-from jobbrowser.models import can_view_job, can_kill_job, LinkJobLogs
-from jobbrowser.yarn_models import Application
-
 
 LOG_OFFSET_BYTES = LOG_OFFSET.get()
 
@@ -259,6 +250,7 @@ def single_spark_job(request, job):
       'job': job
     })
 
+
 @check_job_permission
 def single_job(request, job):
   def cmp_exec_time(task1, task2):
@@ -270,12 +262,8 @@ def single_job(request, job):
   failed_tasks = job.filter_tasks(task_states=('failed',))
   recent_tasks = job.filter_tasks(task_states=('running', 'succeeded',))
 
-  if sys.version_info[0] > 2:
-    failed_tasks.sort(key=lambda task: task.execStartTimeMs)
-    recent_tasks.sort(key=lambda task: task.execStartTimeMs, reverse=True)
-  else:
-    failed_tasks.sort(cmp_exec_time)
-    recent_tasks.sort(cmp_exec_time, reverse=True)
+  failed_tasks.sort(key=lambda task: task.execStartTimeMs)
+  recent_tasks.sort(key=lambda task: task.execStartTimeMs, reverse=True)
 
   if request.GET.get('format') == 'json':
     json_failed_tasks = [massage_task_for_json(task) for task in failed_tasks]
@@ -341,6 +329,7 @@ def kill_job(request, job):
 
   raise Exception(_("Job did not appear as killed within 15 seconds."))
 
+
 @check_job_permission
 def job_executor_logs(request, job, attempt_index=0, name='syslog', offset=LOG_OFFSET_BYTES):
   response = {'status': -1}
@@ -421,7 +410,7 @@ def job_attempt_logs_json(request, job, attempt_index=0, name='syslog', offset=L
           debug_info += '\nHTML Response: %s' % response
         response['debug'] = debug_info
         LOG.error(debug_info)
-      except:
+      except Exception:
         LOG.exception('failed to create debug info')
 
   return JsonResponse(response)
@@ -441,23 +430,19 @@ def job_single_logs(request, job, offset=LOG_OFFSET_BYTES):
   task = None
 
   failed_tasks = job.filter_tasks(task_states=('failed',))
-  if sys.version_info[0] > 2:
-    failed_tasks.sort(key=functools.cmp_to_key(cmp_exec_time))
-  else:
-    failed_tasks.sort(cmp_exec_time)
+  failed_tasks.sort(key=functools.cmp_to_key(cmp_exec_time))
+
   if failed_tasks:
     task = failed_tasks[0]
-    if not task.taskAttemptIds and len(failed_tasks) > 1: # In some cases the last task ends up without any attempt
+    if not task.taskAttemptIds and len(failed_tasks) > 1:  # In some cases the last task ends up without any attempt
       task = failed_tasks[1]
   else:
     task_states = ['running', 'succeeded']
     if job.is_mr2:
       task_states.append('scheduled')
     recent_tasks = job.filter_tasks(task_states=task_states, task_types=('map', 'reduce',))
-    if sys.version_info[0] > 2:
-      recent_tasks.sort(key=functools.cmp_to_key(cmp_exec_time), reverse=True)
-    else:
-      recent_tasks.sort(cmp_exec_time, reverse=True)
+    recent_tasks.sort(key=functools.cmp_to_key(cmp_exec_time), reverse=True)
+
     if recent_tasks:
       task = recent_tasks[0]
 
@@ -528,6 +513,7 @@ def single_task(request, job, taskid):
     'joblnk': job_link
   })
 
+
 @check_job_permission
 def single_task_attempt(request, job, taskid, attemptid):
   jt = get_api(request.user, request.jt)
@@ -547,6 +533,7 @@ def single_task_attempt(request, job, taskid, attemptid):
       "task": task
     })
 
+
 @check_job_permission
 def single_task_attempt_logs(request, job, taskid, attemptid, offset=LOG_OFFSET_BYTES):
   jt = get_api(request.user, request.jt)
@@ -603,6 +590,7 @@ def single_task_attempt_logs(request, job, taskid, attemptid, offset=LOG_OFFSET_
   else:
     return render("attempt_logs.mako", request, context)
 
+
 @check_job_permission
 def task_attempt_counters(request, job, taskid, attemptid):
   """
@@ -617,6 +605,7 @@ def task_attempt_counters(request, job, taskid, attemptid):
     counters = attempt.counters
   return render("counters.html", request, {'counters': counters})
 
+
 @access_log_level(logging.WARN)
 def kill_task_attempt(request, attemptid):
   """
@@ -626,6 +615,7 @@ def kill_task_attempt(request, attemptid):
   ret = request.jt.kill_task_attempt(request.jt.thriftattemptid_from_string(attemptid))
   return render_json({})
 
+
 def trackers(request):
   """
   We get here from /trackers
@@ -634,6 +624,7 @@ def trackers(request):
 
   return render("tasktrackers.mako", request, {'trackers': trackers})
 
+
 def single_tracker(request, trackerid):
   jt = get_api(request.user, request.jt)
 
@@ -643,6 +634,7 @@ def single_tracker(request, trackerid):
     raise PopupException(_('The tracker could not be contacted.'), detail=e)
   return render("tasktracker.mako", request, {'tracker': tracker})
 
+
 def container(request, node_manager_http_address, containerid):
   jt = get_api(request.user, request.jt)
 
@@ -660,12 +652,14 @@ def clusterstatus(request):
   """
   return render("clusterstatus.html", request, Cluster(request.jt))
 
+
 def queues(request):
   """
   We get here from /queues
   """
   return render("queues.html", request, {"queuelist": request.jt.queues()})
 
+
 @check_job_permission
 def set_job_priority(request, job):
   """
@@ -676,8 +670,10 @@ def set_job_priority(request, job):
   request.jt.set_job_priority(jid, ThriftJobPriority._NAMES_TO_VALUES[priority])
   return render_json({})
 
+
 CONF_VARIABLE_REGEX = r"\$\{(.+)\}"
 
+
 def make_substitutions(conf):
   """
   Substitute occurences of ${foo} with conf[foo], recursively, in all the values
@@ -687,6 +683,7 @@ def make_substitutions(conf):
   this code does not have.
   """
   r = re.compile(CONF_VARIABLE_REGEX)
+
   def sub(s, depth=0):
     # Malformed / malicious confs could make this loop infinitely
     if depth > 100:
@@ -696,7 +693,7 @@ def make_substitutions(conf):
     if m:
       for g in [g for g in m.groups() if g in conf]:
         substr = "${%s}" % g
-        s = s.replace(substr, sub(conf[g], depth+1))
+        s = s.replace(substr, sub(conf[g], depth + 1))
     return s
 
   for k, v in list(conf.items()):
@@ -704,7 +701,8 @@ def make_substitutions(conf):
   return conf
 
 ##################################
-## Helper functions
+# Helper functions
+
 
 def get_shorter_id(hadoop_job_id):
   return "_".join(hadoop_job_id.split("_")[-2:])
@@ -746,7 +744,7 @@ def get_state_link(request, option=None, val='', VALID_OPTIONS=("state", "user",
   return "&".join(["%s=%s" % (key, quote_plus(value)) for key, value in states.items()])
 
 
-## All Unused below
+# All Unused below
 
 # DEAD?
 def dock_jobs(request):
@@ -755,6 +753,8 @@ def dock_jobs(request):
   return render("jobs_dock_info.mako", request, {
     'jobs': matching_jobs
   }, force_template=True)
+
+
 register_status_bar_view(dock_jobs)
 
 
@@ -802,7 +802,7 @@ def jobbrowser(request):
     return lambda job: job.status == state
 
   status = request.jt.cluster_status()
-  alljobs = [] #get_matching_jobs(request)
+  alljobs = []  # get_matching_jobs(request)
   runningjobs = list(filter(check_job_state('RUNNING'), alljobs))
   completedjobs = list(filter(check_job_state('COMPLETED'), alljobs))
   failedjobs = list(filter(check_job_state('FAILED'), alljobs))

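The main behavioral change above is the sort migration: Python 3's `list.sort` dropped the `cmp` argument, so the old comparator is either wrapped with `functools.cmp_to_key` or replaced by a plain `key=`. A self-contained sketch showing the two forms are equivalent:

```python
import functools

tasks = [{'execStartTimeMs': 30}, {'execStartTimeMs': 10}, {'execStartTimeMs': 20}]


def cmp_exec_time(task1, task2):
    # Old-style comparator: negative/zero/positive, like Python 2's cmp().
    return (task1['execStartTimeMs'] > task2['execStartTimeMs']) - \
           (task1['execStartTimeMs'] < task2['execStartTimeMs'])


by_cmp = sorted(tasks, key=functools.cmp_to_key(cmp_exec_time))
by_key = sorted(tasks, key=lambda task: task['execStartTimeMs'])
assert by_cmp == by_key
```
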
+ 12 - 17
apps/jobbrowser/src/jobbrowser/yarn_models.py

@@ -15,36 +15,26 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from __future__ import division
-from future import standard_library
-standard_library.install_aliases()
-from builtins import str
-from builtins import object
-import logging
-import math
 import os
 import re
 import sys
+import math
 import time
+import logging
 import urllib.parse
+from builtins import object, str
 
+from django.utils.translation import gettext as _
 from lxml import html
 
 from desktop.lib.exceptions_renderable import PopupException
 from desktop.lib.rest.http_client import HttpClient
 from desktop.lib.rest.resource import Resource
 from desktop.lib.view_util import big_filesizeformat, format_duration_in_millis
-
 from hadoop import cluster
 from hadoop.yarn.clients import get_log_client
-
 from jobbrowser.models import format_unixtime_ms
 
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
-
 LOG = logging.getLogger()
 
 
@@ -257,7 +247,6 @@ class Job(object):
         else:
           self.progress = self.reduces_percent_complete
 
-
   def _fixup(self):
     jobid = self.id
 
@@ -343,6 +332,7 @@ class Job(object):
       self._job_attempts = self.api.job_attempts(self.id)['jobAttempts']
     return self._job_attempts
 
+
 class YarnV2Job(Job):
   def __init__(self, api, attrs):
     self.api = api
@@ -418,6 +408,7 @@ class YarnV2Job(Job):
 
     return self._job_attempts
 
+
 # There are no tasks for Oozie workflows, so we create a dummy one.
 class YarnTask(object):
   def __init__(self, job):
@@ -427,6 +418,7 @@ class YarnTask(object):
     json = self.job.api.appattempts_attempt(self.job.id, attempt_id)
     return YarnV2Attempt(self, json)
 
+
 class KilledJob(Job):
 
   def __init__(self, api, attrs):
@@ -629,7 +621,8 @@ class Attempt(object):
 
     for name in ('stdout', 'stderr', 'syslog'):
       link = '/%s/' % name
-      if self.type == 'Oozie Launcher' and not self.task.job.status == 'FINISHED': # Yarn currently dumps with 500 error with doas in running state
+      # Yarn currently dumps with 500 error with doas in running state
+      if self.type == 'Oozie Launcher' and not self.task.job.status == 'FINISHED':
         params = {}
       else:
         params = {
@@ -654,13 +647,14 @@ class Attempt(object):
           if response:
             debug_info += '\nHTML Response: %s' % response
           LOG.error(debug_info)
-        except:
+        except Exception:
           LOG.exception('failed to build debug info')
 
       logs.append(log)
 
     return logs + [''] * (3 - len(logs))
 
+
 class YarnV2Attempt(Attempt):
   def __init__(self, task, attrs):
     self.task = task
@@ -691,6 +685,7 @@ class YarnV2Attempt(Attempt):
     setattr(self, 'status', 'RUNNING' if self.finishedTime == 0 else 'SUCCEEDED')
     setattr(self, 'properties', {})
 
+
 class Container(object):
 
   def __init__(self, attrs):

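A small detail worth noting in `Attempt.get_task_log` above: callers expect exactly three entries (stdout, stderr, syslog), so the result is right-padded with empty strings. In isolation:

```python
def pad_logs(logs):
    # Guarantee exactly three entries (stdout, stderr, syslog) even when
    # some log fetches failed and were skipped.
    return logs + [''] * (3 - len(logs))


assert pad_logs(['out']) == ['out', '', '']
assert pad_logs(['out', 'err', 'sys']) == ['out', 'err', 'sys']
```
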
+ 3 - 8
apps/jobsub/src/jobsub/conf.py

@@ -16,16 +16,11 @@
 # limitations under the License.
 
 import os.path
-import sys
 
-from desktop.lib.conf import Config
-from desktop.lib import paths
-
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext_lazy as _
-else:
-  from django.utils.translation import ugettext_lazy as _
+from django.utils.translation import gettext_lazy as _
 
+from desktop.lib import paths
+from desktop.lib.conf import Config
 
 LOCAL_DATA_DIR = Config(
   key="local_data_dir",

+ 6 - 8
apps/jobsub/src/jobsub/forms.py

@@ -15,24 +15,21 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from builtins import object
-import logging
 import sys
+import logging
+from builtins import object
 
 from django import forms
+from django.utils.translation import gettext as _
 
 from desktop.lib.django_forms import MultiForm
 from jobsub import models
 
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
-
 LOG = logging.getLogger()
 
 # This aligns with what Oozie accepts as a workflow name
-_OOZIE_WORKFLOW_NAME_REGEX = '^([a-zA-Z_]([\-_a-zA-Z0-9])*){1,39}$'
+_OOZIE_WORKFLOW_NAME_REGEX = r'^([a-zA-Z_]([\-_a-zA-Z0-9])*){1,39}$'
+
 
 class WorkflowDesignForm(forms.ModelForm):
   """Used for specifying a design"""
@@ -112,6 +109,7 @@ def design_form_by_type(action_type):
   cls = _ACTION_TYPE_TO_FORM_CLS[action_type]
   return MultiForm(wf=WorkflowDesignForm, action=cls)
 
+
 def design_form_by_instance(design_obj, data=None):
   action_obj = design_obj.get_root_action()
   cls = _ACTION_TYPE_TO_FORM_CLS[action_obj.action_type]

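The `r''` prefix added to `_OOZIE_WORKFLOW_NAME_REGEX` fixes Ruff's W605: `'\-'` is an invalid escape sequence in a plain string and raises a SyntaxWarning on modern Python. Behavior is unchanged, as a quick check shows:

```python
import re

# Same pattern as above; the raw string keeps the backslash literal.
_OOZIE_WORKFLOW_NAME_REGEX = r'^([a-zA-Z_]([\-_a-zA-Z0-9])*){1,39}$'

assert re.match(_OOZIE_WORKFLOW_NAME_REGEX, 'my-workflow_1')
assert not re.match(_OOZIE_WORKFLOW_NAME_REGEX, '1-starts-with-digit')
```
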
+ 7 - 10
apps/jobsub/src/jobsub/models.py

@@ -15,21 +15,17 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from builtins import str
-import logging
 import sys
+import logging
+from builtins import str
 
 from django.db import models
 from django.urls import reverse
+from django.utils.translation import gettext_lazy as _
 
-from desktop.lib.parameterization import find_parameters, bind_parameters
+from desktop.lib.parameterization import bind_parameters, find_parameters
 from useradmin.models import User
 
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext_lazy as _
-else:
-  from django.utils.translation import ugettext_lazy as _
-
 LOG = logging.getLogger()
 
 
@@ -63,7 +59,7 @@ class JobDesign(models.Model):
     return reverse("jobsub.views.submit_design", kwargs=dict(id=self.id))
 
   def clone(self):
-    clone_kwargs = dict([(field.name, getattr(self, field.name)) for field in self._meta.fields if field.name != 'id']);
+    clone_kwargs = dict([(field.name, getattr(self, field.name)) for field in self._meta.fields if field.name != 'id'])
     return self.__class__.objects.create(**clone_kwargs)
 
   def to_jsonable(self):
@@ -75,6 +71,7 @@ class JobDesign(models.Model):
       'data': repr(self.data)
     }
 
+
 class CheckForSetup(models.Model):
   """
   A model which should have at most one row, indicating
@@ -86,7 +83,7 @@ class CheckForSetup(models.Model):
   setup_level = models.IntegerField(default=0)
 
 
-################################## New Models ################################
+# New Models ################################
 
 PATH_MAX = 512
 

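`JobDesign.clone()` above copies every concrete field except the primary key and inserts a fresh row. The same idea as a generic helper (a sketch; assumes a standard Django model instance):

```python
def clone_instance(obj):
    # Copy all concrete fields except the primary key, then create a new row;
    # Django assigns a new id on create().
    kwargs = {
        field.name: getattr(obj, field.name)
        for field in obj._meta.fields
        if field.name != 'id'
    }
    return obj.__class__.objects.create(**kwargs)
```
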
+ 1 - 4
apps/jobsub/src/jobsub/old_migrations/0005_unify_with_oozie.py

@@ -9,10 +9,7 @@ from django.db import models
 from oozie.importlib.jobdesigner import convert_jobsub_design
 from oozie.models import Workflow, Kill, Start, End
 
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
+from django.utils.translation import gettext as _
 
 
 class Migration(DataMigration):

+ 1 - 4
apps/jobsub/src/jobsub/old_migrations/0006_chg_varchars_to_textfields.py

@@ -6,10 +6,7 @@ from south.db import db
 from south.v2 import SchemaMigration
 from django.db import models
 
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
+from django.utils.translation import gettext as _
 
 
 class Migration(SchemaMigration):

+ 2 - 5
apps/jobsub/src/jobsub/urls.py

@@ -17,12 +17,9 @@
 
 import sys
 
-from jobsub import views as jobsub_views
+from django.urls import re_path
 
-if sys.version_info[0] > 2:
-  from django.urls import re_path
-else:
-  from django.conf.urls import url as re_path
+from jobsub import views as jobsub_views
 
 urlpatterns = [
   # The base view is the "list" view, which we alias as /

+ 10 - 14
apps/jobsub/src/jobsub/views.py

@@ -25,30 +25,24 @@ to the cluster.  A parameterized, submitted job design
 is a "job submission".  Submissions can be "watched".
 """
 
-from builtins import str
-import logging
 import sys
 import time as py_time
+import logging
+from builtins import str
+
+from django.utils.translation import gettext as _
 
 from desktop import appmanager
+from desktop.auth.backend import is_admin
 from desktop.lib.django_util import render, render_json
 from desktop.lib.exceptions import StructuredException
 from desktop.lib.exceptions_renderable import PopupException
 from desktop.lib.view_util import is_ajax
 from desktop.log.access import access_warn
 from desktop.models import Document
-
-from oozie.models import Workflow
 from oozie.forms import design_form_by_type
-from oozie.utils import model_to_dict, format_dict_field_values,\
-                        sanitize_node_dict
-
-from desktop.auth.backend import is_admin
-
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
+from oozie.models import Workflow
+from oozie.utils import format_dict_field_values, model_to_dict, sanitize_node_dict
 
 LOG = logging.getLogger()
 MAX_DESIGNS = 250
@@ -115,9 +109,11 @@ def list_designs(request):
       'apps': appmanager.get_apps_dict()
     })
 
+
 def not_available(request):
   return render("not_available.mako", request, {})
 
+
 def _get_design(user, design_id):
   """Raise PopupException if design doesn't exist"""
   try:
@@ -206,7 +202,7 @@ def save_design(request, design_id):
   data = format_dict_field_values(request.POST.copy())
   _save_design(request.user, design_id, data)
 
-  return get_design(request, design_id);
+  return get_design(request, design_id)
 
 
 def _save_design(user, design_id, data):

+ 2 - 13
apps/metastore/src/metastore/forms.py

@@ -15,22 +15,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import sys
-
 from django import forms
+from django.utils.translation import gettext as _, gettext_lazy as _t
 
-from desktop.lib.django_forms import simple_formset_factory, DependencyAwareForm
-from desktop.lib.django_forms import ChoiceOrOtherField, MultiForm, SubmitButton
 from filebrowser.forms import PathField
 
-from beeswax import common
-from beeswax.models import SavedQuery
-
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _, gettext_lazy as _t
-else:
-  from django.utils.translation import ugettext as _, ugettext_lazy as _t
-
 
 class DbForm(forms.Form):
   """For 'show tables'"""
@@ -63,6 +52,6 @@ class LoadDataForm(forms.Form):
       # We give these numeric names because column names
       # may be unpleasantly arbitrary.
       name = "partition_%d" % i
-      char_field = forms.CharField(required=True, label=_t("%(column_name)s (partition key with type %(column_type)s)") % {'column_name': column.name, 'column_type': column.type})
+      char_field = forms.CharField(required=True, label=_t("%(column_name)s (partition key with type %(column_type)s)") % {'column_name': column.name, 'column_type': column.type})  # noqa: E501
       self.fields[name] = char_field
       self.partition_columns[name] = column.name

+ 0 - 1
apps/metastore/src/metastore/templates/metastore.mako

@@ -23,7 +23,6 @@ else:
 
 from desktop import conf
 from desktop.conf import USE_NEW_EDITOR
-from desktop.lib.i18n import smart_unicode
 from desktop.views import commonheader, commonfooter, _ko
 from desktop.webpack_utils import get_hue_bundles
 from metastore.conf import SHOW_TABLE_ERD

+ 115 - 114
apps/metastore/src/metastore/tests.py

@@ -16,37 +16,36 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from future import standard_library
-
-standard_library.install_aliases()
-from builtins import object
+import sys
 import json
 import logging
-import pytest
-import sys
-import urllib.request, urllib.parse, urllib.error
+import urllib.error
+import urllib.parse
+import urllib.request
+from builtins import object
+from unittest.mock import Mock, patch
 
-from django.utils.encoding import smart_str
+import pytest
 from django.urls import reverse
-
-from desktop.lib.django_test_util import make_logged_in_client, assert_equal_mod_whitespace
-from desktop.lib.test_utils import add_permission, grant_access
-from hadoop.pseudo_hdfs4 import is_live_cluster
-from metastore import parser
-from useradmin.models import HuePermission, GroupPermission, User, Group
+from django.utils.encoding import smart_str
 
 from beeswax.conf import LIST_PARTITIONS_LIMIT
-from beeswax.views import collapse_whitespace
-from beeswax.test_base import make_query, wait_for_query_to_finish, verify_history, get_query_server_config, \
-  fetch_query_result_data
 from beeswax.models import QueryHistory
 from beeswax.server import dbms
-from beeswax.test_base import BeeswaxSampleProvider
-
-if sys.version_info[0] > 2:
-  from unittest.mock import patch, Mock
-else:
-  from mock import patch, Mock
+from beeswax.test_base import (
+  BeeswaxSampleProvider,
+  fetch_query_result_data,
+  get_query_server_config,
+  make_query,
+  verify_history,
+  wait_for_query_to_finish,
+)
+from beeswax.views import collapse_whitespace
+from desktop.lib.django_test_util import assert_equal_mod_whitespace, make_logged_in_client
+from desktop.lib.test_utils import add_permission, grant_access
+from hadoop.pseudo_hdfs4 import is_live_cluster
+from metastore import parser
+from useradmin.models import Group, GroupPermission, HuePermission, User
 
 LOG = logging.getLogger()
 
@@ -59,21 +58,22 @@ def _make_query(client, query, submission_type="Execute",
   res = make_query(client, query, submission_type,
                    udfs, settings, resources,
                    wait, name, desc, local, is_parameterized, max, database, email_notify, **kwargs)
-  
+
   # Should be in the history if it's submitted.
   if submission_type == 'Execute':
     fragment = collapse_whitespace(smart_str(query[:20]))
     verify_history(client, fragment=fragment)
-  
+
   return res
 
+
 @pytest.mark.django_db
 class TestApi():
   def setup_method(self):
     self.client = make_logged_in_client(username="test", groupname="default", recreate=True, is_superuser=False)
-    
+
     self.user = User.objects.get(username="test")
-  
+
   def test_show_tables(self):
     grant_access("test", "default", "metastore")
     with patch('beeswax.server.dbms.get') as get:
@@ -89,11 +89,11 @@ class TestApi():
         ),
         server_name='hive'
       )
-      
+
       response = self.client.post('/metastore/tables/sfdc?format=json')
-      
+
       get.assert_called()
-    
+
     assert response.status_code == 200
     data = json.loads(response.content)
     assert data['status'] == 0
@@ -125,52 +125,53 @@ class TestApi():
         )
         response = self.client.post('/metastore/tables/sfdc?format=json')
         get.assert_called()
-  
+
     assert response.status_code == 200
     data = json.loads(response.content)
     assert data['status'] == 0
     assert data['table_names'] == ['customer', 'opportunities']
     assert data['tables'] == [{'name': 'customer'}, {'name': 'opportunities'}]
 
+
 @pytest.mark.django_db
 @pytest.mark.integration
 @pytest.mark.requires_hadoop
 class TestMetastoreWithHadoop(BeeswaxSampleProvider):
-  
+
   def setup_method(self):
     user = User.objects.get(username='test')
     self.db = dbms.get(user, get_query_server_config())
-    
+
     add_permission("test", "test", "write", "metastore")
-  
+
   def test_basic_flow(self):
     # Default database should exist
     response = self.client.get("/metastore/databases")
     assert self.db_name in response.context[0]["databases"]
-    
+
     # Table should have been created
     response = self.client.get("/metastore/tables/")
     assert 200 == response.status_code
-    
+
     # Switch databases
     response = self.client.get("/metastore/tables/%s?format=json" % self.db_name)
     data = json.loads(response.content)
     assert 'name' in data["tables"][0]
     assert "test" in data["table_names"]
-    
+
     # Should default to "default" database
     response = self.client.get("/metastore/tables/not_there")
     assert 200 == response.status_code
-    
+
     # And have detail
     response = self.client.post("/metastore/table/%s/test/?format=json" % self.db_name, {'format': 'json'})
     data = json.loads(response.content)
     assert "foo" in [col['name'] for col in data['cols']]
     assert "SerDe Library:" in [prop['col_name'] for prop in data['properties']], data
-    
+
     # Remember the number of history items. Use a generic fragment 'test' to pass verification.
     history_cnt = verify_history(self.client, fragment='test')
-    
+
     # Show table data.
     response = self.client.get("/metastore/table/%s/test/read" % self.db_name, follow=True)
     response = self.client.get(
@@ -181,7 +182,7 @@ class TestMetastoreWithHadoop(BeeswaxSampleProvider):
     assert len(results['results']) > 0
     # This should NOT go into the query history.
     assert verify_history(self.client, fragment='test') == history_cnt, 'Implicit queries should not be saved in the history'
-  
+
   def test_show_tables(self):
     hql = """
         CREATE TABLE test_show_tables_1 (a int) COMMENT 'Test for show_tables';
@@ -190,7 +191,7 @@ class TestMetastoreWithHadoop(BeeswaxSampleProvider):
       """
     resp = _make_query(self.client, hql, database=self.db_name)
     resp = wait_for_query_to_finish(self.client, resp, max=30.0)
-    
+
     # Table should have been created
     response = self.client.get("/metastore/tables/%s?filter=show_tables&format=json" % self.db_name)
     assert 200 == response.status_code
@@ -199,14 +200,14 @@ class TestMetastoreWithHadoop(BeeswaxSampleProvider):
     assert 'name' in data["tables"][0]
     assert 'comment' in data["tables"][0]
     assert 'type' in data["tables"][0]
-    
+
     hql = """
         CREATE TABLE test_show_tables_4 (a int) COMMENT 'Test for show_tables';
         CREATE TABLE test_show_tables_5 (a int) COMMENT 'Test for show_tables';
       """
     resp = _make_query(self.client, hql, database=self.db_name)
     resp = wait_for_query_to_finish(self.client, resp, max=30.0)
-    
+
     # Table should have been created
     response = self.client.get("/metastore/tables/%s?filter=show_tables&format=json" % self.db_name)
     assert 200 == response.status_code
@@ -215,30 +216,30 @@ class TestMetastoreWithHadoop(BeeswaxSampleProvider):
     assert 'name' in data["tables"][0]
     assert 'comment' in data["tables"][0]
     assert 'type' in data["tables"][0]
-    
+
     hql = """
         CREATE INDEX test_index ON TABLE test_show_tables_1 (a) AS 'COMPACT' WITH DEFERRED REBUILD;
       """
     resp = _make_query(self.client, hql, wait=True, local=False, max=30.0, database=self.db_name)
-    
+
     # By default, index table should not appear in show tables view
     response = self.client.get("/metastore/tables/%s?format=json" % self.db_name)
     assert 200 == response.status_code
     data = json.loads(response.content)
-    assert not 'test_index' in data['tables']
-  
+    assert 'test_index' not in data['tables']
+
   def test_describe_view(self):
     resp = self.client.post('/metastore/table/%s/myview' % self.db_name, data={'format': 'json'})
     assert 200 == resp.status_code, resp.content
     data = json.loads(resp.content)
     assert data['is_view']
     assert "myview" == data['name']
-  
+
   def test_describe_partitions(self):
     response = self.client.post("/metastore/table/%s/test_partitions" % self.db_name, data={'format': 'json'})
     data = json.loads(response.content)
     assert 2 == len(data['partition_keys']), data
-    
+
     response = self.client.post("/metastore/table/%s/test_partitions/partitions" % self.db_name,
                                 data={'format': 'json'}, follow=True)
     data = json.loads(response.content)
@@ -247,11 +248,11 @@ class TestMetastoreWithHadoop(BeeswaxSampleProvider):
     assert '12345' in partition_columns, partition_columns
     assert "baz_foo" in partition_columns
     assert '67890' in partition_columns
-    
+
     # Not partitioned
     response = self.client.get("/metastore/table/%s/test/partitions" % self.db_name, follow=True)
     assert "is not partitioned." in response.content
-  
+
   def test_describe_partitioned_table_with_limit(self):
     # We have 2 partitions in the test table
     finish = LIST_PARTITIONS_LIMIT.set_for_testing("1")
@@ -261,7 +262,7 @@ class TestMetastoreWithHadoop(BeeswaxSampleProvider):
       assert 1 == len(partition_values_json)
     finally:
       finish()
-    
+
     finish = LIST_PARTITIONS_LIMIT.set_for_testing("3")
     try:
       response = self.client.get("/metastore/table/%s/test_partitions/partitions" % self.db_name)
@@ -269,11 +270,11 @@ class TestMetastoreWithHadoop(BeeswaxSampleProvider):
       assert 2 == len(partition_values_json)
     finally:
       finish()
-  
+
   def test_read_partitions(self):
     if not is_live_cluster():
       raise SkipTest
-    
+
     partition_spec = "baz='baz_one',boom=12345"
     response = self.client.get(
       "/metastore/table/%s/test_partitions/partitions/%s/read" % (self.db_name, partition_spec), follow=True)
@@ -282,7 +283,7 @@ class TestMetastoreWithHadoop(BeeswaxSampleProvider):
     response = wait_for_query_to_finish(self.client, response, max=30.0)
     results = fetch_query_result_data(self.client, response)
     assert len(results['results']) > 0, results
-  
+
   def test_browse_partition(self):
     partition_spec = "baz='baz_one',boom=12345"
     response = self.client.get(
@@ -293,19 +294,19 @@ class TestMetastoreWithHadoop(BeeswaxSampleProvider):
       path = '/user/hive/warehouse/test_partitions/baz=baz_one/boom=12345'
     filebrowser_path = urllib.parse.unquote(reverse("filebrowser:filebrowser.views.view", kwargs={'path': path}))
     assert response.request['PATH_INFO'] == filebrowser_path
-  
+
   def test_drop_partition(self):
     # Create partition first
     partition_spec = "baz='baz_drop',boom=54321"
     hql = 'ALTER TABLE `%s`.`test_partitions` ADD IF NOT EXISTS PARTITION (%s);' % (self.db_name, partition_spec)
     resp = _make_query(self.client, hql, database=self.db_name)
     wait_for_query_to_finish(self.client, resp, max=30.0)
-    
+
     # Assert partition exists
     response = self.client.get("/metastore/table/%s/test_partitions/partitions" % self.db_name, {'format': 'json'})
     data = json.loads(response.content)
     assert "baz_drop" in [part['columns'][0] for part in data['partition_values_json']], data
-    
+
     # Drop partition
     self.client.post(
       "/metastore/table/%s/test_partitions/partitions/drop" % self.db_name,
@@ -319,8 +320,8 @@ class TestMetastoreWithHadoop(BeeswaxSampleProvider):
     )
     response = self.client.get("/metastore/table/%s/test_partitions/partitions" % self.db_name, {'format': 'json'})
     data = json.loads(response.content)
-    assert not "baz_drop" in [part['columns'][0] for part in data['partition_values_json']], data
-  
+    assert "baz_drop" not in [part['columns'][0] for part in data['partition_values_json']], data
+
   def test_drop_multi_tables(self):
     hql = """
       CREATE TABLE test_drop_1 (a int);
@@ -329,7 +330,7 @@ class TestMetastoreWithHadoop(BeeswaxSampleProvider):
     """
     resp = _make_query(self.client, hql, database=self.db_name)
     resp = wait_for_query_to_finish(self.client, resp, max=30.0)
-    
+
     # Drop them
     resp = self.client.get('/metastore/tables/drop/%s' % self.db_name, follow=True)
     assert 'want to delete' in resp.content, resp.content
@@ -338,7 +339,7 @@ class TestMetastoreWithHadoop(BeeswaxSampleProvider):
       {u'table_selection': [u'test_drop_1', u'test_drop_2', u'test_drop_3'], 'is_embeddable': True}
     )
     assert resp.status_code == 302
-  
+
   def test_drop_multi_tables_with_skip_trash(self):
     hql = """
       CREATE TABLE test_drop_multi_tables_with_skip_trash_1 (a int);
@@ -347,7 +348,7 @@ class TestMetastoreWithHadoop(BeeswaxSampleProvider):
     """
     resp = _make_query(self.client, hql, database=self.db_name)
     resp = wait_for_query_to_finish(self.client, resp, max=30.0)
-    
+
     # Drop them
     resp = self.client.get('/metastore/tables/drop/%s' % self.db_name, follow=True)
     assert 'want to delete' in resp.content, resp.content
@@ -361,19 +362,19 @@ class TestMetastoreWithHadoop(BeeswaxSampleProvider):
       }
     )
     assert resp.status_code == 302
-    
+
     response = self.client.get("/metastore/tables/%s?format=json" % self.db_name)
     assert 200 == response.status_code
     data = json.loads(response.content)
-    assert not 'test_drop_multi_tables_with_skip_trash_1' in data['tables']
-    assert not 'test_drop_multi_tables_with_skip_trash_2' in data['tables']
-    assert not 'test_drop_multi_tables_with_skip_trash_3' in data['tables']
-  
+    assert 'test_drop_multi_tables_with_skip_trash_1' not in data['tables']
+    assert 'test_drop_multi_tables_with_skip_trash_2' not in data['tables']
+    assert 'test_drop_multi_tables_with_skip_trash_3' not in data['tables']
+
   def test_drop_multi_databases(self):
     db1 = '%s_test_drop_1' % self.db_name
     db2 = '%s_test_drop_2' % self.db_name
     db3 = '%s_test_drop_3' % self.db_name
-    
+
     try:
       hql = """
         CREATE DATABASE %(db1)s;
@@ -382,13 +383,13 @@ class TestMetastoreWithHadoop(BeeswaxSampleProvider):
       """ % {'db1': db1, 'db2': db2, 'db3': db3}
       resp = _make_query(self.client, hql)
       resp = wait_for_query_to_finish(self.client, resp, max=30.0)
-      
+
       # Add a table to db1
       hql = "CREATE TABLE " + "`" + db1 + "`." + "`test_drop_1` (a int);"
       resp = _make_query(self.client, hql, database=db1)
       resp = wait_for_query_to_finish(self.client, resp, max=30.0)
       assert resp.status_code == 200
-      
+
       # Drop them
       resp = self.client.get('/metastore/databases/drop', follow=True)
       assert 'want to delete' in resp.content, resp.content
@@ -399,34 +400,34 @@ class TestMetastoreWithHadoop(BeeswaxSampleProvider):
       make_query(self.client, 'DROP DATABASE IF EXISTS %(db)s' % {'db': db1}, wait=True)
       make_query(self.client, 'DROP DATABASE IF EXISTS %(db)s' % {'db': db2}, wait=True)
       make_query(self.client, 'DROP DATABASE IF EXISTS %(db)s' % {'db': db3}, wait=True)
-  
+
   def test_load_data(self):
     """
     Test load data queries.
     These require Hadoop, because they ask the metastore
     about whether a table is partitioned.
     """
-    
+
     # Check that view works
     resp = self.client.get("/metastore/table/%s/test/load" % self.db_name, follow=True)
     assert 'Path' in resp.content
-    
+
     data_dir = '%(prefix)s/tmp' % {'prefix': self.cluster.fs_prefix}
     data_path = data_dir + '/foo'
     self.cluster.fs.mkdir(data_dir)
     self.cluster.fs.create(data_path, data='123')
-    
+
     # Try the submission
     response = self.client.post("/metastore/table/%s/test/load" % self.db_name, {'path': data_path, 'overwrite': True},
                                 follow=True)
     data = json.loads(response.content)
     query = QueryHistory.objects.get(id=data['query_history_id'])
-    
+
     assert_equal_mod_whitespace(
       "LOAD DATA INPATH '%(data_path)s' OVERWRITE INTO TABLE `%(db)s`.`test`" % {'data_path': data_path,
                                                                                  'db': self.db_name}, query.query
     )
-    
+
     resp = self.client.post("/metastore/table/%s/test/load" % self.db_name, {'path': data_path, 'overwrite': False},
                             follow=True)
     query = QueryHistory.objects.latest('id')
@@ -434,7 +435,7 @@ class TestMetastoreWithHadoop(BeeswaxSampleProvider):
       "LOAD DATA INPATH '%(data_path)s' INTO TABLE `%(db)s`.`test`" % {'data_path': data_path, 'db': self.db_name},
       query.query
     )
-    
+
     # Try it with partitions
     resp = self.client.post(
       "/metastore/table/%s/test_partitions/load" % self.db_name,
@@ -448,21 +449,21 @@ class TestMetastoreWithHadoop(BeeswaxSampleProvider):
         'data_path': data_path, 'db': self.db_name
       }
     )
-  
+
   def test_has_write_access_frontend(self):
     client = make_logged_in_client(username='write_access_frontend', groupname='write_access_frontend',
                                    is_superuser=False)
     grant_access("write_access_frontend", "write_access_frontend", "metastore")
     user = User.objects.get(username='write_access_frontend')
-    
+
     response = client.get("/metastore/databases")
-    assert not "Drop</button>" in response.content, response.content
-    assert not "Create a new database" in response.content, response.content
-    
+    assert "Drop</button>" not in response.content, response.content
+    assert "Create a new database" not in response.content, response.content
+
     response = client.get("/metastore/tables/")
-    assert not "Drop</button>" in response.content, response.content
-    assert not "Create a new table" in response.content, response.content
-    
+    assert "Drop</button>" not in response.content, response.content
+    assert "Create a new table" not in response.content, response.content
+
     # Add access
     group, created = Group.objects.get_or_create(name='write_access_frontend')
     perm, created = HuePermission.objects.get_or_create(app='metastore', action='write')
@@ -471,75 +472,75 @@ class TestMetastoreWithHadoop(BeeswaxSampleProvider):
     response = client.get("/metastore/databases")
     assert "Drop</button>" in response.content, response.content
     assert "Create a new database" in response.content, response.content
-    
+
     response = client.get("/metastore/tables/")
     assert "Drop</button>" in response.content, response.content
     assert "Create a new table" in response.content, response.content
-  
+
   def test_has_write_access_backend(self):
     client = make_logged_in_client(username='write_access_backend', groupname='write_access_backend',
                                    is_superuser=False)
     grant_access("write_access_backend", "write_access_backend", "metastore")
     grant_access("write_access_backend", "write_access_backend", "beeswax")
     user = User.objects.get(username='write_access_backend')
-    
+
     # Only fails if we were using Sentry and won't allow SELECT to user
     resp = _make_query(client, 'CREATE TABLE test_perm_1 (a int);', database=self.db_name)
     resp = wait_for_query_to_finish(client, resp, max=30.0)
-    
+
     def check(client, http_codes):
       resp = client.get('/metastore/tables/drop/%s' % self.db_name)
       assert resp.status_code in http_codes, resp.content
-      
+
       resp = client.post('/metastore/tables/drop/%s' % self.db_name, {u'table_selection': [u'test_perm_1']})
       assert resp.status_code in http_codes, resp.content
-    
+
     check(client, [301])  # Denied
-    
+
     # Add access
     group, created = Group.objects.get_or_create(name='write_access_backend')
     perm, created = HuePermission.objects.get_or_create(app='metastore', action='write')
     GroupPermission.objects.get_or_create(group=group, hue_permission=perm)
-    
+
     check(client, [200, 302])  # Ok
-  
+
   def test_alter_database(self):
     resp = self.client.post(reverse("metastore:get_database_metadata", kwargs={'database': self.db_name}))
     json_resp = json.loads(resp.content)
     assert 'data' in json_resp, json_resp
     assert 'parameters' in json_resp['data'], json_resp
-    assert not 'message=After Alter' in json_resp['data']['parameters'], json_resp
-    
+    assert 'message=After Alter' not in json_resp['data']['parameters'], json_resp
+
     # Alter message
     resp = self.client.post(reverse("metastore:alter_database", kwargs={'database': self.db_name}),
                             {'properties': json.dumps({'message': 'After Alter'})})
     json_resp = json.loads(resp.content)
     assert 0 == json_resp['status'], json_resp
     assert '{message=After Alter}' == json_resp['data']['parameters'], json_resp
-  
+
   def test_alter_table(self):
     resp = _make_query(self.client, "CREATE TABLE test_alter_table (a int) COMMENT 'Before Alter';",
                        database=self.db_name)
     resp = wait_for_query_to_finish(self.client, resp, max=30.0)
-    
+
     resp = self.client.get('/metastore/table/%s/test_alter_table' % self.db_name)
     assert 'test_alter_table', resp.content
     assert 'Before Alter', resp.content
-    
+
     # Alter name
     resp = self.client.post(reverse("metastore:alter_table",
                                     kwargs={'database': self.db_name, 'table': 'test_alter_table'}),
                             {'new_table_name': 'table_altered'})
     json_resp = json.loads(resp.content)
     assert 'table_altered' == json_resp['data']['name'], json_resp
-    
+
     # Alter comment
     resp = self.client.post(reverse("metastore:alter_table",
                                     kwargs={'database': self.db_name, 'table': 'table_altered'}),
                             {'comment': 'After Alter'})
     json_resp = json.loads(resp.content)
     assert 'After Alter' == json_resp['data']['comment'], json_resp
-    
+
     # Invalid table name returns error response
     resp = self.client.post(reverse("metastore:alter_table",
                                     kwargs={'database': self.db_name, 'table': 'table_altered'}),
@@ -547,15 +548,15 @@ class TestMetastoreWithHadoop(BeeswaxSampleProvider):
     json_resp = json.loads(resp.content)
     assert 1 == json_resp['status'], json_resp
     assert 'Failed to alter table' in json_resp['data'], json_resp
-  
+
   def test_alter_column(self):
     resp = _make_query(self.client, 'CREATE TABLE test_alter_column (before_alter int);', database=self.db_name)
     resp = wait_for_query_to_finish(self.client, resp, max=30.0)
-    
+
     resp = self.client.get('/metastore/table/%s/test_alter_column' % self.db_name)
     assert 'before_alter', resp.content
     assert 'int', resp.content
-    
+
     # Alter name, type and comment
     resp = self.client.post(
       reverse("metastore:alter_column", kwargs={'database': self.db_name, 'table': 'test_alter_column'}),
@@ -566,7 +567,7 @@ class TestMetastoreWithHadoop(BeeswaxSampleProvider):
     assert 'after_alter' == json_resp['data']['name'], json_resp
     assert 'string' == json_resp['data']['type'], json_resp
     assert 'alter comment' == json_resp['data']['comment'], json_resp
-    
+
     # Invalid column type returns error response
     resp = self.client.post(reverse("metastore:alter_column",
                                     kwargs={'database': self.db_name, 'table': 'test_alter_column'}),
@@ -584,7 +585,7 @@ class TestParser(object):
     column = {'name': name, 'type': type, 'comment': comment}
     parse_tree = parser.parse_column(name, type, comment)
     assert parse_tree == column
-  
+
   def test_parse_varchar(self):
     name = 'varchar'
     type = 'varchar(1000)'
@@ -592,7 +593,7 @@ class TestParser(object):
     column = {'name': name, 'type': type, 'comment': comment}
     parse_tree = parser.parse_column(name, type, comment)
     assert parse_tree == column
-  
+
   def test_parse_decimal(self):
     name = 'simple'
     type = 'decimal(12,2)'
@@ -600,7 +601,7 @@ class TestParser(object):
     column = {'name': name, 'type': type, 'comment': comment}
     parse_tree = parser.parse_column(name, type, comment)
     assert parse_tree == column
-  
+
   def test_parse_array(self):
     name = 'array'
     type = 'array<string>'
@@ -608,7 +609,7 @@ class TestParser(object):
     column = {'name': name, 'type': 'array', 'comment': comment, 'item': {'type': 'string'}}
     parse_tree = parser.parse_column(name, type, comment)
     assert parse_tree == column
-  
+
   def test_parse_map(self):
     name = 'map'
     type = 'map<string,int>'
@@ -616,7 +617,7 @@ class TestParser(object):
     column = {'name': name, 'type': 'map', 'comment': comment, 'key': {'type': 'string'}, 'value': {'type': 'int'}}
     parse_tree = parser.parse_column(name, type, comment)
     assert parse_tree == column
-  
+
   def test_parse_struct(self):
     name = 'struct'
     type = 'struct<name:string,age:int>'
@@ -627,7 +628,7 @@ class TestParser(object):
     }
     parse_tree = parser.parse_column(name, type, comment)
     assert parse_tree == column
-  
+
   def test_parse_nested(self):
     name = 'nested'
     type = 'array<struct<name:string,age:int>>'
@@ -638,7 +639,7 @@ class TestParser(object):
     }
     parse_tree = parser.parse_column(name, type, comment)
     assert parse_tree == column
-  
+
   def test_parse_nested_with_array(self):
     name = 'nested'
     type = ('struct<fieldname1:bigint,fieldname2:int,fieldname3:int,fieldname4:array<bigint>,'

+ 2 - 5
apps/metastore/src/metastore/urls.py

@@ -17,12 +17,9 @@
 
 import sys
 
-from metastore import views as metastore_views
+from django.urls import re_path
 
-if sys.version_info[0] > 2:
-  from django.urls import re_path
-else:
-  from django.conf.urls import url as re_path
+from metastore import views as metastore_views
 
 urlpatterns = [
   re_path(r'^$', metastore_views.index, name='index'),

+ 31 - 40
apps/metastore/src/metastore/views.py

@@ -15,47 +15,39 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from future import standard_library
-standard_library.install_aliases()
-from builtins import str
+import sys
 import json
 import logging
-import sys
-import urllib.request, urllib.parse, urllib.error
+import urllib.error
+import urllib.parse
+import urllib.request
+from builtins import str
 
 from django.db.models import Q
-from django.urls import reverse
 from django.shortcuts import redirect
+from django.urls import reverse
 from django.utils.functional import wraps
+from django.utils.translation import gettext as _
 from django.views.decorators.http import require_http_methods
 
-from desktop.conf import has_connectors
-from desktop.context_processors import get_app_name
-from desktop.lib.django_util import JsonResponse, render
-from desktop.lib.exceptions_renderable import PopupException
-from desktop.models import Document2, get_cluster_config, _get_apps
-
-from beeswax.design import hql_query
 from beeswax.common import find_compute
+from beeswax.design import hql_query
 from beeswax.models import SavedQuery
 from beeswax.server import dbms
 from beeswax.server.dbms import get_query_server_config
+from desktop.auth.backend import is_admin
+from desktop.conf import has_connectors
+from desktop.context_processors import get_app_name
+from desktop.lib.django_util import JsonResponse, render
+from desktop.lib.exceptions_renderable import PopupException
 from desktop.lib.view_util import location_to_url
-from metadata.conf import has_optimizer, has_catalog, get_optimizer_url, get_catalog_url
-from notebook.connectors.base import Notebook, QueryError
-from notebook.models import make_notebook
-
+from desktop.models import Document2, _get_apps, get_cluster_config
+from metadata.conf import get_catalog_url, get_optimizer_url, has_catalog, has_optimizer
 from metastore.conf import FORCE_HS2_METADATA
-from metastore.forms import LoadDataForm, DbForm
+from metastore.forms import DbForm, LoadDataForm
 from metastore.settings import DJANGO_APPS
-
-from desktop.auth.backend import is_admin
-
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
-
+from notebook.connectors.base import Notebook, QueryError
+from notebook.models import make_notebook
 
 LOG = logging.getLogger()
 SAVE_RESULTS_CTAS_TIMEOUT = 300         # seconds
@@ -84,6 +76,7 @@ def index(request):
 Database Views
 """
 
+
 def databases(request):
   search_filter = request.GET.get('filter', '')
 
@@ -235,13 +228,15 @@ def table_queries(request, database, table):
 """
 Table Views
 """
+
+
 def show_tables(request, database=None):
   cluster = _find_cluster(request)
 
   db = _get_db(user=request.user, cluster=cluster)
 
   if database is None:
-    database = 'default' # Assume always 'default'
+    database = 'default'  # Assume always 'default'
 
   if request.GET.get("format", "html") == "json":
     try:
@@ -259,7 +254,7 @@ def show_tables(request, database=None):
 
       search_filter = request.GET.get('filter', '')
 
-      tables = db.get_tables_meta(database=database, table_names=search_filter) # SparkSql returns []
+      tables = db.get_tables_meta(database=database, table_names=search_filter)  # SparkSql returns []
       table_names = [table['name'] for table in tables]
     except Exception as e:
       raise PopupException(_('Failed to retrieve tables for database: %s' % database), detail=e)
@@ -304,7 +299,7 @@ def get_table_metadata(request, database, table):
       'hdfs_link': table_metadata.hdfs_link,
       'is_view': table_metadata.is_view
     }
-  except:
+  except Exception:
     msg = "Cannot get metadata for table: `%s`.`%s`"
     LOG.exception(msg % (database, table))
     response['status'] = 1
@@ -347,7 +342,7 @@ def describe_table(request, database, table):
     if app_name != 'impala' and table.partition_keys:
       try:
         partitions = [_massage_partition(database, table, partition) for partition in db.get_partitions(database, table)]
-      except:
+      except Exception:
         LOG.exception('Table partitions could not be retrieved')
 
     return render(renderable, request, {
@@ -522,6 +517,7 @@ def read_table(request, database, table):
   except Exception as e:
     raise PopupException(_('Cannot read table'), detail=e)
 
+
 @check_has_write_access_permission
 def load_table(request, database, table):
   response = {'status': -1, 'data': 'None'}
@@ -552,10 +548,7 @@ def load_table(request, database, table):
         }
         query_history = db.load_data(database, table.name, form_data, design, generate_ddl_only=generate_ddl_only)
         if generate_ddl_only:
-          if sys.version_info[0] > 2:
-            last_executed = json.loads(request.POST.get('start_time'))
-          else:
-            last_executed = json.loads(request.POST.get('start_time'), '-1')
+          last_executed = json.loads(request.POST.get('start_time'))
           job = make_notebook(
             name=_('Load data in %s.%s') % (database, table.name),
             editor_type=source_type,
@@ -592,10 +585,8 @@ def load_table(request, database, table):
            'database': database,
            'app_name': 'beeswax'
        }, force_template=True).content
-    if sys.version_info[0] > 2:
-      response['data'] = popup.decode()
-    else:
-      response['data'] = popup
+
+    response['data'] = popup.decode()
 
   return JsonResponse(response)
 
@@ -622,7 +613,7 @@ def describe_partitions(request, database, table):
 
   try:
     partitions = db.get_partitions(database, table_obj, partition_spec, reverse_sort=reverse_sort)
-  except:
+  except Exception:
     LOG.exception('Table partitions could not be retrieved')
     partitions = []
   massaged_partitions = [_massage_partition(database, table_obj, partition) for partition in partitions]
@@ -763,7 +754,6 @@ def has_write_access(user):
   return is_admin(user) or user.has_hue_permission(action="write", app=DJANGO_APPS[0])
 
 
-
 def _get_db(user, source_type=None, cluster=None):
   if source_type is None:
     cluster_config = get_cluster_config(user)
@@ -786,6 +776,7 @@ def _find_cluster(request):
   cluster = find_compute(cluster=cluster, user=request.user, namespace_id=namespace_id, dialect=source_type)
   return cluster
 
+
 def _get_servername(db):
   if has_connectors():
     return db.client.query_server['server_name']

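A note on the load_table hunk above: on Py2 the rendered response content was already a byte-oriented str that json could serialize directly, while on Py3 render(...).content is always bytes, so the two branches collapse into a single .decode(). A minimal sketch, with a made-up popup standing in for the rendered template:

import json

popup = b'<div>Load data into default.customers</div>'  # render(...).content is bytes on Py3
response = {'status': 0, 'data': popup.decode()}  # decode to str before JSON serialization
print(json.dumps(response))
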
+ 10 - 12
apps/oozie/src/oozie/conf.py

@@ -15,21 +15,16 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import os.path
 import sys
+import os.path
+
+from django.utils.translation import gettext as _, gettext_lazy as _t
 
-from desktop.lib.conf import Config, coerce_bool
 from desktop.lib import paths
+from desktop.lib.conf import Config, coerce_bool
 from liboozie.conf import get_oozie_status
-
 from oozie.settings import NICE_NAME
 
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _, gettext_lazy as _t
-else:
-  from django.utils.translation import ugettext as _, ugettext_lazy as _t
-
-
 DEFINITION_XSLT_DIR = Config(
   key="definition_xslt_dir",
   default=os.path.join(os.path.dirname(__file__), "importlib", "xslt"),
@@ -61,7 +56,9 @@ LOCAL_SAMPLE_DATA_DIR = Config(
 REMOTE_SAMPLE_DIR = Config(
   key="remote_data_dir",
   default="/user/hue/oozie/workspaces",
-  help=_t("Location on HDFS where the Oozie workflows are stored. Parameters are $TIME and $USER, e.g. /user/$USER/hue/workspaces/workflow-$TIME")
+  help=_t(
+    "Location on HDFS where the Oozie workflows are stored. Parameters are $TIME and $USER, e.g. /user/$USER/hue/workspaces/workflow-$TIME"
+  ),
 )
 
 
@@ -69,6 +66,7 @@ def get_oozie_job_count():
   '''Returns the maximum of jobs fetched by the API depending on the Hue version'''
   return 100
 
+
 OOZIE_JOBS_COUNT = Config(
   key='oozie_jobs_count',
   dynamic_default=get_oozie_job_count,
@@ -76,14 +74,14 @@ OOZIE_JOBS_COUNT = Config(
  help=_t('Maximum number of Oozie workflows or coordinators or bundles to retrieve in one API call.')
 )
 
-ENABLE_V2 = Config( # Until Hue 4
+ENABLE_V2 = Config(  # Until Hue 4
   key='enable_v2',
   default=True,
   type=coerce_bool,
   help=_t('Use version 2 of Editor.')
 )
 
-ENABLE_CRON_SCHEDULING = Config( # Until Hue 3.8
+ENABLE_CRON_SCHEDULING = Config(  # Until Hue 3.8
   key='enable_cron_scheduling',
   default=True,
   type=coerce_bool,

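Worth calling out in the consolidated import above: the module keeps both gettext (_) and gettext_lazy (_t) on purpose. Config help texts are evaluated at import time, before any request locale exists, so they need the lazy variant. A stand-alone sketch; the settings.configure() call is only there so the snippet runs outside Hue:

from django.conf import settings

settings.configure(USE_I18N=False)  # only needed to run this sketch outside Hue

from django.utils.translation import gettext as _, gettext_lazy as _t

eager = _('Use version 2 of Editor.')  # resolved to a plain str immediately
lazy = _t('Use version 2 of Editor.')  # a lazy proxy, resolved when rendered
print(type(eager) is str, type(lazy) is str)  # True False
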
+ 5 - 12
apps/oozie/src/oozie/decorators.py

@@ -15,25 +15,18 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import sys
 import json
 import logging
-import sys
 
 from django.utils.functional import wraps
+from django.utils.translation import gettext as _
 
+from desktop.auth.backend import is_admin
 from desktop.conf import USE_NEW_EDITOR
 from desktop.lib.exceptions_renderable import PopupException
 from desktop.models import Document, Document2
-
-from oozie.models import Job, Node, Dataset
-
-from desktop.auth.backend import is_admin
-
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
-
+from oozie.models import Dataset, Job, Node
 
 LOG = logging.getLogger()
 
@@ -121,7 +114,7 @@ def check_editor_access_permission(view_func):
   return wraps(view_func)(decorate)
 
 
-## Oozie v1 below
+# Oozie v1 below
 
 
 def check_job_access_permission(exception_class=PopupException):

+ 35 - 16
apps/oozie/src/oozie/forms.py

@@ -15,30 +15,45 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from builtins import object
+import sys
 import logging
+from builtins import object
 from datetime import datetime, timedelta
-import sys
+from functools import partial
 from time import mktime, struct_time
 
 from django import forms
 from django.core.exceptions import ValidationError
 from django.forms.widgets import TextInput
-from functools import partial
+from django.utils.translation import gettext_lazy as _t
 
 from desktop.lib.django_forms import MultiForm, SplitDateTimeWidget
 from desktop.models import Document
-
 from oozie.conf import ENABLE_CRON_SCHEDULING
-from oozie.models import Workflow, Node, Java, Mapreduce, Streaming, Coordinator,\
-  Dataset, DataInput, DataOutput, Pig, Link, Hive, Sqoop, Ssh, Shell, DistCp, Fs,\
-  Email, SubWorkflow, Generic, Bundle, BundledCoordinator
-
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext_lazy as _t
-else:
-  from django.utils.translation import ugettext_lazy as _t
-
+from oozie.models import (
+  Bundle,
+  BundledCoordinator,
+  Coordinator,
+  DataInput,
+  DataOutput,
+  Dataset,
+  DistCp,
+  Email,
+  Fs,
+  Generic,
+  Hive,
+  Java,
+  Link,
+  Mapreduce,
+  Node,
+  Pig,
+  Shell,
+  Sqoop,
+  Ssh,
+  Streaming,
+  SubWorkflow,
+  Workflow,
+)
 
 LOG = logging.getLogger()
 
@@ -78,6 +93,7 @@ class ParameterForm(forms.Form):
     params = filter(lambda key: key in ParameterForm.NON_PARAMETERS, conf_dict.keys())
     return [{'name': name, 'value': conf_dict[name]} for name in params]
 
+
 class WorkflowForm(forms.ModelForm):
   class Meta(object):
     model = Workflow
@@ -97,6 +113,7 @@ class WorkflowForm(forms.ModelForm):
 
 SCHEMA_VERSION_CHOICES = ['0.4']
 
+
 class ImportWorkflowForm(WorkflowForm):
   definition_file = forms.FileField(label=_t("Local workflow.xml file"))
   resource_archive = forms.FileField(label=_t("Workflow resource archive (zip)"), required=False)
@@ -122,6 +139,7 @@ class NodeMetaForm(forms.ModelForm):
     model = Node
     exclude = ALWAYS_HIDE + ('name', 'description')
 
+
 class JavaForm(forms.ModelForm):
   class Meta(object):
     model = Java
@@ -285,6 +303,7 @@ class EmailForm(forms.ModelForm):
       'body': forms.Textarea(attrs={'class': 'span8'}),
     }
 
+
 class SubWorkflowForm(forms.ModelForm):
 
   def __init__(self, *args, **kwargs):
@@ -304,7 +323,7 @@ class SubWorkflowForm(forms.ModelForm):
   def clean_sub_workflow(self):
     try:
       return Workflow.objects.get(id=int(self.cleaned_data.get('sub_workflow')))
-    except:
+    except Exception:
       LOG.exception('The sub-workflow could not be found.')
       return None
 
@@ -565,6 +584,7 @@ class BundleForm(forms.ModelForm):
       'schema_version': forms.widgets.HiddenInput(),
     }
 
+
 class UpdateCoordinatorForm(forms.Form):
   endTime = forms.SplitDateTimeField(
     label='End Time', input_time_formats=[TIME_FORMAT], required=False, initial=datetime.today() + timedelta(days=3),
@@ -586,12 +606,11 @@ class UpdateCoordinatorForm(forms.Form):
     super(UpdateCoordinatorForm, self).__init__(*args, **kwargs)
 
     self.fields['endTime'].initial = datetime.fromtimestamp(mktime(oozie_coordinator.endTime))
-    if type(oozie_coordinator.pauseTime) == struct_time:
+    if type(oozie_coordinator.pauseTime) is struct_time:
       self.fields['pauseTime'].initial = datetime.fromtimestamp(mktime(oozie_coordinator.pauseTime))
     self.fields['concurrency'].initial = oozie_coordinator.concurrency
 
 
-
 def design_form_by_type(node_type, user, workflow):
   klass_form = _node_type_TO_FORM_CLS[node_type]
 

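The pauseTime change above (type(x) == struct_time becoming type(x) is struct_time) is Ruff's E721 rule, which recurs throughout this PR: classes are unique objects, so an exact-type check is naturally an identity check, whereas == routes through __eq__ and reads like a value comparison. Quick illustration:

from time import localtime, struct_time

pause_time = localtime()
print(type(pause_time) is struct_time)  # True: exact type, compared by identity
print(isinstance(pause_time, struct_time))  # preferred when subclasses should pass too
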
+ 6 - 10
apps/oozie/src/oozie/importlib/coordinators.py

@@ -15,24 +15,19 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import json
-import logging
 import os
-from lxml import etree
 import sys
+import json
+import logging
 
 from django.core import serializers
+from django.utils.translation import gettext as _
+from lxml import etree
 
 from oozie import conf
-from oozie.models import Workflow, Dataset, DataInput, DataOutput
+from oozie.models import DataInput, DataOutput, Dataset, Workflow
 from oozie.utils import oozie_to_django_datetime, oozie_to_hue_frequency
 
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
-
-
 LOG = logging.getLogger()
 
 OOZIE_NAMESPACES = ['uri:oozie:coordinator:0.1', 'uri:oozie:coordinator:0.2', 'uri:oozie:coordinator:0.3', 'uri:oozie:coordinator:0.4']
@@ -127,6 +122,7 @@ def _reconcile_datasets(coordinator, objects, root, namespace):
     dataoutput.save()
   # @TODO(abe): reconcile instance times
 
+
 def _set_properties(coordinator, root, namespace):
   namespaces = {
     'n': namespace

+ 22 - 32
apps/oozie/src/oozie/importlib/workflows.py

@@ -31,29 +31,22 @@ Action extensions are also versioned.
 Every action extension will have its own version via /xslt/<workflow version>/extensions/<name of extensions>.<version>.xslt
 """
 
-from builtins import str
-from past.builtins import basestring
-import json
-import logging
-from lxml import etree
 import os
 import re
 import sys
+import json
+import logging
+from builtins import str
 
 from django.core import serializers
 from django.utils.encoding import smart_str
+from django.utils.translation import gettext as _
+from lxml import etree
+from past.builtins import basestring
 
 from desktop.models import Document
-
-from oozie.conf import DEFINITION_XSLT_DIR, DEFINITION_XSLT2_DIR
-from oozie.models import Workflow, Node, Link, Start, End,\
-                         Decision, DecisionEnd, Fork, Join,\
-                         Kill
-
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
+from oozie.conf import DEFINITION_XSLT2_DIR, DEFINITION_XSLT_DIR
+from oozie.models import Decision, DecisionEnd, End, Fork, Join, Kill, Link, Node, Start, Workflow
 
 LOG = logging.getLogger()
 
@@ -552,7 +545,7 @@ def _preprocess_nodes(workflow, transformed_root, workflow_definition_root, node
     if 'cred' in action_el.attrib:
       for full_node in nodes:
         if full_node.name == action_el.attrib['name']:
-          full_node.credentials = [{"name": cred, "value": True} for cred in action_el.attrib['cred'].split(',')];
+          full_node.credentials = [{"name": cred, "value": True} for cred in action_el.attrib['cred'].split(',')]
 
   for full_node in nodes:
     if full_node.node_type == 'start':
@@ -703,13 +696,12 @@ def import_workflow_root(workflow, workflow_definition_root, metadata=None, fs=N
 
 def import_workflow(workflow, workflow_definition, metadata=None, fs=None):
   # Parse Workflow Definition
-  if sys.version_info[0] > 2:
-    # In Py3 anything like <?xml version="1.0" encoding="UTF-8"?> at the beginning
-    # of a workflow XML cannot be parsed via etree.fromstring(), since the
-    # workflow_definition string needs to be encoded.
-    workflow_definition_root = etree.XML(workflow_definition.encode())
-  else:
-    workflow_definition_root = etree.fromstring(workflow_definition)
+
+  # In Py3 anything like <?xml version="1.0" encoding="UTF-8"?> at the beginning
+  # of a workflow XML cannot be parsed via etree.fromstring(), since the
+  # workflow_definition string needs to be encoded.
+  workflow_definition_root = etree.XML(workflow_definition.encode())
+
   if workflow_definition_root is None:
     raise RuntimeError(
       _("Could not find any nodes in Workflow definition. Maybe it's malformed?"))
@@ -719,13 +711,12 @@ def import_workflow(workflow, workflow_definition, metadata=None, fs=None):
 
 def generate_v2_graph_nodes(workflow_definition):
   # Parse Workflow Definition
-  if sys.version_info[0] > 2:
-    # In Py3 anything like <?xml version="1.0" encoding="UTF-8"?> at the beginning
-    # of a workflow XML cannot be parsed via etree.fromstring(), since the
-    # workflow_definition string needs to be encoded.
-    workflow_definition_root = etree.XML(workflow_definition.encode())
-  else:
-    workflow_definition_root = etree.fromstring(workflow_definition)
+
+  # In Py3 anything like <?xml version="1.0" encoding="UTF-8"?> at the beginning
+  # of a workflow XML cannot be parsed via etree.fromstring(), since the
+  # workflow_definition string needs to be encoded.
+  workflow_definition_root = etree.XML(workflow_definition.encode())
+
   if workflow_definition_root is None:
     raise MalformedWfDefException()
 
@@ -748,13 +739,12 @@ def generate_v2_graph_nodes(workflow_definition):
 
   # Transform XML using XSLT
   transformed_root = transform(workflow_definition_root)
-  node_list = re.sub('[\s]', '', str(transformed_root))
+  node_list = re.sub(r'[\s]', '', str(transformed_root))
   node_list = json.loads(node_list)
 
   return [node for node in node_list if node]
 
 
-
 class MalformedWfDefException(Exception):
   pass
 

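The comment preserved in both hunks above is easy to demonstrate: lxml refuses to parse a Py3 str that carries an encoding declaration, which is why the definition is encoded to bytes and handed to etree.XML(). A small sketch; the workflow-app element is made up:

from lxml import etree

definition = '<?xml version="1.0" encoding="UTF-8"?><workflow-app name="demo"/>'

try:
  etree.fromstring(definition)  # str carrying an encoding declaration
except ValueError as e:
  print(e)  # Unicode strings with encoding declaration are not supported...

root = etree.XML(definition.encode())  # the bytes form parses fine
print(root.tag)  # workflow-app
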
+ 11 - 26
apps/oozie/src/oozie/management/commands/oozie_setup.py

@@ -15,35 +15,28 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import json
-import logging
 import os
-from lxml import etree
 import sys
+import json
+import logging
 
 from django.core import management
 from django.core.management.base import BaseCommand
 from django.db import transaction
+from django.utils.translation import gettext as _
+from lxml import etree
 
 from desktop.conf import USE_NEW_EDITOR
 from desktop.models import Directory, Document, Document2, Document2Permission
 from hadoop import cluster
 from liboozie.submittion import create_directories
 from notebook.models import make_notebook
-
-from useradmin.models import get_default_user_group, install_sample_user
-
-from oozie.conf import LOCAL_SAMPLE_DATA_DIR, LOCAL_SAMPLE_DIR, REMOTE_SAMPLE_DIR, ENABLE_V2
-from oozie.models import Workflow, Coordinator, Bundle
-from oozie.importlib.workflows import import_workflow_root
-from oozie.importlib.coordinators import import_coordinator_root
+from oozie.conf import ENABLE_V2, LOCAL_SAMPLE_DATA_DIR, LOCAL_SAMPLE_DIR, REMOTE_SAMPLE_DIR
 from oozie.importlib.bundles import import_bundle_root
-
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
-
+from oozie.importlib.coordinators import import_coordinator_root
+from oozie.importlib.workflows import import_workflow_root
+from oozie.models import Bundle, Coordinator, Workflow
+from useradmin.models import get_default_user_group, install_sample_user
 
 LOG = logging.getLogger()
 
@@ -71,8 +64,7 @@ class Command(BaseCommand):
           workflow.save()
           Workflow.objects.initialize(workflow)
           import_workflow_root(workflow=workflow, workflow_definition_root=workflow_root, metadata=metadata, fs=self.fs)
-          workflow.doc.all().delete() # Delete doc as it messes up the example sharing
-
+          workflow.doc.all().delete()  # Delete doc as it messes up the example sharing
 
   def _import_coordinators(self, directory):
 
@@ -93,7 +85,6 @@ class Command(BaseCommand):
           coordinator.save()
           import_coordinator_root(coordinator=coordinator, coordinator_definition_root=coordinator_root, metadata=metadata)
 
-
   def _import_bundles(self, directory):
 
     for example_directory_name in os.listdir(directory):
@@ -114,7 +105,6 @@ class Command(BaseCommand):
           bundle.save()
           import_bundle_root(bundle=bundle, bundle_definition_root=bundle_root, metadata=metadata)
 
-
   def _install_mapreduce_example(self):
     doc2 = None
     name = _('MapReduce Sleep Job')
@@ -275,7 +265,6 @@ class Command(BaseCommand):
 
     return doc2
 
-
   def _install_pyspark_example(self):
     doc2 = None
     name = _('PySpark Pi Estimator Job')
@@ -332,7 +321,6 @@ class Command(BaseCommand):
     unmanaged_dir = os.path.join(data_dir, 'unmanaged')
     self._import_workflows(unmanaged_dir, managed=False)
 
-
   def handle(self, *args, **options):
     self.user = install_sample_user()
     self.fs = cluster.get_hdfs()
@@ -369,10 +357,7 @@ class Command(BaseCommand):
 
     if ENABLE_V2.get():
       with transaction.atomic():
-        if sys.version_info[0] > 2:
-          management.call_command('loaddata', 'initial_oozie_examples.json', verbosity=2)
-        else:
-          management.call_command('loaddata', 'initial_oozie_examples.json', verbosity=2, commit=False)
+        management.call_command('loaddata', 'initial_oozie_examples.json', verbosity=2)
 
     # Install editor oozie examples without doc1 link
     LOG.info("Using Hue 4, will install oozie editor samples.")

+ 47 - 60
apps/oozie/src/oozie/models.py

@@ -15,63 +15,49 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from future import standard_library
-standard_library.install_aliases()
-from builtins import range
-from past.builtins import basestring
-from builtins import object
-import json
-import copy
-import logging
 import re
 import sys
+import copy
+import json
 import time
+import logging
 import zipfile
-
+from builtins import object, range
 from datetime import datetime, timedelta
-from string import Template
+from io import BytesIO as string_io
 from itertools import chain
+from string import Template
 
-from django.db import models, transaction
-from django.db.models import Q
-from django.urls import reverse
-from django.core.validators import RegexValidator
+import django.utils.timezone as dtz
 from django.contrib.contenttypes.fields import GenericRelation
 from django.contrib.contenttypes.models import ContentType
+from django.core.validators import RegexValidator
+from django.db import models, transaction
+from django.db.models import Q
 from django.forms.models import inlineformset_factory
-from django.utils.encoding import smart_str
-import django.utils.timezone as dtz
+from django.urls import reverse
+from django.utils.encoding import force_str, smart_str
+from django.utils.translation import gettext as _, gettext_lazy as _t
+from past.builtins import basestring
 
 from desktop.auth.backend import is_admin
-from desktop.log.access import access_warn
 from desktop.lib import django_mako
 from desktop.lib.exceptions_renderable import PopupException
 from desktop.lib.json_utils import JSONEncoderForHTML
+from desktop.log.access import access_warn
 from desktop.models import Document
 from hadoop.fs.exceptions import WebHdfsException
 from hadoop.fs.hadoopfs import Hdfs
-from liboozie.submittion import Submission
-from liboozie.submittion import create_directories
-from useradmin.models import User
-
+from liboozie.submittion import Submission, create_directories
 from oozie.conf import REMOTE_SAMPLE_DIR
-from oozie.utils import utc_datetime_format
 from oozie.timezones import TIMEZONES
-
-if sys.version_info[0] > 2:
-  from io import BytesIO as string_io
-  from django.utils.encoding import force_str
-  from django.utils.translation import gettext as _, gettext_lazy as _t
-else:
-  from cStringIO import StringIO as string_io
-  from django.utils.encoding import force_unicode as force_str
-  from django.utils.translation import ugettext as _, ugettext_lazy as _t
-
+from oozie.utils import utc_datetime_format
+from useradmin.models import User
 
 LOG = logging.getLogger()
 
 PATH_MAX = 512
-name_validator = RegexValidator(regex='^[a-zA-Z_][\-_a-zA-Z0-9]{1,39}$',
+name_validator = RegexValidator(regex=r'^[a-zA-Z_][\-_a-zA-Z0-9]{1,39}$',
                                 message=_('Enter a valid value: combination of 2 - 40 letters and digits starting by a letter'))
 # To sync in worklow.models.js
 DEFAULT_SLA = [
@@ -127,10 +113,10 @@ class Job(models.Model):
     db_index=True,
     verbose_name=_t('Owner'),
     help_text=_t('Person who can modify the job.')
-  ) # Deprecated
-  name = models.CharField(max_length=255, blank=False, validators=[name_validator], # Deprecated
+  )  # Deprecated
+  name = models.CharField(max_length=255, blank=False, validators=[name_validator],  # Deprecated
       help_text=_t('Name of the job, which must be unique per user.'), verbose_name=_t('Name'))
-  description = models.CharField(max_length=1024, blank=True, verbose_name=_t('Description'), # Deprecated
+  description = models.CharField(max_length=1024, blank=True, verbose_name=_t('Description'),  # Deprecated
                                  help_text=_t('The purpose of the job.'))
   last_modified = models.DateTimeField(auto_now=True, db_index=True, verbose_name=_t('Last modified'))
   schema_version = models.CharField(max_length=128, verbose_name=_t('Schema version'),
@@ -138,11 +124,11 @@ class Job(models.Model):
   deployment_dir = models.CharField(max_length=1024, blank=True, verbose_name=_t('HDFS deployment directory'),
                                     help_text=_t('The path on the HDFS where all the workflows and '
                                                 'dependencies must be uploaded.'))
-  is_shared = models.BooleanField(default=False, db_index=True, verbose_name=_t('Is shared'), # Deprecated
+  is_shared = models.BooleanField(default=False, db_index=True, verbose_name=_t('Is shared'),  # Deprecated
                                   help_text=_t('Enable other users to have access to this job.'))
   parameters = models.TextField(default='[{"name":"oozie.use.system.libpath","value":"true"}]', verbose_name=_t('Oozie parameters'),
                                 help_text=_t('Parameters used at the submission time (e.g. market=US, oozie.use.system.libpath=true).'))
-  is_trashed = models.BooleanField(default=False, db_index=True, verbose_name=_t('Is trashed'), blank=True, # Deprecated
+  is_trashed = models.BooleanField(default=False, db_index=True, verbose_name=_t('Is trashed'), blank=True,  # Deprecated
                                    help_text=_t('If this job is trashed.'))
   doc = GenericRelation(Document, related_query_name='oozie_doc')
   data = models.TextField(blank=True, default=json.dumps({}))  # e.g. data=json.dumps({'sla': [python data], ...})
@@ -231,7 +217,7 @@ class Job(models.Model):
     for param in self.get_parameters():
       params[param['name'].strip()] = param['value']
 
-    return  [{'name': name, 'value': value} for name, value in params.items()]
+    return [{'name': name, 'value': value} for name, value in params.items()]
 
   def can_read(self, user):
     try:
@@ -271,7 +257,7 @@ class Job(models.Model):
 
   @property
   def sla_enabled(self):
-    return self.sla[0]['value'] # #1 is enabled
+    return self.sla[0]['value']  # #1 is enabled
 
 
 class WorkflowManager(models.Manager):
@@ -331,8 +317,8 @@ class WorkflowManager(models.Manager):
   def destroy(self, workflow, fs):
     Submission(workflow.owner, workflow, fs, None, {}).remove_deployment_dir()
     try:
-      workflow.coordinator_set.update(workflow=None) # In Django 1.3 could do ON DELETE set NULL
-    except:
+      workflow.coordinator_set.update(workflow=None)  # In Django 1.3 could do ON DELETE set NULL
+    except Exception:
       LOG.exception('failed to destroy workflow')
 
     workflow.save()
@@ -371,7 +357,7 @@ class Workflow(Job):
     return json.loads(self.job_properties)
 
   def clone(self, fs, new_owner=None):
-    source_deployment_dir = self.deployment_dir # Needed
+    source_deployment_dir = self.deployment_dir  # Needed
     nodes = self.node_set.all()
     links = Link.objects.filter(parent__workflow=self)
 
@@ -458,7 +444,7 @@ class Workflow(Job):
 
     graph_edges = set([edge for node in self.node_set.all() for edge in node.get_children_links()])
 
-    return len(graph_edges - removed_edges) > 0 # Graph does not have unseen edges
+    return len(graph_edges - removed_edges) > 0  # Graph does not have unseen edges
 
   def find_parameters(self):
     params = set()
@@ -482,7 +468,7 @@ class Workflow(Job):
     """Return a flatten node list ordered by the hierarchy of the nodes in the workflow"""
     def flatten(nodes):
       flat = []
-      if type(nodes) == list:
+      if type(nodes) is list:
         for node in nodes:
           flat.extend(flatten(node))
       else:
@@ -512,7 +498,7 @@ class Workflow(Job):
       return reverse('oozie:edit_workflow', kwargs={'workflow': self.id}) + '#editWorkflow'
 
   def get_hierarchy(self):
-    node = Start.objects.get(workflow=self) # Uncached version of start.
+    node = Start.objects.get(workflow=self)  # Uncached version of start.
     kill = Kill.objects.get(workflow=node.workflow)
     # Special case: manage error email actions separately
     try:
@@ -531,7 +517,7 @@ class Workflow(Job):
     parents = node.get_parents()
 
     if isinstance(node, End):
-      return [] # Not returning the end node
+      return []  # Not returning the end node
     elif isinstance(node, Decision):
       children = node.get_children('start')
       return [[node] + [[self.get_hierarchy_rec(node=child) for child in children],
@@ -567,7 +553,7 @@ class Workflow(Job):
 
   @classmethod
   def gen_status_graph_from_xml(cls, user, oozie_workflow):
-    from oozie.importlib.workflows import import_workflow # Circular dependency
+    from oozie.importlib.workflows import import_workflow  # Circular dependency
 
     try:
       with transaction.atomic():
@@ -588,7 +574,7 @@ class Workflow(Job):
     if mapping is None:
       mapping = {}
     tmpl = 'editor/gen/workflow.xml.mako'
-    xml = re.sub(re.compile('\s*\n+', re.MULTILINE), '\n', django_mako.render_to_string(tmpl, {'workflow': self, 'mapping': mapping}))
+    xml = re.sub(re.compile('\\s*\n+', re.MULTILINE), '\n', django_mako.render_to_string(tmpl, {'workflow': self, 'mapping': mapping}))
     return force_str(xml)
 
   def compress(self, mapping=None, fp=string_io()):
@@ -632,7 +618,7 @@ class Workflow(Job):
   @property
   def credentials(self):
     sub_lists = [node.credentials for node in self.node_list if hasattr(node, 'credentials')]
-    return set([item['name'] for l in sub_lists for item in l if item['value']])
+    return set([item['name'] for sub in sub_lists for item in sub if item['value']])
 
 
 class Link(models.Model):
@@ -835,7 +821,7 @@ class Node(models.Model):
 
   @property
   def sla_enabled(self):
-    return self.sla[0]['value'] # #1 is enabled
+    return self.sla[0]['value']  # #1 is enabled
 
   @property
   def credentials(self):
@@ -1033,7 +1019,7 @@ class Hive(Action):
   params = models.TextField(
     default="[]",
     verbose_name=_t('Parameters'),
-    help_text=_t('The %(type)s parameters of the script. E.g. N=5, INPUT=${inputDir}')  % {'type': node_type.title()}
+    help_text=_t('The %(type)s parameters of the script. E.g. N=5, INPUT=${inputDir}') % {'type': node_type.title()}
   )
   files = models.TextField(default="[]", verbose_name=_t('Files'),
       help_text=_t('List of names or paths of files to be added to the distributed cache and the task running directory.'))
@@ -1126,7 +1112,7 @@ class Ssh(Action):
   command = models.CharField(max_length=256, verbose_name=_t('%(type)s command') % {'type': node_type.title()},
                              help_text=_t('The command that will be executed.'))
   params = models.TextField(default="[]", verbose_name=_t('Arguments'),
-                            help_text=_t('The arguments of the %(type)s command.')  % {'type': node_type.title()})
+                            help_text=_t('The arguments of the %(type)s command.') % {'type': node_type.title()})
   capture_output = models.BooleanField(
     default=False,
     verbose_name=_t('Capture output'),
@@ -1206,7 +1192,6 @@ class DistCp(Action):
                                           'Properties specified in the Job Properties element override properties specified in the '
                                           'files specified in the Job XML element.'))
 
-
   def get_properties(self):
     return json.loads(self.job_properties)
 
@@ -1239,7 +1224,6 @@ class Fs(Action):
   touchzs = models.TextField(default="[]", verbose_name=_t('Create or touch a file'), blank=True,
                             help_text=_t('Creates a zero length file in the specified path if none exists or touch it.'))
 
-
   def get_deletes(self):
     return json.loads(self.deletes)
 
@@ -1467,9 +1451,9 @@ DATASET_FREQUENCY = ['MINUTE', 'HOUR', 'DAY', 'MONTH', 'YEAR']
 class Coordinator(Job):
   frequency_number = models.SmallIntegerField(default=1, choices=FREQUENCY_NUMBERS, verbose_name=_t('Frequency number'),
                                               help_text=_t('The number of units of the rate at which '
-                                                           'data is periodically created.')) # unused
+                                                           'data is periodically created.'))  # unused
   frequency_unit = models.CharField(max_length=20, choices=FREQUENCY_UNITS, default='days', verbose_name=_t('Frequency unit'),
-                                    help_text=_t('The unit of the rate at which data is periodically created.')) # unused
+                                    help_text=_t('The unit of the rate at which data is periodically created.'))  # unused
   timezone = models.CharField(
     max_length=32,
     choices=TIMEZONES,
@@ -1532,7 +1516,7 @@ class Coordinator(Job):
     if mapping is None:
       mapping = {}
     tmpl = "editor/gen/coordinator.xml.mako"
-    return re.sub(re.compile('\s*\n+', re.MULTILINE), '\n',
+    return re.sub(re.compile('\\s*\n+', re.MULTILINE), '\n',
       django_mako.render_to_string(tmpl, {'coord': self, 'mapping': mapping})).encode('utf-8', 'xmlcharrefreplace')
 
   def clone(self, new_owner=None):
@@ -1601,7 +1585,7 @@ class Coordinator(Job):
     index = [prop['name'] for prop in props]
 
     for prop in self.coordinatorworkflow.get_parameters():
-      if not prop['name'] in index:
+      if prop['name'] not in index:
         props.append(prop)
         index.append(prop['name'])
 
@@ -1726,6 +1710,7 @@ class Coordinator(Job):
     if sys.version_info[0] < 3:
       manager_inheritance_from_future = True
 
+
 class DatasetManager(models.Manager):
   def can_read_or_exception(self, request, dataset_id):
     if dataset_id is None:
@@ -1897,7 +1882,7 @@ class Bundle(Job):
     tmpl = "editor/gen/bundle.xml.mako"
 
     return force_str(
-              re.sub(re.compile('\s*\n+', re.MULTILINE), '\n', django_mako.render_to_string(tmpl, {
+              re.sub(re.compile('\\s*\n+', re.MULTILINE), '\n', django_mako.render_to_string(tmpl, {
                 'bundle': self,
                 'mapping': mapping
            })))
@@ -1992,6 +1977,7 @@ class Bundle(Job):
     if sys.version_info[0] < 3:
       manager_inheritance_from_future = True
 
+
 class HistoryManager(models.Manager):
   def create_from_submission(self, submission):
     History.objects.create(submitter=submission.user,
@@ -2099,6 +2085,7 @@ def find_parameters(instance, fields=None):
 
   return params
 
+
 def find_json_parameters(fields):
   # To make smarter
   # Input is list of json dict

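The recurring '\s*\n+' to '\\s*\n+' and r'...' rewrites in this file are all the same Ruff/W605 fix: in Python 3, a backslash before a character that is not a recognized escape (\s, \-, ...) is deprecated in plain string literals. Raw strings and doubled backslashes spell the same regex, as a quick check shows:

import re

text = 'line one   \n\n  line two'
collapsed_raw = re.sub(re.compile(r'\s*\n+', re.MULTILINE), '\n', text)
collapsed_escaped = re.sub(re.compile('\\s*\n+', re.MULTILINE), '\n', text)
print(collapsed_raw == collapsed_escaped)  # True: two spellings of one pattern
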
+ 70 - 83
apps/oozie/src/oozie/models2.py

@@ -16,54 +16,43 @@
 # limitations under the License.
 
 from __future__ import division
-from builtins import str
-from past.builtins import basestring
-from builtins import object
-import json
-import logging
-import math
+
 import os
 import re
 import sys
+import json
+import math
 import time
 import uuid
-
+import logging
+from builtins import object, str
 from datetime import datetime, timedelta
-from dateutil.parser import parse
 from string import Template
 from xml.sax.saxutils import escape
 
-from django.urls import reverse
+from dateutil.parser import parse
 from django.db.models import Q
+from django.urls import reverse
+from django.utils.encoding import force_str
+from django.utils.translation import gettext as _
+from past.builtins import basestring
 
 from azure.abfs.__init__ import abfspath
-
 from desktop.conf import USE_DEFAULT_CONFIGURATION
 from desktop.lib import django_mako
 from desktop.lib.exceptions_renderable import PopupException
 from desktop.lib.i18n import smart_str
 from desktop.lib.json_utils import JSONEncoderForHTML
-from desktop.models import DefaultConfiguration, Document2, Document
-
-from hadoop.fs.hadoopfs import Hdfs
+from desktop.models import DefaultConfiguration, Document, Document2
 from hadoop.fs.exceptions import WebHdfsException
-
+from hadoop.fs.hadoopfs import Hdfs
 from liboozie.conf import SECURITY_ENABLED
 from liboozie.oozie_api import get_oozie
-from liboozie.submission2 import Submission
-from liboozie.submission2 import create_directories
+from liboozie.submission2 import Submission, create_directories
 from notebook.models import Notebook
-
 from oozie.conf import REMOTE_SAMPLE_DIR
-from oozie.utils import utc_datetime_format, UTC_TIME_FORMAT, convert_to_server_timezone
-from oozie.importlib.workflows import generate_v2_graph_nodes, MalformedWfDefException, InvalidTagWithNamespaceException
-
-if sys.version_info[0] > 2:
-  from django.utils.encoding import force_str
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.encoding import force_unicode as force_str
-  from django.utils.translation import ugettext as _
+from oozie.importlib.workflows import InvalidTagWithNamespaceException, MalformedWfDefException, generate_v2_graph_nodes
+from oozie.utils import UTC_TIME_FORMAT, convert_to_server_timezone, utc_datetime_format
 
 WORKFLOW_DEPTH_LIMIT = 24
 LOG = logging.getLogger()
@@ -80,7 +69,7 @@ class Job(object):
     if params.get('nominal_time') == '':
       params['nominal_time'] = datetime.today().strftime(UTC_TIME_FORMAT)
 
-    return  [{'name': name, 'value': value} for name, value in params.items() if with_lib_path or name != 'oozie.use.system.libpath']
+    return [{'name': name, 'value': value} for name, value in params.items() if with_lib_path or name != 'oozie.use.system.libpath']
 
   @classmethod
   def get_workspace(cls, user):
@@ -296,7 +285,7 @@ class Workflow(Job):
     try:
       _get_hierarchy_from_adj_list(adj_list, adj_list['start']['ok_to'], node_hierarchy)
     except WorkflowDepthReached:
-      LOG.warning("The Workflow: %s with id: %s, has reached the maximum allowed depth for Graph display " \
+      LOG.warning("The Workflow: %s with id: %s, has reached the maximum allowed depth for Graph display "
         % (oozie_workflow.appName, oozie_workflow.id))
       # Hide graph same as when total nodes > 30
       return {}
@@ -492,7 +481,7 @@ class Workflow(Job):
         [(workflow.uuid, Workflow(document=workflow, user=self.user)) for workflow in Document2.objects.filter(uuid__in=sub_wfs_ids)]
     )
 
-    xml = re.sub(re.compile('>\s*\n+', re.MULTILINE), '>\n', django_mako.render_to_string(tmpl, {
+    xml = re.sub(re.compile('>\\s*\n+', re.MULTILINE), '>\n', django_mako.render_to_string(tmpl, {
       'wf': self,
       'workflow': data['workflow'],
       'nodes': nodes,
@@ -555,7 +544,6 @@ class Workflow(Job):
         elif row['widgets'][0]['id'] == node_id:
           return row
 
-
     # Create wf data with above nodes
     return json.dumps({
       'layout': [{
@@ -626,7 +614,7 @@ def _update_adj_list(adj_list):
 
 def _dig_nodes(nodes, adj_list, user, wf_nodes, nodes_uuid_set):
   for node in nodes:
-    if type(node) != list:
+    if type(node) is not list:
       node = adj_list[node]
       if node['uuid'] not in nodes_uuid_set:
         properties = {}
@@ -698,9 +686,9 @@ def _dig_nodes(nodes, adj_list, user, wf_nodes, nodes_uuid_set):
 def _create_workflow_layout(nodes, adj_list, nodes_uuid_set, size=12):
   wf_rows = []
   for node in nodes:
-    if type(node) == list and len(node) == 1:
+    if type(node) is list and len(node) == 1:
       node = node[0]
-    if type(node) != list:
+    if type(node) is not list:
       _append_to_wf_rows(
         wf_rows, nodes_uuid_set, row_id=adj_list[node]['uuid'],
         row={
@@ -813,6 +801,7 @@ def _get_hierarchy_from_adj_list_helper(adj_list, curr_node, node_hierarchy, wor
     node_hierarchy.append(curr_node)
     return _get_hierarchy_from_adj_list_helper(adj_list, adj_list[curr_node]['ok_to'], node_hierarchy, workflow_depth - 1)
 
+
 def _create_graph_adjaceny_list(nodes):
   start_node = [node for node in nodes if node.get('node_type') == 'start'][0]
   adj_list = {'start': start_node}
@@ -846,11 +835,13 @@ class Node(object):
     if self.data['type'] == 'fork':
       links = [link for link in self.data['children'] if link['to'] in node_mapping]
       if len(links) != len(self.data['children']):
-        LOG.warning('Fork has some children links that do not exist, ignoring them: links %s, existing links %s, links %s, existing links %s' \
-                 % (len(links), len(self.data['children']), links, self.data['children']))
+        LOG.warning(
+          'Fork has some children links that do not exist, ignoring them: links %s, existing links %s, links %s, existing links %s'
+          % (len(links), len(self.data['children']), links, self.data['children'])
+        )
         self.data['children'] = links
 
-    if self.data['type'] == AltusAction.TYPE or (('altus' in mapping.get('cluster', '') and (self.data['type'] == SparkDocumentAction.TYPE \
+    if self.data['type'] == AltusAction.TYPE or (('altus' in mapping.get('cluster', '') and (self.data['type'] == SparkDocumentAction.TYPE
     or self.data['type'] == 'spark-document'))) or mapping.get('auto-cluster'):
       shell_command_name = self.data['name'] + '.sh'
       self.data['properties']['shell_command'] = shell_command_name
@@ -866,14 +857,14 @@ class Node(object):
       properties = notebook.get_data()['snippets'][0]['properties']
 
       self.data['properties']['main_class'] = properties['class']
-      self.data['properties']['app_jar'] = properties['app_jar'] # Not used here
+      self.data['properties']['app_jar'] = properties['app_jar']  # Not used here
       self.data['properties']['files'] = [{'value': f['path']} for f in properties['files']]
       self.data['properties']['arguments'] = [{'value': prop} for prop in properties['arguments']]
     elif self.data['type'] == SparkDocumentAction.TYPE or self.data['type'] == 'spark-document':
       notebook = Notebook(document=Document2.objects.get_by_uuid(user=self.user, uuid=self.data['properties']['uuid']))
       properties = notebook.get_data()['snippets'][0]['properties']
 
-      if self.data['type'] == 'spark-document': # Oozie Document Action
+      if self.data['type'] == 'spark-document':  # Oozie Document Action
         self.data['properties']['app_name'] = properties['app_name']
 
       self.data['properties']['class'] = properties['class']
@@ -893,7 +884,7 @@ class Node(object):
         self.data['properties']['parameters'] = []
       for param in action['variables']:
         self.data['properties']['parameters'].insert(0, {'value': '%(name)s=%(value)s' % param})
-      self.data['properties']['arguments'] = [] # Not Picked yet
+      self.data['properties']['arguments'] = []  # Not Picked yet
 
       job_properties = []
       for prop in action['properties']['hadoopProperties']:
@@ -944,7 +935,6 @@ class Node(object):
         self.data['properties']['files'].append({'value': shell_command})
         self.data['properties']['shell_command'] = Hdfs.basename(shell_command)
 
-
     elif self.data['type'] == MapReduceDocumentAction.TYPE:
       notebook = Notebook(document=Document2.objects.get_by_uuid(user=self.user, uuid=self.data['properties']['uuid']))
       action = notebook.get_data()['snippets'][0]
@@ -1090,9 +1080,9 @@ class Node(object):
       node_type = ShellAction.TYPE
     elif self.data['type'] == AltusAction.TYPE:
       node_type = ShellAction.TYPE
-    elif mapping.get('cluster') and 'document' in node_type: # Workflow
+    elif mapping.get('cluster') and 'document' in node_type:  # Workflow
       node_type = ShellAction.TYPE
-    elif mapping.get('auto-cluster') and 'document' in node_type: # Scheduled workflow
+    elif mapping.get('auto-cluster') and 'document' in node_type:  # Scheduled workflow
       node_type = ShellAction.TYPE
 
     return 'editor2/gen/workflow-%s.xml.mako' % node_type
@@ -1347,7 +1337,7 @@ class HiveAction(Action):
           'name': 'parameters',
           'label': _('Parameters'),
           'value': [],
-          'help_text': _('The %(type)s parameters of the script. E.g. N=5, INPUT=${inputDir}')  % {'type': TYPE.title()},
+          'help_text': _('The %(type)s parameters of the script. E.g. N=5, INPUT=${inputDir}') % {'type': TYPE.title()},
           'type': ''
      },
      # Common
@@ -1432,7 +1422,7 @@ class HiveServer2Action(Action):
           'name': 'parameters',
           'label': _('Parameters'),
           'value': [],
-          'help_text': _('The %(type)s parameters of the script. E.g. N=5, INPUT=${inputDir}')  % {'type': TYPE.title()},
+          'help_text': _('The %(type)s parameters of the script. E.g. N=5, INPUT=${inputDir}') % {'type': TYPE.title()},
           'type': ''
      },
      'arguments': {
@@ -1529,7 +1519,7 @@ def _get_impala_url():
 class ImpalaAction(HiveServer2Action):
   # Executed as shell action until Oozie supports an Impala Action
   TYPE = 'impala'
-  DEFAULT_CREDENTIALS = '' # None at this time, need to upload user keytab
+  DEFAULT_CREDENTIALS = ''  # None at this time, need to upload user keytab
 
   FIELDS = HiveServer2Action.FIELDS.copy()
   del FIELDS['jdbc_url']
@@ -2258,7 +2248,6 @@ class SparkAction(Action):
     return [cls.FIELDS['files'], cls.FIELDS['jars']]
 
 
-
 class AltusAction(Action):
   TYPE = 'altus'
   FIELDS = {
@@ -2441,7 +2430,7 @@ class HiveDocumentAction(Action):
 
 class ImpalaDocumentAction(HiveDocumentAction):
   TYPE = 'impala-document'
-  DEFAULT_CREDENTIALS = '' # None at this time, need to upload user keytab
+  DEFAULT_CREDENTIALS = ''  # None at this time, need to upload user keytab
 
   FIELDS = HiveServer2Action.FIELDS.copy()
   del FIELDS['jdbc_url']
@@ -2606,11 +2595,11 @@ class SparkDocumentAction(Action):
           'value': [],
           'help_text': _('Arguments, one by one, e.g. 1000, /path/a.')
      },
-     'parameters': { # For Oozie Action Document
+     'parameters': {  # For Oozie Action Document
           'name': 'parameters',
           'label': _('Parameters'),
           'value': [],
-          'help_text': _('The %(type)s parameters of the script. E.g. N=5, INPUT=${inputDir}')  % {'type': TYPE.title()},
+          'help_text': _('The %(type)s parameters of the script. E.g. N=5, INPUT=${inputDir}') % {'type': TYPE.title()},
           'type': ''
      },
      # Common
@@ -2668,7 +2657,7 @@ class PigDocumentAction(Action):
           'name': 'parameters',
           'label': _('Parameters'),
           'value': [],
-          'help_text': _('The %(type)s parameters of the script. E.g. N=5, INPUT=${inputDir}')  % {'type': TYPE.title()},
+          'help_text': _('The %(type)s parameters of the script. E.g. N=5, INPUT=${inputDir}') % {'type': TYPE.title()},
           'type': ''
      },
      # Common
@@ -2742,7 +2731,7 @@ class SqoopDocumentAction(Action):
           'name': 'parameters',
           'label': _('Parameters'),
           'value': [],
-          'help_text': _('The %(type)s parameters of the script. E.g. N=5, INPUT=${inputDir}')  % {'type': TYPE.title()},
+          'help_text': _('The %(type)s parameters of the script. E.g. N=5, INPUT=${inputDir}') % {'type': TYPE.title()},
           'type': ''
      },
      # Common
@@ -2816,7 +2805,7 @@ class DistCpDocumentAction(Action):
           'name': 'parameters',
           'label': _('Parameters'),
           'value': [],
-          'help_text': _('The %(type)s parameters of the script. E.g. N=5, INPUT=${inputDir}')  % {'type': TYPE.title()},
+          'help_text': _('The %(type)s parameters of the script. E.g. N=5, INPUT=${inputDir}') % {'type': TYPE.title()},
           'type': ''
      },
       # Common
@@ -2873,7 +2862,7 @@ class ShellDocumentAction(Action):
           'name': 'parameters',
           'label': _('Parameters'),
           'value': [],
-          'help_text': _('The %(type)s parameters of the script. E.g. N=5, INPUT=${inputDir}')  % {'type': TYPE.title()},
+          'help_text': _('The %(type)s parameters of the script. E.g. N=5, INPUT=${inputDir}') % {'type': TYPE.title()},
           'type': ''
      },
      # Common
@@ -2947,7 +2936,7 @@ class MapReduceDocumentAction(Action):
           'name': 'parameters',
           'label': _('Parameters'),
           'value': [],
-          'help_text': _('The %(type)s parameters of the script. E.g. N=5, INPUT=${inputDir}')  % {'type': TYPE.title()},
+          'help_text': _('The %(type)s parameters of the script. E.g. N=5, INPUT=${inputDir}') % {'type': TYPE.title()},
           'type': ''
      },
      # Common
@@ -3057,7 +3046,6 @@ for node in NODES.values():
   WORKFLOW_NODE_PROPERTIES.update(node.FIELDS)
 
 
-
 def find_parameters(instance, fields=None):
   """Find parameters in the given fields"""
   if fields is None:
@@ -3078,6 +3066,7 @@ def find_parameters(instance, fields=None):
 
   return params
 
+
 def find_json_parameters(fields):
   # Input is list of json dict
   params = []
@@ -3092,13 +3081,15 @@ def find_json_parameters(fields):
 
   return params
 
+
 def find_dollar_variables(text):
-  return re.findall('[^\n\\\\]\$([^\{ \'\"\-;\(\)]+)', text, re.MULTILINE)
+  return re.findall('[^\n\\\\]\\$([^\\{ \'\"\\-;\\(\\)]+)', text, re.MULTILINE)
+
 
 def find_dollar_braced_variables(text):
   vars = set()
 
-  for var in re.findall('\$\{([A-Za-z0-9:_-]+)\}', text, re.MULTILINE):
+  for var in re.findall(r'\$\{([A-Za-z0-9:_-]+)\}', text, re.MULTILINE):
     if ':' in var:
       var = var.split(':', 1)[1]
     vars.add(var)
@@ -3162,9 +3153,9 @@ def import_workflow_from_hue_3_7(old_wf):
     wf_rows = []
 
     for node in nodes:
-      if type(node) == list and len(node) == 1:
+      if type(node) is list and len(node) == 1:
         node = node[0]
-      if type(node) != list:
+      if type(node) is not list:
         wf_rows.append({
           "widgets": [{
             "size": size, "name": node.name.title(), "id": uuids[node.id], "widgetType": "%s-widget" % node.node_type,
@@ -3193,7 +3184,7 @@ def import_workflow_from_hue_3_7(old_wf):
                         "widgets": c['widgets'],
                         "columns": []
                       }
-                    for c in col] if type(col) == list else [{
+                    for c in col] if type(col) is list else [{
                         "id": str(uuid.uuid4()),
                         "widgets": col['widgets'],
                         "columns": []
@@ -3222,11 +3213,10 @@ def import_workflow_from_hue_3_7(old_wf):
   if wf_rows:
     data['layout'][0]['rows'] = [data['layout'][0]['rows'][0]] + wf_rows + [data['layout'][0]['rows'][-1]]
 
-
   # Content
   def _dig_nodes(nodes):
     for node in nodes:
-      if type(node) != list:
+      if type(node) is not list:
         properties = {}
         if '%s-widget' % node.node_type in NODES:
           properties = dict(NODES['%s-widget' % node.node_type].get_fields())
@@ -3368,7 +3358,6 @@ def import_workflow_from_hue_3_7(old_wf):
   return Workflow(data=json.dumps(data))
 
 
-
 class Coordinator(Job):
   XML_FILE_NAME = 'coordinator.xml'
   PROPERTY_APP_PATH = 'oozie.coord.application.path'
@@ -3388,7 +3377,7 @@ class Coordinator(Job):
           'id': None,
           'uuid': None,
           'name': 'My Schedule',
-          'variables': [], # Aka workflow parameters
+          'variables': [],  # Aka workflow parameters
           'properties': {
               'description': '',
               'deployment_dir': '',
@@ -3429,11 +3418,11 @@ class Coordinator(Job):
     _data = self.data.copy()
 
     start_date = [a for a in self._data['properties']['parameters'] if a['name'] == 'start_date']
-    if start_date and type(start_date[0]['value']) == datetime:
+    if start_date and type(start_date[0]['value']) is datetime:
       start_date[0]['value'] = start_date[0]['value'].strftime('%Y-%m-%dT%H:%M:%S')
 
     end_date = [a for a in self._data['properties']['parameters'] if a['name'] == 'end_date']
-    if end_date and type(end_date[0]['value']) == datetime:
+    if end_date and type(end_date[0]['value']) is datetime:
       end_date[0]['value'] = end_date[0]['value'].strftime('%Y-%m-%dT%H:%M:%S')
 
     return _data
@@ -3446,10 +3435,10 @@ class Coordinator(Job):
 
   @property
   def data(self):
-    if type(self._data['properties']['start']) != datetime and not '$' in self._data['properties']['start']:
+    if type(self._data['properties']['start']) is not datetime and '$' not in self._data['properties']['start']:
       self._data['properties']['start'] = parse(self._data['properties']['start'])
 
-    if type(self._data['properties']['end']) != datetime and not '$' in self._data['properties']['end']:
+    if type(self._data['properties']['end']) is not datetime and '$' not in self._data['properties']['end']:
       self._data['properties']['end'] = parse(self._data['properties']['end'])
 
     if self.document is not None:
@@ -3462,7 +3451,7 @@ class Coordinator(Job):
 
   @property
   def name(self):
-    from notebook.connectors.oozie_batch import OozieApi # Import dependency
+    from notebook.connectors.oozie_batch import OozieApi  # Import dependency
 
     if self.data['properties']['document']:
       return _("%s for %s") % (OozieApi.SCHEDULE_JOB_PREFIX, self.data['name'] or self.data['type'])
@@ -3495,7 +3484,7 @@ class Coordinator(Job):
 
     # Get missed params from workflow
     for prop in self.workflow.find_parameters():
-      if not prop in params:
+      if prop not in params:
         params.add(prop)
 
     # Remove the ones filled up by coordinator
@@ -3566,7 +3555,7 @@ class Coordinator(Job):
 
     tmpl = "editor2/gen/coordinator.xml.mako"
     return re.sub(
-      re.compile('\s*\n+', re.MULTILINE), '\n', django_mako.render_to_string(tmpl, {'coord': self, 'mapping': mapping})
+      re.compile('\\s*\n+', re.MULTILINE), '\n', django_mako.render_to_string(tmpl, {'coord': self, 'mapping': mapping})
     ).encode('utf-8', 'xmlcharrefreplace')
 
   def clear_workflow_params(self):
@@ -3575,7 +3564,7 @@ class Coordinator(Job):
 
   @property
   def properties(self):
-    props = [{'name': dataset['workflow_variable'], 'value': dataset['dataset_variable']} \
+    props = [{'name': dataset['workflow_variable'], 'value': dataset['dataset_variable']}
       for dataset in self.data['variables'] if dataset['dataset_type'] == 'parameter']
     props += self.data['properties']['parameters']
     return props
@@ -3681,7 +3670,6 @@ class Dataset(object):
     return not self.is_int(self.data['advanced_end_instance'])
 
 
-
 class Bundle(Job):
   XML_FILE_NAME = 'bundle.xml'
   PROPERTY_APP_PATH = 'oozie.bundle.application.path'
@@ -3735,7 +3723,7 @@ class Bundle(Job):
 
   @property
   def data(self):
-    if type(self._data['properties']['kickoff']) == str and sys.version_info[2] == 2:
+    if type(self._data['properties']['kickoff']) is str and sys.version_info[2] == 2:
       self._data['properties']['kickoff'] = parse(self._data['properties']['kickoff'])
 
     if self.document is not None:
@@ -3750,7 +3738,7 @@ class Bundle(Job):
     mapping.update(dict(list(self.get_coordinator_docs().values('uuid', 'name'))))
     tmpl = "editor2/gen/bundle.xml.mako"
     return force_str(
-              re.sub(re.compile('\s*\n+', re.MULTILINE), '\n', django_mako.render_to_string(tmpl, {
+              re.sub(re.compile('\\s*\n+', re.MULTILINE), '\n', django_mako.render_to_string(tmpl, {
                 'bundle': self,
                 'mapping': mapping
            })))
@@ -3794,7 +3782,7 @@ class Bundle(Job):
       params.add(param)
 
     # Remove the ones filled up by bundle
-    removable_names = [p['name']  for coord in self.data['coordinators'] for p in coord['properties']]
+    removable_names = [p['name'] for coord in self.data['coordinators'] for p in coord['properties']]
 
     return dict([(param, '') for param in list(params) if param not in removable_names])
 
@@ -3864,7 +3852,7 @@ def _save_workflow(workflow, layout, user, fs=None):
     dependency_docs = Document2.objects.filter(uuid__in=dependencies)
     workflow_doc.dependencies.add(*dependency_docs)
 
-  if workflow['properties'].get('imported'): # We convert from and old workflow format (3.8 <) to the latest
+  if workflow['properties'].get('imported'):  # We convert from and old workflow format (3.8 <) to the latest
     workflow['properties']['imported'] = False
     workflow_instance = Workflow(workflow=workflow, user=user)
     _import_workspace(fs, user, workflow_instance)
@@ -3922,7 +3910,6 @@ class WorkflowBuilder(object):
 
     return workflow_doc
 
-
   def create_notebook_workflow(self, user, notebook=None, name=None, managed=False):
     nodes = []
 
@@ -3945,7 +3932,7 @@ class WorkflowBuilder(object):
 
       nodes.append(node)
 
-    workflow_doc = self.get_workflow(nodes, name, notebook['uuid'], user, managed=managed) # TODO optionally save
+    workflow_doc = self.get_workflow(nodes, name, notebook['uuid'], user, managed=managed)  # TODO optionally save
 
     return workflow_doc
 
@@ -3963,7 +3950,7 @@ class WorkflowBuilder(object):
     return {
         u'id': node_id,
         u'name': u'hive-%s' % node_id[:4],
-        u"type": u"hive-document-widget", # if is_document_node else u"hive2-widget",
+        u"type": u"hive-document-widget",  # if is_document_node else u"hive2-widget",
         u'properties': {
             u'files': [],
             u'job_xml': u'',
@@ -4113,7 +4100,7 @@ class WorkflowBuilder(object):
 
     node = self._get_spark_node(node_id, user)
     node['properties']['class'] = snippet['properties']['class']
-    node['properties']['jars'] = snippet['properties']['app_jar'] # Not used, submission add it to oozie.libpath instead
+    node['properties']['jars'] = snippet['properties']['app_jar']  # Not used, submission add it to oozie.libpath instead
     node['properties']['files'] = [{'value': f['path']} for f in snippet['properties']['files']]
     node['properties']['spark_opts'] = snippet['properties']['spark_opts']
     node['properties']['spark_arguments'] = [{'value': f} for f in snippet['properties']['arguments']]
@@ -4427,7 +4414,7 @@ class WorkflowBuilder(object):
 
     node = self._get_java_node(node_id, credentials)
     node['properties']['main_class'] = snippet['properties']['class']
-    node['properties']['app_jar'] = snippet['properties']['app_jar'] # Not used, submission add it to oozie.libpath instead
+    node['properties']['app_jar'] = snippet['properties']['app_jar']  # Not used, submission add it to oozie.libpath instead
     node['properties']['files'] = [{'value': f['path']} for f in snippet['properties']['files']]
     node['properties']['arguments'] = [{'value': f} for f in snippet['properties']['arguments']]
 
@@ -4520,7 +4507,7 @@ class WorkflowBuilder(object):
     for node in nodes:
       data['workflow']['nodes'].append(node)
 
-      _prev_node['children'][0]['to'] = node['id'] # We link nodes
+      _prev_node['children'][0]['to'] = node['id']  # We link nodes
       _prev_node = node
 
     workflow_doc = _save_workflow(data['workflow'], {}, user)

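A spot-check of the re-escaped pattern in the find_dollar_braced_variables hunk above: behavior is unchanged, it still pulls ${...} names out of a script and strips an optional EL-style prefix. The sample input is made up:

import re

def find_dollar_braced_variables(text):
  vars = set()
  for var in re.findall(r'\$\{([A-Za-z0-9:_-]+)\}', text, re.MULTILINE):
    if ':' in var:
      var = var.split(':', 1)[1]
    vars.add(var)
  return vars

print(find_dollar_braced_variables('${inputDir}/part-${coord:nominalTime}'))  # {'inputDir', 'nominalTime'}
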
File diff suppressed because it is too large
+ 46 - 27
apps/oozie/src/oozie/models2_tests.py


+ 1 - 4
apps/oozie/src/oozie/old_migrations/0020_chg_large_varchars_to_textfields.py

@@ -6,10 +6,7 @@ from south.db import db
 from south.v2 import SchemaMigration
 from django.db import models
 
-if sys.version_info[0] > 2:
-  from django.utils.translation import gettext as _
-else:
-  from django.utils.translation import ugettext as _
+from django.utils.translation import gettext as _
 
 class Migration(SchemaMigration):
 

Some files were not shown because too many files changed in this diff