Browse Source

HUE-8737 [core] Futurize desktop/libs/notebook for Python 3.5

Ying Chen 6 years ago
parent
commit
0c24379e97
28 changed files with 215 additions and 130 deletions
  1. 13 5
      desktop/libs/notebook/src/notebook/api.py
  2. 7 6
      desktop/libs/notebook/src/notebook/connectors/altus.py
  3. 17 9
      desktop/libs/notebook/src/notebook/connectors/altus_adb.py
  4. 3 2
      desktop/libs/notebook/src/notebook/connectors/base.py
  5. 3 3
      desktop/libs/notebook/src/notebook/connectors/hbase.py
  6. 3 3
      desktop/libs/notebook/src/notebook/connectors/hive_metastore.py
  7. 30 19
      desktop/libs/notebook/src/notebook/connectors/hiveserver2.py
  8. 3 1
      desktop/libs/notebook/src/notebook/connectors/hiveserver2_tests.py
  9. 6 5
      desktop/libs/notebook/src/notebook/connectors/jdbc.py
  10. 4 3
      desktop/libs/notebook/src/notebook/connectors/jdbc_vertica.py
  11. 2 2
      desktop/libs/notebook/src/notebook/connectors/kafka.py
  12. 1 1
      desktop/libs/notebook/src/notebook/connectors/oozie_batch.py
  13. 5 3
      desktop/libs/notebook/src/notebook/connectors/rdbms.py
  14. 6 5
      desktop/libs/notebook/src/notebook/connectors/solr.py
  15. 1 1
      desktop/libs/notebook/src/notebook/connectors/spark_batch.py
  16. 7 5
      desktop/libs/notebook/src/notebook/connectors/spark_shell.py
  17. 1 0
      desktop/libs/notebook/src/notebook/connectors/spark_tests.py
  18. 14 7
      desktop/libs/notebook/src/notebook/connectors/sql_alchemy.py
  19. 3 2
      desktop/libs/notebook/src/notebook/connectors/sql_alchemy_tests.py
  20. 11 5
      desktop/libs/notebook/src/notebook/dashboard_api.py
  21. 13 12
      desktop/libs/notebook/src/notebook/decorators.py
  22. 1 0
      desktop/libs/notebook/src/notebook/management/commands/send_query_stats.py
  23. 23 14
      desktop/libs/notebook/src/notebook/models.py
  24. 2 1
      desktop/libs/notebook/src/notebook/monkey_patches.py
  25. 9 2
      desktop/libs/notebook/src/notebook/sql_utils.py
  26. 10 3
      desktop/libs/notebook/src/notebook/tasks.py
  27. 13 8
      desktop/libs/notebook/src/notebook/tests.py
  28. 4 3
      desktop/libs/notebook/src/notebook/views.py

+ 13 - 5
desktop/libs/notebook/src/notebook/api.py

@@ -15,11 +15,14 @@
 # See the License for the specific language governing permissions and
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # limitations under the License.
 
 
+from future import standard_library
+standard_library.install_aliases()
 import json
 import json
 import logging
 import logging
 
 
 import sqlparse
 import sqlparse
-import urllib
+import sys
+
 
 
 from django.urls import reverse
 from django.urls import reverse
 from django.db.models import Q
 from django.db.models import Q
@@ -42,6 +45,11 @@ from notebook.decorators import api_error_handler, check_document_access_permiss
 from notebook.models import escape_rows, make_notebook
 from notebook.models import escape_rows, make_notebook
 from notebook.views import upgrade_session_properties, get_api
 from notebook.views import upgrade_session_properties, get_api
 
 
+if sys.version_info[0] > 2:
+  import urllib.request, urllib.error
+  from urllib.parse import unquote as urllib_unquote
+else:
+  from urllib import unquote as urllib_unquote
 
 
 LOG = logging.getLogger(__name__)
 LOG = logging.getLogger(__name__)
 
 
@@ -145,7 +153,7 @@ def _execute_notebook(request, notebook, snippet):
           response['history_uuid'] = history.uuid
           response['history_uuid'] = history.uuid
           if notebook['isSaved']: # Keep track of history of saved queries
           if notebook['isSaved']: # Keep track of history of saved queries
             response['history_parent_uuid'] = history.dependencies.filter(type__startswith='query-').latest('last_modified').uuid
             response['history_parent_uuid'] = history.dependencies.filter(type__startswith='query-').latest('last_modified').uuid
-  except QueryError, ex: # We inject the history information from _historify() to the failed queries
+  except QueryError as ex: # We inject the history information from _historify() to the failed queries
     if response.get('history_id'):
     if response.get('history_id'):
       ex.extra['history_id'] = response['history_id']
       ex.extra['history_id'] = response['history_id']
     if response.get('history_uuid'):
     if response.get('history_uuid'):
@@ -520,7 +528,7 @@ def close_notebook(request):
       response['result'].append(get_api(request, session).close_session(session))
       response['result'].append(get_api(request, session).close_session(session))
     except QueryExpired:
     except QueryExpired:
       pass
       pass
-    except Exception, e:
+    except Exception as e:
       LOG.exception('Error closing session %s' % str(e))
       LOG.exception('Error closing session %s' % str(e))
 
 
   for snippet in [_s for _s in notebook['snippets'] if _s['type'] in ('hive', 'impala')]:
   for snippet in [_s for _s in notebook['snippets'] if _s['type'] in ('hive', 'impala')]:
@@ -531,7 +539,7 @@ def close_notebook(request):
         LOG.info('Not closing SQL snippet as still running.')
         LOG.info('Not closing SQL snippet as still running.')
     except QueryExpired:
     except QueryExpired:
       pass
       pass
-    except Exception, e:
+    except Exception as e:
       LOG.exception('Error closing statement %s' % str(e))
       LOG.exception('Error closing statement %s' % str(e))
 
 
   response['status'] = 0
   response['status'] = 0
@@ -638,7 +646,7 @@ def export_result(request):
   notebook = json.loads(request.POST.get('notebook', '{}'))
   notebook = json.loads(request.POST.get('notebook', '{}'))
   snippet = json.loads(request.POST.get('snippet', '{}'))
   snippet = json.loads(request.POST.get('snippet', '{}'))
   data_format = json.loads(request.POST.get('format', '"hdfs-file"'))
   data_format = json.loads(request.POST.get('format', '"hdfs-file"'))
-  destination = urllib.unquote(json.loads(request.POST.get('destination', '""')))
+  destination = urllib_unquote(json.loads(request.POST.get('destination', '""')))
   overwrite = json.loads(request.POST.get('overwrite', 'false'))
   overwrite = json.loads(request.POST.get('overwrite', 'false'))
   is_embedded = json.loads(request.POST.get('is_embedded', 'false'))
   is_embedded = json.loads(request.POST.get('is_embedded', 'false'))
   start_time = json.loads(request.POST.get('start_time', '-1'))
   start_time = json.loads(request.POST.get('start_time', '-1'))

+ 7 - 6
desktop/libs/notebook/src/notebook/connectors/altus.py

@@ -15,6 +15,7 @@
 # See the License for the specific language governing permissions and
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # limitations under the License.
 
 
+from builtins import object
 import logging
 import logging
 import json
 import json
 
 
@@ -61,15 +62,15 @@ def _exec(service, command, parameters=None):
     json_resp = resp.json()
     json_resp = resp.json()
     LOG.debug(json_resp)
     LOG.debug(json_resp)
     return json_resp
     return json_resp
-  except Exception, e:
+  except Exception as e:
     raise PopupException(e, title=_('Error accessing'))
     raise PopupException(e, title=_('Error accessing'))
 
 
 
 
-class IAMApi(): pass
+class IAMApi(object): pass
 # altus iam list-user-assigned-roles --user=crn:altus:ia
 # altus iam list-user-assigned-roles --user=crn:altus:ia
 
 
 
 
-class SdxApi():
+class SdxApi(object):
 
 
   def __init__(self, user): pass
   def __init__(self, user): pass
 
 
@@ -94,7 +95,7 @@ class SdxApi():
     return namespaces
     return namespaces
 
 
 
 
-class DataEngApi():
+class DataEngApi(object):
 
 
   def __init__(self, user): pass
   def __init__(self, user): pass
 
 
@@ -234,7 +235,7 @@ class DataEngApi():
     return _exec('dataeng', 'describeCluster')
     return _exec('dataeng', 'describeCluster')
 
 
 
 
-class AnalyticDbApi():
+class AnalyticDbApi(object):
 
 
   def __init__(self, user): pass
   def __init__(self, user): pass
 
 
@@ -277,7 +278,7 @@ class AnalyticDbApi():
     return _exec('dataware', 'describeCluster', {'clusterName': cluster_id})
     return _exec('dataware', 'describeCluster', {'clusterName': cluster_id})
 
 
 
 
-class DataWarehouse2Api():
+class DataWarehouse2Api(object):
 
 
   def __init__(self, user=None):
   def __init__(self, user=None):
     self._api_url = '%s/dw' % K8S.API_URL.get().rstrip('/')
     self._api_url = '%s/dw' % K8S.API_URL.get().rstrip('/')

+ 17 - 9
desktop/libs/notebook/src/notebook/connectors/altus_adb.py

@@ -15,9 +15,12 @@
 # See the License for the specific language governing permissions and
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # limitations under the License.
 
 
+from future import standard_library
+standard_library.install_aliases()
+from builtins import object
 import logging
 import logging
 import json
 import json
-import urllib
+import sys
 
 
 from django.urls import reverse
 from django.urls import reverse
 from django.utils.translation import ugettext as _
 from django.utils.translation import ugettext as _
@@ -25,6 +28,11 @@ from django.utils.translation import ugettext as _
 from notebook.connectors.altus import AnalyticDbApi
 from notebook.connectors.altus import AnalyticDbApi
 from notebook.connectors.base import Api, QueryError
 from notebook.connectors.base import Api, QueryError
 
 
+if sys.version_info[0] > 2:
+  import urllib.request, urllib.error
+  from urllib.parse import quote as urllib_quote, quote_plus as urllib_quote_plus
+else:
+  from urllib import quote as urllib_quote, quote_plus as urllib_quote_plus
 
 
 LOG = logging.getLogger(__name__)
 LOG = logging.getLogger(__name__)
 
 
@@ -88,7 +96,7 @@ class AltusAdbApi(Api):
     return HueQuery(self.user, cluster_crn=self.cluster_name).do_post(url_path=url_path)
     return HueQuery(self.user, cluster_crn=self.cluster_name).do_post(url_path=url_path)
 
 
 
 
-class HueQuery():
+class HueQuery(object):
   def __init__(self, user, cluster_crn):
   def __init__(self, user, cluster_crn):
     self.user = user
     self.user = user
     self.cluster_crn = cluster_crn
     self.cluster_crn = cluster_crn
@@ -156,7 +164,7 @@ class HueQuery():
               }
               }
             }'''
             }'''
 
 
-    payload = payload.replace('SELECT+*+FROM+web_logs+LIMIT+100', urllib.quote_plus(query.replace('\n', ' ')))
+    payload = payload.replace('SELECT+*+FROM+web_logs+LIMIT+100', urllib_quote_plus(query.replace('\n', ' ')))
 
 
     resp = self.api.submit_hue_query(self.cluster_crn, payload)
     resp = self.api.submit_hue_query(self.cluster_crn, payload)
 
 
@@ -176,8 +184,8 @@ class HueQuery():
 
 
     snippet['result']['handle'] = handle
     snippet['result']['handle'] = handle
 
 
-    notebook_payload = urllib.quote(json.dumps(notebook))
-    snippet_payload = urllib.quote(json.dumps(snippet))
+    notebook_payload = urllib_quote(json.dumps(notebook))
+    snippet_payload = urllib_quote(json.dumps(snippet))
 
 
     payload = '''
     payload = '''
             {
             {
@@ -243,10 +251,10 @@ class HueQuery():
 
 
     snippet['result']['handle'] = handle
     snippet['result']['handle'] = handle
 
 
-    notebook_payload = urllib.quote(json.dumps(notebook))
-    snippet_payload = urllib.quote(json.dumps(snippet))
-    rows_payload = urllib.quote(json.dumps(rows))
-    start_over_payload = urllib.quote(json.dumps(start_over))
+    notebook_payload = urllib_quote(json.dumps(notebook))
+    snippet_payload = urllib_quote(json.dumps(snippet))
+    rows_payload = urllib_quote(json.dumps(rows))
+    start_over_payload = urllib_quote(json.dumps(start_over))
 
 
     payload = '''
     payload = '''
             {
             {

+ 3 - 2
desktop/libs/notebook/src/notebook/connectors/base.py

@@ -15,6 +15,7 @@
 # See the License for the specific language governing permissions and
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # limitations under the License.
 
 
+from builtins import object
 import json
 import json
 import logging
 import logging
 import re
 import re
@@ -538,7 +539,7 @@ def _get_snippet_name(notebook, unique=False, table_format=False):
   return name
   return name
 
 
 
 
-class ExecutionWrapper():
+class ExecutionWrapper(object):
   def __init__(self, api, notebook, snippet, callback=None):
   def __init__(self, api, notebook, snippet, callback=None):
     self.api = api
     self.api = api
     self.notebook = notebook
     self.notebook = notebook
@@ -600,7 +601,7 @@ class ExecutionWrapper():
       self.api.close_statement(self.notebook, self.snippet)
       self.api.close_statement(self.notebook, self.snippet)
 
 
 
 
-class ResultWrapper():
+class ResultWrapper(object):
   def __init__(self, cols, rows, has_more):
   def __init__(self, cols, rows, has_more):
     self._cols = cols
     self._cols = cols
     self._rows = rows
     self._rows = rows

+ 3 - 3
desktop/libs/notebook/src/notebook/connectors/hbase.py

@@ -33,7 +33,7 @@ LOG = logging.getLogger(__name__)
 
 
 try:
 try:
   from hbase.api import HbaseApi
   from hbase.api import HbaseApi
-except ImportError, e:
+except ImportError as e:
   LOG.warn("HBase app is not enabled: %s" % e)
   LOG.warn("HBase app is not enabled: %s" % e)
 
 
 
 
@@ -41,7 +41,7 @@ def query_error_handler(func):
   def decorator(*args, **kwargs):
   def decorator(*args, **kwargs):
     try:
     try:
       return func(*args, **kwargs)
       return func(*args, **kwargs)
-    except Exception, e:
+    except Exception as e:
       message = force_unicode(str(e))
       message = force_unicode(str(e))
       raise QueryError(message)
       raise QueryError(message)
   return decorator
   return decorator
@@ -67,7 +67,7 @@ class HBaseApi(Api):
         response['columns'] = []
         response['columns'] = []
       else:
       else:
         raise PopupException('Could not find column `%s`.`%s`.`%s`' % (database, table, column))
         raise PopupException('Could not find column `%s`.`%s`.`%s`' % (database, table, column))
-    except Exception, e:
+    except Exception as e:
       LOG.warn('Autocomplete data fetching error: %s' % e)
       LOG.warn('Autocomplete data fetching error: %s' % e)
       response['code'] = 500
       response['code'] = 500
       response['error'] = e.message
       response['error'] = e.message

+ 3 - 3
desktop/libs/notebook/src/notebook/connectors/hive_metastore.py

@@ -36,7 +36,7 @@ try:
   from beeswax.api import _autocomplete
   from beeswax.api import _autocomplete
   from beeswax.server import dbms
   from beeswax.server import dbms
   from beeswax.server.dbms import get_query_server_config, QueryServerException
   from beeswax.server.dbms import get_query_server_config, QueryServerException
-except ImportError, e:
+except ImportError as e:
   LOG.warn('Hive and HiveMetastoreServer interfaces are not enabled: %s' % e)
   LOG.warn('Hive and HiveMetastoreServer interfaces are not enabled: %s' % e)
   hive_settings = None
   hive_settings = None
 
 
@@ -45,13 +45,13 @@ def query_error_handler(func):
   def decorator(*args, **kwargs):
   def decorator(*args, **kwargs):
     try:
     try:
       return func(*args, **kwargs)
       return func(*args, **kwargs)
-    except StructuredException, e:
+    except StructuredException as e:
       message = force_unicode(str(e))
       message = force_unicode(str(e))
       if 'timed out' in message:
       if 'timed out' in message:
         raise OperationTimeout(e)
         raise OperationTimeout(e)
       else:
       else:
         raise QueryError(message)
         raise QueryError(message)
-    except QueryServerException, e:
+    except QueryServerException as e:
       message = force_unicode(str(e))
       message = force_unicode(str(e))
       if 'Invalid query handle' in message or 'Invalid OperationHandle' in message:
       if 'Invalid query handle' in message or 'Invalid OperationHandle' in message:
         raise QueryExpired(e)
         raise QueryExpired(e)

+ 30 - 19
desktop/libs/notebook/src/notebook/connectors/hiveserver2.py

@@ -15,12 +15,18 @@
 # See the License for the specific language governing permissions and
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # limitations under the License.
 
 
+from __future__ import division
+from future import standard_library
+standard_library.install_aliases()
+from builtins import next
+from past.utils import old_div
+from builtins import object
 import binascii
 import binascii
 import copy
 import copy
 import json
 import json
 import logging
 import logging
 import re
 import re
-import urllib
+import sys
 
 
 from django.urls import reverse
 from django.urls import reverse
 from django.utils.translation import ugettext as _
 from django.utils.translation import ugettext as _
@@ -39,6 +45,11 @@ from metadata.optimizer_client import OptimizerApi
 
 
 from notebook.connectors.base import Api, QueryError, QueryExpired, OperationTimeout, OperationNotSupported, _get_snippet_name, Notebook, get_interpreter
 from notebook.connectors.base import Api, QueryError, QueryExpired, OperationTimeout, OperationNotSupported, _get_snippet_name, Notebook, get_interpreter
 
 
+if sys.version_info[0] > 2:
+  import urllib.request, urllib.error
+  from urllib.parse import quote as urllib_quote, unquote as urllib_unquote
+else:
+  from urllib import quote as urllib_quote, unquote as urllib_unquote
 
 
 LOG = logging.getLogger(__name__)
 LOG = logging.getLogger(__name__)
 
 
@@ -53,7 +64,7 @@ try:
   from beeswax.server import dbms
   from beeswax.server import dbms
   from beeswax.server.dbms import get_query_server_config, QueryServerException
   from beeswax.server.dbms import get_query_server_config, QueryServerException
   from beeswax.views import parse_out_jobs
   from beeswax.views import parse_out_jobs
-except ImportError, e:
+except ImportError as e:
   LOG.warn('Hive and HiveServer2 interfaces are not enabled: %s' % e)
   LOG.warn('Hive and HiveServer2 interfaces are not enabled: %s' % e)
   hive_settings = None
   hive_settings = None
 
 
@@ -61,7 +72,7 @@ try:
   from impala import api   # Force checking if Impala is enabled
   from impala import api   # Force checking if Impala is enabled
   from impala.conf import CONFIG_WHITELIST as impala_settings
   from impala.conf import CONFIG_WHITELIST as impala_settings
   from impala.server import get_api as get_impalad_api, ImpalaDaemonApiException, _get_impala_server_url
   from impala.server import get_api as get_impalad_api, ImpalaDaemonApiException, _get_impala_server_url
-except ImportError, e:
+except ImportError as e:
   LOG.warn("Impala app is not enabled")
   LOG.warn("Impala app is not enabled")
   impala_settings = None
   impala_settings = None
 
 
@@ -70,7 +81,7 @@ try:
   from jobbrowser.conf import ENABLE_QUERY_BROWSER
   from jobbrowser.conf import ENABLE_QUERY_BROWSER
   from jobbrowser.apis.query_api import _get_api
   from jobbrowser.apis.query_api import _get_api
   has_query_browser = ENABLE_QUERY_BROWSER.get()
   has_query_browser = ENABLE_QUERY_BROWSER.get()
-except (AttributeError, ImportError), e:
+except (AttributeError, ImportError) as e:
   LOG.warn("Job Browser app is not enabled")
   LOG.warn("Job Browser app is not enabled")
   has_query_browser = False
   has_query_browser = False
 
 
@@ -82,13 +93,13 @@ def query_error_handler(func):
   def decorator(*args, **kwargs):
   def decorator(*args, **kwargs):
     try:
     try:
       return func(*args, **kwargs)
       return func(*args, **kwargs)
-    except StructuredException, e:
+    except StructuredException as e:
       message = force_unicode(str(e))
       message = force_unicode(str(e))
       if 'timed out' in message:
       if 'timed out' in message:
         raise OperationTimeout(e)
         raise OperationTimeout(e)
       else:
       else:
         raise QueryError(message)
         raise QueryError(message)
-    except QueryServerException, e:
+    except QueryServerException as e:
       message = force_unicode(str(e))
       message = force_unicode(str(e))
       if 'Invalid query handle' in message or 'Invalid OperationHandle' in message:
       if 'Invalid query handle' in message or 'Invalid OperationHandle' in message:
         raise QueryExpired(e)
         raise QueryExpired(e)
@@ -196,7 +207,7 @@ class HS2Api(Api):
     try:
     try:
       decoded_guid = session.get_handle().sessionId.guid
       decoded_guid = session.get_handle().sessionId.guid
       response['session_id'] = unpack_guid(decoded_guid)
       response['session_id'] = unpack_guid(decoded_guid)
-    except Exception, e:
+    except Exception as e:
       LOG.warn('Failed to decode session handle: %s' % e)
       LOG.warn('Failed to decode session handle: %s' % e)
 
 
     if lang == 'impala' and session:
     if lang == 'impala' and session:
@@ -254,7 +265,7 @@ class HS2Api(Api):
         if query.database and not statement['statement'].lower().startswith('set'):
         if query.database and not statement['statement'].lower().startswith('set'):
           db.use(query.database)
           db.use(query.database)
       handle = db.client.query(query, with_multiple_session=True) # Note: with_multiple_session currently ignored
       handle = db.client.query(query, with_multiple_session=True) # Note: with_multiple_session currently ignored
-    except QueryServerException, ex:
+    except QueryServerException as ex:
       raise QueryError(ex.message, handle=statement)
       raise QueryError(ex.message, handle=statement)
 
 
     # All good
     # All good
@@ -302,7 +313,7 @@ class HS2Api(Api):
     handle = self._get_handle(snippet)
     handle = self._get_handle(snippet)
     try:
     try:
       results = db.fetch(handle, start_over=start_over, rows=rows)
       results = db.fetch(handle, start_over=start_over, rows=rows)
-    except QueryServerException, ex:
+    except QueryServerException as ex:
       if re.search('(client inactivity)|(Invalid query handle)', str(ex)) and ex.message:
       if re.search('(client inactivity)|(Invalid query handle)', str(ex)) and ex.message:
         raise QueryExpired(message=ex.message)
         raise QueryExpired(message=ex.message)
       else:
       else:
@@ -378,7 +389,7 @@ class HS2Api(Api):
       try:
       try:
         handle = self._get_handle(snippet)
         handle = self._get_handle(snippet)
         db.close_operation(handle)
         db.close_operation(handle)
-      except Exception, e:
+      except Exception as e:
         if 'no valid handle' in str(e):
         if 'no valid handle' in str(e):
           return {'status': -1}  # skipped
           return {'status': -1}  # skipped
         else:
         else:
@@ -409,7 +420,7 @@ class HS2Api(Api):
       started = logs.count('Starting Job')
       started = logs.count('Starting Job')
       ended = logs.count('Ended Job')
       ended = logs.count('Ended Job')
 
 
-      progress = int((started + ended) * 100 / (total * 2))
+      progress = int(old_div((started + ended) * 100, (total * 2)))
       return max(progress, 5)  # Return 5% progress as a minimum
       return max(progress, 5)  # Return 5% progress as a minimum
     elif snippet['type'] == 'impala':
     elif snippet['type'] == 'impala':
       match = re.findall('(\d+)% Complete', logs, re.MULTILINE)
       match = re.findall('(\d+)% Complete', logs, re.MULTILINE)
@@ -470,7 +481,7 @@ class HS2Api(Api):
     try:
     try:
       db = self._get_db(snippet, async, interpreter=self.interpreter)
       db = self._get_db(snippet, async, interpreter=self.interpreter)
       return _get_sample_data(db, database, table, column, async, operation=operation, cluster=self.interpreter)
       return _get_sample_data(db, database, table, column, async, operation=operation, cluster=self.interpreter)
-    except QueryServerException, ex:
+    except QueryServerException as ex:
       raise QueryError(ex.message)
       raise QueryError(ex.message)
 
 
 
 
@@ -486,7 +497,7 @@ class HS2Api(Api):
       db.use(query.database)
       db.use(query.database)
 
 
       explanation = db.explain(query)
       explanation = db.explain(query)
-    except QueryServerException, ex:
+    except QueryServerException as ex:
       raise QueryError(ex.message)
       raise QueryError(ex.message)
 
 
     return {
     return {
@@ -506,7 +517,7 @@ class HS2Api(Api):
 
 
     upload(target_file, handle, self.request.user, db, self.request.fs, max_rows=max_rows, max_bytes=max_bytes)
     upload(target_file, handle, self.request.user, db, self.request.fs, max_rows=max_rows, max_bytes=max_bytes)
 
 
-    return '/filebrowser/view=%s' % urllib.quote(urllib.quote(target_file.encode('utf-8'), safe=SAFE_CHARACTERS_URI_COMPONENTS)) # Quote twice, because of issue in the routing on client
+    return '/filebrowser/view=%s' % urllib_quote(urllib_quote(target_file.encode('utf-8'), safe=SAFE_CHARACTERS_URI_COMPONENTS)) # Quote twice, because of issue in the routing on client
 
 
 
 
   def export_data_as_table(self, notebook, snippet, destination, is_temporary=False, location=None):
   def export_data_as_table(self, notebook, snippet, destination, is_temporary=False, location=None):
@@ -560,7 +571,7 @@ DROP TABLE IF EXISTS `%(table)s`;
       'location': self.request.fs.netnormpath(destination),
       'location': self.request.fs.netnormpath(destination),
       'hql': query.hql_query
       'hql': query.hql_query
     }
     }
-    success_url = '/filebrowser/view=%s' % urllib.quote(destination.encode('utf-8'), safe=SAFE_CHARACTERS_URI_COMPONENTS)
+    success_url = '/filebrowser/view=%s' % urllib_quote(destination.encode('utf-8'), safe=SAFE_CHARACTERS_URI_COMPONENTS)
 
 
     return hql, success_url
     return hql, success_url
 
 
@@ -680,7 +691,7 @@ DROP TABLE IF EXISTS `%(table)s`;
       db = self._get_db(snippet, interpreter=self.interpreter)
       db = self._get_db(snippet, interpreter=self.interpreter)
 
 
     if partition_spec is not None:
     if partition_spec is not None:
-      decoded_spec = urllib.unquote(partition_spec)
+      decoded_spec = urllib_unquote(partition_spec)
       return db.get_partition(database, table.name, decoded_spec, generate_ddl_only=True)
       return db.get_partition(database, table.name, decoded_spec, generate_ddl_only=True)
     else:
     else:
       return db.get_select_star_query(database, table, limit=100)
       return db.get_select_star_query(database, table, limit=100)
@@ -695,7 +706,7 @@ DROP TABLE IF EXISTS `%(table)s`;
     except binascii.Error:
     except binascii.Error:
       LOG.warn('Handle already base 64 decoded')
       LOG.warn('Handle already base 64 decoded')
 
 
-    for key in handle.keys():
+    for key in list(handle.keys()):
       if key not in ('log_context', 'secret', 'has_result_set', 'operation_type', 'modified_row_count', 'guid'):
       if key not in ('log_context', 'secret', 'has_result_set', 'operation_type', 'modified_row_count', 'guid'):
         handle.pop(key)
         handle.pop(key)
 
 
@@ -801,7 +812,7 @@ DROP TABLE IF EXISTS `%(table)s`;
     if 'result' in snippet and 'handle' in snippet['result'] and 'guid' in snippet['result']['handle']:
     if 'result' in snippet and 'handle' in snippet['result'] and 'guid' in snippet['result']['handle']:
       try:
       try:
         guid = unpack_guid_base64(snippet['result']['handle']['guid'])
         guid = unpack_guid_base64(snippet['result']['handle']['guid'])
-      except Exception, e:
+      except Exception as e:
         LOG.warn('Failed to decode operation handle guid: %s' % e)
         LOG.warn('Failed to decode operation handle guid: %s' % e)
     else:
     else:
       LOG.warn('Snippet does not contain a valid result handle, cannot extract Impala query ID.')
       LOG.warn('Snippet does not contain a valid result handle, cannot extract Impala query ID.')
@@ -814,7 +825,7 @@ DROP TABLE IF EXISTS `%(table)s`;
     try:
     try:
       query_profile = api.get_query_profile(query_id)
       query_profile = api.get_query_profile(query_id)
       profile = query_profile.get('profile')
       profile = query_profile.get('profile')
-    except (RestException, ImpalaDaemonApiException), e:
+    except (RestException, ImpalaDaemonApiException) as e:
       raise PopupException(_("Failed to get query profile from Impala Daemon server: %s") % e)
       raise PopupException(_("Failed to get query profile from Impala Daemon server: %s") % e)
 
 
     if not profile:
     if not profile:

+ 3 - 1
desktop/libs/notebook/src/notebook/connectors/hiveserver2_tests.py

@@ -16,6 +16,8 @@
 # See the License for the specific language governing permissions and
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # limitations under the License.
 
 
+from builtins import next
+from builtins import object
 import json
 import json
 import logging
 import logging
 import re
 import re
@@ -47,7 +49,7 @@ from beeswax.test_base import BeeswaxSampleProvider, get_query_server_config, is
 LOG = logging.getLogger(__name__)
 LOG = logging.getLogger(__name__)
 
 
 
 
-class TestApi():
+class TestApi(object):
 
 
   NOTEBOOK_JSON = """
   NOTEBOOK_JSON = """
     {
     {

+ 6 - 5
desktop/libs/notebook/src/notebook/connectors/jdbc.py

@@ -15,6 +15,7 @@
 # See the License for the specific language governing permissions and
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # limitations under the License.
 
 
+from builtins import object
 import logging
 import logging
 import sys
 import sys
 
 
@@ -38,9 +39,9 @@ def query_error_handler(func):
   def decorator(*args, **kwargs):
   def decorator(*args, **kwargs):
     try:
     try:
       return func(*args, **kwargs)
       return func(*args, **kwargs)
-    except AuthenticationRequired, e:
+    except AuthenticationRequired as e:
       raise e
       raise e
-    except Exception, e:
+    except Exception as e:
       message = force_unicode(smart_str(e))
       message = force_unicode(smart_str(e))
       if 'error occurred while trying to connect to the Java server' in message:
       if 'error occurred while trying to connect to the Java server' in message:
         raise QueryError, _('%s: is the DB Proxy server running?') % message, sys.exc_info()[2]
         raise QueryError, _('%s: is the DB Proxy server running?') % message, sys.exc_info()[2]
@@ -182,7 +183,7 @@ class JdbcApi(Api):
     return Assist(db)
     return Assist(db)
 
 
 
 
-class Assist():
+class Assist(object):
 
 
   def __init__(self, db):
   def __init__(self, db):
     self.db = db
     self.db = db
@@ -214,7 +215,7 @@ class Assist():
     #response['columns'] = []
     #response['columns'] = []
     return query_and_fetch(self.db, 'SELECT %s FROM %s.%s limit 100' % (column, database, table))
     return query_and_fetch(self.db, 'SELECT %s FROM %s.%s limit 100' % (column, database, table))
 
 
-class FixedResultSet():
+class FixedResultSet(object):
 
 
   def __init__(self, data, metadata):
   def __init__(self, data, metadata):
     self.data = data
     self.data = data
@@ -227,7 +228,7 @@ class FixedResultSet():
   def rows(self):
   def rows(self):
     return self.data if self.data is not None else []
     return self.data if self.data is not None else []
 
 
-class FixedResult():
+class FixedResult(object):
 
 
   def __init__(self, data, metadata):
   def __init__(self, data, metadata):
     self.data = data
     self.data = data

+ 4 - 3
desktop/libs/notebook/src/notebook/connectors/jdbc_vertica.py

@@ -15,6 +15,8 @@
 # See the License for the specific language governing permissions and
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # limitations under the License.
 
 
+from __future__ import division
+from past.utils import old_div
 from librdbms.jdbc import query_and_fetch
 from librdbms.jdbc import query_and_fetch
 
 
 from notebook.connectors.jdbc import JdbcApi
 from notebook.connectors.jdbc import JdbcApi
@@ -63,9 +65,8 @@ class VerticaAssist(Assist):
                     + ", cache is used in "
                     + ", cache is used in "
                     + "%.2f"
                     + "%.2f"
                     % (
                     % (
-                        100
-                        * float(self.cache_use_stat["cache"])
-                        / (self.cache_use_stat["query"] + self.cache_use_stat["cache"])
+                        old_div(100
+                        * float(self.cache_use_stat["cache"]), (self.cache_use_stat["query"] + self.cache_use_stat["cache"]))
                     )
                     )
                     + "% cases"
                     + "% cases"
                 )
                 )

+ 2 - 2
desktop/libs/notebook/src/notebook/connectors/kafka.py

@@ -35,7 +35,7 @@ def query_error_handler(func):
   def decorator(*args, **kwargs):
   def decorator(*args, **kwargs):
     try:
     try:
       return func(*args, **kwargs)
       return func(*args, **kwargs)
-    except Exception, e:
+    except Exception as e:
       message = force_unicode(str(e))
       message = force_unicode(str(e))
       raise QueryError(message)
       raise QueryError(message)
   return decorator
   return decorator
@@ -96,7 +96,7 @@ class KafkaApi(Api):
           u'is_view': False
           u'is_view': False
         }
         }
 
 
-    except Exception, e:
+    except Exception as e:
       LOG.warn('Autocomplete data fetching error: %s' % e)
       LOG.warn('Autocomplete data fetching error: %s' % e)
       response['code'] = 500
       response['code'] = 500
       response['error'] = e.message
       response['error'] = e.message

+ 1 - 1
desktop/libs/notebook/src/notebook/connectors/oozie_batch.py

@@ -186,7 +186,7 @@ class OozieApi(Api):
                                                                  log_start_pattern=self.LOG_START_PATTERN,
                                                                  log_start_pattern=self.LOG_START_PATTERN,
                                                                  log_end_pattern=self.LOG_END_PATTERN)
                                                                  log_end_pattern=self.LOG_END_PATTERN)
       if logs:
       if logs:
-        log_output = logs.values()[0]
+        log_output = list(logs.values())[0]
         if log_output.startswith('Unable to locate'):
         if log_output.startswith('Unable to locate'):
           LOG.debug('Failed to get job attempt logs, possibly due to YARN archiving job to JHS. Will sleep and try again.')
           LOG.debug('Failed to get job attempt logs, possibly due to YARN archiving job to JHS. Will sleep and try again.')
           time.sleep(2.0)
           time.sleep(2.0)

+ 5 - 3
desktop/libs/notebook/src/notebook/connectors/rdbms.py

@@ -15,6 +15,8 @@
 # See the License for the specific language governing permissions and
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # limitations under the License.
 
 
+from builtins import next
+from builtins import object
 import logging
 import logging
 import sys
 import sys
 
 
@@ -33,7 +35,7 @@ def query_error_handler(func):
   def decorator(*args, **kwargs):
   def decorator(*args, **kwargs):
     try:
     try:
       return func(*args, **kwargs)
       return func(*args, **kwargs)
-    except Exception, e:
+    except Exception as e:
       message = force_unicode(e)
       message = force_unicode(e)
       if 'Invalid query handle' in message or 'Invalid OperationHandle' in message:
       if 'Invalid query handle' in message or 'Invalid OperationHandle' in message:
         raise QueryExpired(e)
         raise QueryExpired(e)
@@ -199,7 +201,7 @@ class RdbmsApi(Api):
       return dbms.get_query_server_config(server=self.interpreter)
       return dbms.get_query_server_config(server=self.interpreter)
 
 
 
 
-class Assist():
+class Assist(object):
 
 
   def __init__(self, db):
   def __init__(self, db):
     self.db = db
     self.db = db
@@ -218,7 +220,7 @@ class Assist():
     return self.db.get_sample_data(database, table, column)
     return self.db.get_sample_data(database, table, column)
 
 
 
 
-class FixedResult():
+class FixedResult(object):
 
 
   def __init__(self, result):
   def __init__(self, result):
     self.result = result
     self.result = result

+ 6 - 5
desktop/libs/notebook/src/notebook/connectors/solr.py

@@ -15,6 +15,7 @@
 # See the License for the specific language governing permissions and
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # limitations under the License.
 
 
+from builtins import object
 import logging
 import logging
 
 
 from django.utils.translation import ugettext as _
 from django.utils.translation import ugettext as _
@@ -32,7 +33,7 @@ LOG = logging.getLogger(__name__)
 
 
 try:
 try:
   from libsolr.api import SolrApi as NativeSolrApi
   from libsolr.api import SolrApi as NativeSolrApi
-except (ImportError, AttributeError), e:
+except (ImportError, AttributeError) as e:
   LOG.exception('Search is not enabled')
   LOG.exception('Search is not enabled')
 
 
 
 
@@ -40,9 +41,9 @@ def query_error_handler(func):
   def decorator(*args, **kwargs):
   def decorator(*args, **kwargs):
     try:
     try:
       return func(*args, **kwargs)
       return func(*args, **kwargs)
-    except QueryError, e:
+    except QueryError as e:
       raise e
       raise e
-    except Exception, e:
+    except Exception as e:
       message = force_unicode(str(e))
       message = force_unicode(str(e))
       raise QueryError(message)
       raise QueryError(message)
   return decorator
   return decorator
@@ -72,7 +73,7 @@ class SolrApi(Api):
 
 
     headers = []
     headers = []
     for row in response['result-set']['docs']:
     for row in response['result-set']['docs']:
-      for col in row.keys():
+      for col in list(row.keys()):
         if col not in headers:
         if col not in headers:
           headers.append(col)
           headers.append(col)
 
 
@@ -177,7 +178,7 @@ class SolrApi(Api):
     return response
     return response
 
 
 
 
-class Assist():
+class Assist(object):
 
 
   def __init__(self, api, user, db):
   def __init__(self, api, user, db):
     self.api = api
     self.api = api

+ 1 - 1
desktop/libs/notebook/src/notebook/connectors/spark_batch.py

@@ -23,7 +23,7 @@ LOG = logging.getLogger(__name__)
 
 
 try:
 try:
   from spark.job_server_api import get_api as get_spark_api
   from spark.job_server_api import get_api as get_spark_api
-except ImportError, e:
+except ImportError as e:
   LOG.exception('Spark is not enabled')
   LOG.exception('Spark is not enabled')
 
 
 from notebook.connectors.base import Api
 from notebook.connectors.base import Api

+ 7 - 5
desktop/libs/notebook/src/notebook/connectors/spark_shell.py

@@ -15,6 +15,8 @@
 # See the License for the specific language governing permissions and
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # limitations under the License.
 
 
+from builtins import range
+from builtins import object
 import logging
 import logging
 import re
 import re
 import time
 import time
@@ -37,7 +39,7 @@ LOG = logging.getLogger(__name__)
 try:
 try:
   from spark.conf import LIVY_SERVER_SESSION_KIND
   from spark.conf import LIVY_SERVER_SESSION_KIND
   from spark.job_server_api import get_api as get_spark_api
   from spark.job_server_api import get_api as get_spark_api
-except ImportError, e:
+except ImportError as e:
   LOG.exception('Spark is not enabled')
   LOG.exception('Spark is not enabled')
 
 
 
 
@@ -243,7 +245,7 @@ class SparkApi(Api):
           'has_result_set': True,
           'has_result_set': True,
           'sync': False
           'sync': False
       }
       }
-    except Exception, e:
+    except Exception as e:
       message = force_unicode(str(e)).lower()
       message = force_unicode(str(e)).lower()
       if re.search("session ('\d+' )?not found", message) or 'connection refused' in message or 'session is in state busy' in message:
       if re.search("session ('\d+' )?not found", message) or 'connection refused' in message or 'session is in state busy' in message:
         raise SessionExpired(e)
         raise SessionExpired(e)
@@ -260,7 +262,7 @@ class SparkApi(Api):
       return {
       return {
           'status': response['state'],
           'status': response['state'],
       }
       }
-    except Exception, e:
+    except Exception as e:
       message = force_unicode(str(e)).lower()
       message = force_unicode(str(e)).lower()
       if re.search("session ('\d+' )?not found", message):
       if re.search("session ('\d+' )?not found", message):
         raise SessionExpired(e)
         raise SessionExpired(e)
@@ -274,7 +276,7 @@ class SparkApi(Api):
 
 
     try:
     try:
       response = api.fetch_data(session['id'], cell)
       response = api.fetch_data(session['id'], cell)
-    except Exception, e:
+    except Exception as e:
       message = force_unicode(str(e)).lower()
       message = force_unicode(str(e)).lower()
       if re.search("session ('\d+' )?not found", message):
       if re.search("session ('\d+' )?not found", message):
         raise SessionExpired(e)
         raise SessionExpired(e)
@@ -359,7 +361,7 @@ class SparkApi(Api):
           'session': session['id'],
           'session': session['id'],
           'status': 0
           'status': 0
         }
         }
-      except RestException, e:
+      except RestException as e:
         if e.code == 404 or e.code == 500: # TODO remove the 500
         if e.code == 404 or e.code == 500: # TODO remove the 500
           raise SessionExpired(e)
           raise SessionExpired(e)
     else:
     else:

+ 1 - 0
desktop/libs/notebook/src/notebook/connectors/spark_tests.py

@@ -15,6 +15,7 @@
 # See the License for the specific language governing permissions and
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # limitations under the License.
 
 
+from builtins import object
 from nose.tools import assert_equal, assert_true, assert_false
 from nose.tools import assert_equal, assert_true, assert_false
 
 
 from notebook.connectors.spark_shell import SparkApi
 from notebook.connectors.spark_shell import SparkApi

+ 14 - 7
desktop/libs/notebook/src/notebook/connectors/sql_alchemy.py

@@ -44,6 +44,8 @@ Note: using the task server would not leverage any caching.
 from future import standard_library
 from future import standard_library
 standard_library.install_aliases()
 standard_library.install_aliases()
 
 
+from builtins import next
+from builtins import object
 import datetime
 import datetime
 import json
 import json
 import logging
 import logging
@@ -53,7 +55,6 @@ import sys
 import textwrap
 import textwrap
 
 
 from string import Template
 from string import Template
-from urllib.parse import quote_plus
 
 
 from django.utils.translation import ugettext as _
 from django.utils.translation import ugettext as _
 from sqlalchemy import create_engine, inspect
 from sqlalchemy import create_engine, inspect
@@ -67,6 +68,12 @@ from librdbms.server import dbms
 from notebook.connectors.base import Api, QueryError, QueryExpired, _get_snippet_name, AuthenticationRequired
 from notebook.connectors.base import Api, QueryError, QueryExpired, _get_snippet_name, AuthenticationRequired
 from notebook.models import escape_rows
 from notebook.models import escape_rows
 
 
+if sys.version_info[0] > 2:
+  import urllib.request, urllib.error
+  from urllib.parse import quote_plus as urllib_quote_plus
+  from past.builtins import long
+else:
+  from urllib import quote_plus as urllib_quote_plus
 
 
 CONNECTION_CACHE = {}
 CONNECTION_CACHE = {}
 LOG = logging.getLogger(__name__)
 LOG = logging.getLogger(__name__)
@@ -76,13 +83,13 @@ def query_error_handler(func):
   def decorator(*args, **kwargs):
   def decorator(*args, **kwargs):
     try:
     try:
       return func(*args, **kwargs)
       return func(*args, **kwargs)
-    except OperationalError, e:
+    except OperationalError as e:
       message = str(e)
       message = str(e)
       if '1045' in message: # 'Access denied' # MySQL
       if '1045' in message: # 'Access denied' # MySQL
         raise AuthenticationRequired(message=message)
         raise AuthenticationRequired(message=message)
       else:
       else:
         raise e
         raise e
-    except Exception, e:
+    except Exception as e:
       message = force_unicode(e)
       message = force_unicode(e)
       if 'Invalid query handle' in message or 'Invalid OperationHandle' in message:
       if 'Invalid query handle' in message or 'Invalid OperationHandle' in message:
         raise QueryExpired(e)
         raise QueryExpired(e)
@@ -113,10 +120,10 @@ class SqlAlchemyApi(Api):
       url = self.options['url']
       url = self.options['url']
 
 
     if url.startswith('awsathena+rest://'):
     if url.startswith('awsathena+rest://'):
-      url = url.replace(url[17:37], quote_plus(url[17:37]))
-      url = url.replace(url[38:50], quote_plus(url[38:50]))
+      url = url.replace(url[17:37], urllib_quote_plus(url[17:37]))
+      url = url.replace(url[38:50], urllib_quote_plus(url[38:50]))
       s3_staging_dir = url.rsplit('s3_staging_dir=', 1)[1]
       s3_staging_dir = url.rsplit('s3_staging_dir=', 1)[1]
-      url = url.replace(s3_staging_dir, quote_plus(s3_staging_dir))
+      url = url.replace(s3_staging_dir, urllib_quote_plus(s3_staging_dir))
 
 
     options = self.options.copy()
     options = self.options.copy()
     options.pop('session', None)
     options.pop('session', None)
@@ -335,7 +342,7 @@ class SqlAlchemyApi(Api):
     return table_or_column
     return table_or_column
 
 
 
 
-class Assist():
+class Assist(object):
 
 
   def __init__(self, db, engine, backticks):
   def __init__(self, db, engine, backticks):
     self.db = db
     self.db = db

+ 3 - 2
desktop/libs/notebook/src/notebook/connectors/sql_alchemy_tests.py

@@ -16,6 +16,7 @@
 # See the License for the specific language governing permissions and
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # limitations under the License.
 
 
+from builtins import object
 import logging
 import logging
 
 
 from mock import patch, Mock, MagicMock
 from mock import patch, Mock, MagicMock
@@ -33,7 +34,7 @@ from notebook.connectors.sql_alchemy import SqlAlchemyApi
 LOG = logging.getLogger(__name__)
 LOG = logging.getLogger(__name__)
 
 
 
 
-class TestApi():
+class TestApi(object):
 
 
   def setUp(self):
   def setUp(self):
     self.client = make_logged_in_client(username="test", groupname="default", recreate=True, is_superuser=False)
     self.client = make_logged_in_client(username="test", groupname="default", recreate=True, is_superuser=False)
@@ -139,7 +140,7 @@ class TestApi():
       assert_equal(data['meta'](), [{'type': 'BIGINT_TYPE'}])
       assert_equal(data['meta'](), [{'type': 'BIGINT_TYPE'}])
 
 
 
 
-class TestAutocomplete():
+class TestAutocomplete(object):
 
 
   def setUp(self):
   def setUp(self):
     self.client = make_logged_in_client(username="test", groupname="default", recreate=True, is_superuser=False)
     self.client = make_logged_in_client(username="test", groupname="default", recreate=True, is_superuser=False)

+ 11 - 5
desktop/libs/notebook/src/notebook/dashboard_api.py

@@ -15,6 +15,12 @@
 # See the License for the specific language governing permissions and
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # limitations under the License.
 
 
+from __future__ import division
+from __future__ import print_function
+from builtins import next
+from builtins import zip
+from past.utils import old_div
+from builtins import object
 import logging
 import logging
 import json
 import json
 import numbers
 import numbers
@@ -39,7 +45,7 @@ LOG = logging.getLogger(__name__)
 LIMIT = 100
 LIMIT = 100
 
 
 
 
-class MockRequest():
+class MockRequest(object):
   def __init__(self, user, cluster):
   def __init__(self, user, cluster):
     self.user = user
     self.user = user
     self.POST = {'cluster': cluster}
     self.POST = {'cluster': cluster}
@@ -271,7 +277,7 @@ class SQLDashboardApi(DashboardApi):
 
 
   def schema_fields(self, collection):
   def schema_fields(self, collection):
     return {
     return {
-      'fields': [f for f in self.fields(collection)['schema']['fields'].itervalues()]
+      'fields': [f for f in self.fields(collection)['schema']['fields'].values()]
     }
     }
 
 
 
 
@@ -392,7 +398,7 @@ class SQLDashboardApi(DashboardApi):
         if curr > end:
         if curr > end:
           try:
           try:
             api.cancel_operation(snippet)
             api.cancel_operation(snippet)
-          except Exception, e:
+          except Exception as e:
             LOG.warning("Failed to cancel query: %s" % e)
             LOG.warning("Failed to cancel query: %s" % e)
             api.close_statement(mock_notebook, snippet)
             api.close_statement(mock_notebook, snippet)
           raise OperationTimeout(e)
           raise OperationTimeout(e)
@@ -541,7 +547,7 @@ class SQLDashboardApi(DashboardApi):
     elif value <= 1:
     elif value <= 1:
       return value
       return value
     else:
     else:
-      return value / 100
+      return old_div(value, 100)
 
 
   @classmethod
   @classmethod
   def _supports_cume_dist(self):
   def _supports_cume_dist(self):
@@ -759,7 +765,7 @@ class SQLDashboardApi(DashboardApi):
     if nested_facet['canRange']:
     if nested_facet['canRange']:
       if nested_facet['isDate']:
       if nested_facet['isDate']:
         slot = self._gap_to_units(nested_facet['gap'])
         slot = self._gap_to_units(nested_facet['gap'])
-        print augment_date_range_list(rows, nested_facet['start'], nested_facet['end'], slot['timedelta'], len(cols))
+        print(augment_date_range_list(rows, nested_facet['start'], nested_facet['end'], slot['timedelta'], len(cols)))
       else:
       else:
         rows = augment_number_range_list(rows, nested_facet['start'], nested_facet['end'], nested_facet['gap'], len(cols))
         rows = augment_number_range_list(rows, nested_facet['start'], nested_facet['end'], nested_facet['gap'], len(cols))
 
 

+ 13 - 12
desktop/libs/notebook/src/notebook/decorators.py

@@ -15,6 +15,7 @@
 # See the License for the specific language governing permissions and
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # limitations under the License.
 
 
+from past.builtins import basestring
 import json
 import json
 import logging
 import logging
 import math
 import math
@@ -103,26 +104,26 @@ def api_error_handler(func):
 
 
     try:
     try:
       return func(*args, **kwargs)
       return func(*args, **kwargs)
-    except SessionExpired, e:
+    except SessionExpired as e:
       response['status'] = -2
       response['status'] = -2
-    except QueryExpired, e:
+    except QueryExpired as e:
       response['status'] = -3
       response['status'] = -3
       if e.message and isinstance(e.message, basestring):
       if e.message and isinstance(e.message, basestring):
         response['message'] = e.message
         response['message'] = e.message
-    except AuthenticationRequired, e:
+    except AuthenticationRequired as e:
       response['status'] = 401
       response['status'] = 401
       if e.message and isinstance(e.message, basestring):
       if e.message and isinstance(e.message, basestring):
         response['message'] = e.message
         response['message'] = e.message
-    except ValidationError, e:
+    except ValidationError as e:
       LOG.exception('Error validation %s' % func)
       LOG.exception('Error validation %s' % func)
       response['status'] = -1
       response['status'] = -1
       response['message'] = e.message
       response['message'] = e.message
-    except OperationTimeout, e:
+    except OperationTimeout as e:
       response['status'] = -4
       response['status'] = -4
-    except FilesystemException, e:
+    except FilesystemException as e:
       response['status'] = 2
       response['status'] = 2
       response['message'] = e.message
       response['message'] = e.message
-    except QueryError, e:
+    except QueryError as e:
       LOG.exception('Error running %s' % func.__name__)
       LOG.exception('Error running %s' % func.__name__)
       response['status'] = 1
       response['status'] = 1
       response['message'] = smart_unicode(e)
       response['message'] = smart_unicode(e)
@@ -139,14 +140,14 @@ def api_error_handler(func):
         response['handle'] = e.handle
         response['handle'] = e.handle
       if e.extra:
       if e.extra:
         response.update(e.extra)
         response.update(e.extra)
-    except OperationNotSupported, e:
+    except OperationNotSupported as e:
       response['status'] = 5
       response['status'] = 5
       response['message'] = e.message
       response['message'] = e.message
-    except RestException, e:
+    except RestException as e:
       message = extract_solr_exception_message(e)
       message = extract_solr_exception_message(e)
       response['status'] = 1
       response['status'] = 1
       response['message'] = message.get('error')
       response['message'] = message.get('error')
-    except Exception, e:
+    except Exception as e:
       LOG.exception('Error running %s' % func.__name__)
       LOG.exception('Error running %s' % func.__name__)
       response['status'] = -1
       response['status'] = -1
       response['message'] = smart_unicode(e)
       response['message'] = smart_unicode(e)
@@ -177,9 +178,9 @@ def json_error_handler(view_fn):
   def decorator(*args, **kwargs):
   def decorator(*args, **kwargs):
     try:
     try:
       return view_fn(*args, **kwargs)
       return view_fn(*args, **kwargs)
-    except Http404, e:
+    except Http404 as e:
       raise e
       raise e
-    except Exception, e:
+    except Exception as e:
       response = {
       response = {
         'error': str(e)
         'error': str(e)
       }
       }

+ 1 - 0
desktop/libs/notebook/src/notebook/management/commands/send_query_stats.py

@@ -1,3 +1,4 @@
+from __future__ import print_function
 
 
 #!/usr/bin/env python
 #!/usr/bin/env python
 ## -*- coding: utf-8 -*-
 ## -*- coding: utf-8 -*-

+ 23 - 14
desktop/libs/notebook/src/notebook/models.py

@@ -15,12 +15,16 @@
 # See the License for the specific language governing permissions and
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # limitations under the License.
 
 
+from future import standard_library
+standard_library.install_aliases()
+from builtins import str
+from builtins import object
 import datetime
 import datetime
 import json
 import json
 import logging
 import logging
 import math
 import math
 import numbers
 import numbers
-import urllib
+import sys
 import uuid
 import uuid
 
 
 from datetime import timedelta
 from datetime import timedelta
@@ -39,6 +43,11 @@ from desktop.models import Document2
 
 
 from notebook.connectors.base import Notebook, get_interpreter
 from notebook.connectors.base import Notebook, get_interpreter
 
 
+if sys.version_info[0] > 2:
+  import urllib.request, urllib.error
+  from urllib.parse import quote as urllib_quote
+else:
+  from urllib import quote as urllib_quote
 
 
 LOG = logging.getLogger(__name__)
 LOG = logging.getLogger(__name__)
 
 
@@ -123,7 +132,7 @@ def make_notebook(
     'type': 'notebook' if is_notebook else 'query-%s' % editor_type,
     'type': 'notebook' if is_notebook else 'query-%s' % editor_type,
     'showHistory': True,
     'showHistory': True,
     'isSaved': is_saved,
     'isSaved': is_saved,
-    'onSuccessUrl': urllib.quote(on_success_url.encode('utf-8'), safe=SAFE_CHARACTERS_URI) if on_success_url else None,
+    'onSuccessUrl': urllib_quote(on_success_url.encode('utf-8'), safe=SAFE_CHARACTERS_URI) if on_success_url else None,
     'pubSubUrl': pub_sub_url,
     'pubSubUrl': pub_sub_url,
     'skipHistorify': skip_historify,
     'skipHistorify': skip_historify,
     'isManaged': is_task,
     'isManaged': is_task,
@@ -217,7 +226,7 @@ def make_notebook2(name='Browse', description='', is_saved=False, snippets=None)
   return editor
 
 
-class MockedDjangoRequest():
+class MockedDjangoRequest(object):
 
   def __init__(self, user, get=None, post=None, method='POST'):
     self.user = user
@@ -290,7 +299,7 @@ def import_saved_mapreduce_job(wf):
     files = json.loads(node.files)
     for filepath in files:
       snippet_properties['files'].append({'type': 'file', 'path': filepath})
-  except ValueError, e:
+  except ValueError as e:
     LOG.warn('Failed to parse files for mapreduce job design "%s".' % wf.name)
 
   snippet_properties['archives'] = []
@@ -298,7 +307,7 @@ def import_saved_mapreduce_job(wf):
     archives = json.loads(node.archives)
     for filepath in archives:
       snippet_properties['archives'].append(filepath)
-  except ValueError, e:
+  except ValueError as e:
     LOG.warn('Failed to parse archives for mapreduce job design "%s".' % wf.name)
 
   snippet_properties['hadoopProperties'] = []
@@ -307,7 +316,7 @@ def import_saved_mapreduce_job(wf):
     if properties:
       for prop in properties:
         snippet_properties['hadoopProperties'].append("%s=%s" % (prop.get('name'), prop.get('value')))
-  except ValueError, e:
+  except ValueError as e:
     LOG.warn('Failed to parse job properties for mapreduce job design "%s".' % wf.name)
 
   snippet_properties['app_jar'] = node.jar_path
@@ -347,7 +356,7 @@ def import_saved_shell_job(wf):
             snippet_properties['arguments'].append(param['value'])
           else:
             snippet_properties['env_var'].append(param['value'])
-    except ValueError, e:
+    except ValueError as e:
       LOG.warn('Failed to parse parameters for shell job design "%s".' % wf.name)
 
     snippet_properties['hadoopProperties'] = []
@@ -356,7 +365,7 @@ def import_saved_shell_job(wf):
       if properties:
         for prop in properties:
           snippet_properties['hadoopProperties'].append("%s=%s" % (prop.get('name'), prop.get('value')))
-    except ValueError, e:
+    except ValueError as e:
       LOG.warn('Failed to parse job properties for shell job design "%s".' % wf.name)
 
     snippet_properties['files'] = []
@@ -364,7 +373,7 @@ def import_saved_shell_job(wf):
       files = json.loads(node.files)
       for filepath in files:
         snippet_properties['files'].append({'type': 'file', 'path': filepath})
-    except ValueError, e:
+    except ValueError as e:
       LOG.warn('Failed to parse files for shell job design "%s".' % wf.name)
 
     snippet_properties['archives'] = []
@@ -372,7 +381,7 @@ def import_saved_shell_job(wf):
       archives = json.loads(node.archives)
       for archive in archives:
         snippet_properties['archives'].append(archive['name'])
-    except ValueError, e:
+    except ValueError as e:
       LOG.warn('Failed to parse archives for shell job design "%s".' % wf.name)
 
     snippet_properties['capture_output'] = node.capture_output
@@ -411,7 +420,7 @@ def import_saved_java_job(wf):
       if properties:
         for prop in properties:
           snippet_properties['hadoopProperties'].append("%s=%s" % (prop.get('name'), prop.get('value')))
-    except ValueError, e:
+    except ValueError as e:
       LOG.warn('Failed to parse job properties for Java job design "%s".' % wf.name)
 
     snippet_properties['files'] = []
@@ -419,7 +428,7 @@ def import_saved_java_job(wf):
      files = json.loads(node.files)
       for filepath in files:
         snippet_properties['files'].append({'type': 'file', 'path': filepath})
-    except ValueError, e:
+    except ValueError as e:
       LOG.warn('Failed to parse files for Java job design "%s".' % wf.name)
 
     snippet_properties['archives'] = []
@@ -427,7 +436,7 @@ def import_saved_java_job(wf):
       archives = json.loads(node.archives)
       for archive in archives:
         snippet_properties['archives'].append(archive['name'])
-    except ValueError, e:
+    except ValueError as e:
       LOG.warn('Failed to parse archives for Java job design "%s".' % wf.name)
 
     snippet_properties['capture_output'] = node.capture_output
@@ -481,7 +490,7 @@ def _get_editor_type(editor_id):
   return document.type.rsplit('-', 1)[-1]
 
 
-class Analytics():
+class Analytics(object):
 
   @classmethod
   def admin_stats(cls):
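
Note: on Python 2, class Analytics(): creates an old-style class; super(), property setters and the C3 method resolution order all require new-style classes. The explicit object base fixes Python 2 while being a no-op on Python 3, where builtins.object is just the built-in. A minimal sketch of the pattern (the class body is illustrative):

from builtins import object  # from the 'future' package; the plain built-in on Python 3

class Analytics(object):
  pass

# On Python 3 every class already derives from object; on Python 2 only
# classes with an explicit object base get new-style behavior.
print(issubclass(Analytics, object))  # True on both interpreters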

+ 2 - 1
desktop/libs/notebook/src/notebook/monkey_patches.py

@@ -17,6 +17,7 @@
 
 # Start DBProxy server if we have some JDBC snippets
 
+from builtins import range
 from notebook.conf import get_ordered_interpreters, ENABLE_DBPROXY_SERVER
 
 
@@ -30,7 +31,7 @@ def _start_livy_server():
 
   def cleanup():
     p.terminate()
-    for _ in xrange(5):
+    for _ in range(5):
       if p.poll() == None:
         time.sleep(1)
       else:
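
Note: xrange is gone in Python 3. from builtins import range gives Python 2 a lazy, Python 3-style range, so the loop stays memory-cheap on both interpreters without touching the call site. A standalone sketch:

from builtins import range  # 'future' backport on Python 2; the built-in on Python 3

# Lazy on both interpreters: no 5-element list is materialized.
for attempt in range(5):
  print('poll attempt %d' % attempt)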

+ 9 - 2
desktop/libs/notebook/src/notebook/sql_utils.py

@@ -14,17 +14,24 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from future import standard_library
+standard_library.install_aliases()
 import hashlib
 import os
 import re
-import StringIO
+import sys
 
 from desktop.lib.i18n import smart_str
 
+if sys.version_info[0] > 2:
+  from io import StringIO as string_io
+else:
+  from StringIO import StringIO as string_io
+
 # Note: Might be replaceable by sqlparse.split
 def get_statements(hql_query):
   hql_query = strip_trailing_semicolon(hql_query)
-  hql_query_sio = StringIO.StringIO(hql_query)
+  hql_query_sio = string_io(hql_query)
 
   statements = []
   for (start_row, start_col), (end_row, end_col), statement in split_statements(hql_query_sio.read()):
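
Note: Python 3 moved StringIO into the io module and split it by content type: io.StringIO accepts text only, while io.BytesIO handles bytes. Aliasing the import as string_io, as above, keeps the module body unchanged, but callers must pass unicode for the Python 3 branch to work. A standalone sketch of the shim:

import sys

if sys.version_info[0] > 2:
  from io import StringIO as string_io
else:
  from StringIO import StringIO as string_io

# io.StringIO requires text, so the literal is unicode on both interpreters.
buf = string_io(u'SELECT 1;\nSELECT 2;')
print(buf.read())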

+ 10 - 3
desktop/libs/notebook/src/notebook/tasks.py

@@ -16,11 +16,14 @@
 # limitations under the License.
 from __future__ import absolute_import, unicode_literals
 
+from future import standard_library
+standard_library.install_aliases()
+from builtins import next
+from builtins import object
 import csv
 import datetime
 import json
 import logging
-import StringIO
 import sys
 import time
 
@@ -44,6 +47,10 @@ from desktop.settings import CACHES_CELERY_KEY, CACHES_CELERY_QUERY_RESULT_KEY
 from notebook.connectors.base import get_api, QueryExpired, ExecutionWrapper
 from notebook.sql_utils import get_current_statement
 
+if sys.version_info[0] > 2:
+  from io import StringIO as string_io
+else:
+  from StringIO import StringIO as string_io
 
 LOG_TASK = get_task_logger(__name__)
 LOG = logging.getLogger(__name__)
@@ -75,7 +82,7 @@ class ExecutionWrapperCallback(object):
     if handle.get('sync', False) and handle['result'].get('data'):
       handle_without_data = handle.copy()
       handle_without_data['result'] = {}
-      for key in filter(lambda x: x != 'data', list(handle['result'].keys())):
+      for key in [x for x in list(handle['result'].keys()) if x != 'data']:
         handle_without_data['result'][key] = handle['result'][key]
     else:
       handle_without_data = handle
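
Note: on Python 3, filter() returns a lazy one-shot iterator rather than a list, so any caller that indexes the result or iterates it twice breaks; the list comprehension futurize substitutes is eager on both interpreters. Equivalent standalone forms (the handle dict is made up):

handle = {'result': {'data': [1, 2], 'meta': [], 'type': 'table'}}

# Eager on Python 2 and 3 alike:
keys = [x for x in list(handle['result'].keys()) if x != 'data']

# The old call returned a list on Python 2 but an iterator on Python 3:
same = list(filter(lambda x: x != 'data', handle['result'].keys()))

assert sorted(keys) == sorted(same)
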
@@ -255,7 +262,7 @@ def get_log(notebook, snippet, startFrom=None, size=None, postdict=None, user_id
         return f.read()
     else:
       count = 0
-      output = StringIO.StringIO()
+      output = string_io()
       with storage.open(_log_key(notebook), 'r') as f:
         for line in f:
           count += 1
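
Note: besides the string_io alias, this module imports next from builtins: Python 2 iterators expose a .next() method that Python 3 renamed to .__next__(), and the backported next() built-in papers over the difference. A small sketch:

from builtins import next  # Python 3 semantics on Python 2

statements = iter(['SELECT 1', 'SELECT 2'])
print(next(statements))          # 'SELECT 1' on both interpreters
print(next(statements, 'done'))  # the default-value form is portable too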

+ 13 - 8
desktop/libs/notebook/src/notebook/tests.py

@@ -16,6 +16,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from builtins import object
 import json
 
 from collections import OrderedDict
@@ -322,7 +323,7 @@ class MockedApi(Api):
     return {'destination': target_file}
 
 
-class MockFs():
+class MockFs(object):
   def __init__(self, logical_name=None):
 
     self.fs_defaultfs = 'hdfs://curacao:8020'
@@ -332,11 +333,11 @@ class MockFs():
     self._filebrowser_action = ''
 
   def setuser(self, user):
-    self.user = user
+    self._user = user
 
   @property
   def user(self):
-    return self.user
+    return self._user
 
   def do_as_user(self, username, fn, *args, **kwargs):
     return ''
@@ -350,6 +351,10 @@ class MockFs():
   def filebrowser_action(self):
     return self._filebrowser_action
 
+  @user.setter
+  def user(self, value):
+    self._user = value
+
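
Note: once MockFs becomes a new-style class, the old getter's return self.user is intercepted by the property itself and would recurse forever, and setuser()'s self.user = user assignment would raise AttributeError because the property had no setter. Backing the property with self._user and adding @user.setter fixes both. A condensed sketch of the fixed pattern:

class MockFs(object):

  def __init__(self):
    self._user = None

  @property
  def user(self):
    return self._user  # 'return self.user' here would recurse infinitely

  @user.setter
  def user(self, value):
    self._user = value

fs = MockFs()
fs.user = 'test'  # routed through the setter
print(fs.user)    # test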
 
 
 class TestNotebookApiMocked(object):
 
@@ -514,17 +519,17 @@ def test_get_interpreters_to_show():
 
     interpreters_shown_on_wheel_unset = get_ordered_interpreters()
     assert_equal(
-      default_interpreters.values(),
+      list(default_interpreters.values()),
       interpreters_shown_on_wheel_unset,
       'get_interpreters_to_show should return the same as get_interpreters when interpreters_shown_on_wheel is unset. expected: %s, actual: %s' % (
-          default_interpreters.values(), interpreters_shown_on_wheel_unset
+          list(default_interpreters.values()), interpreters_shown_on_wheel_unset
      )
     )
 
     resets.append(INTERPRETERS_SHOWN_ON_WHEEL.set_for_testing('java,pig'))
-    assert_equal(expected_interpreters.values(), get_ordered_interpreters(),
+    assert_equal(list(expected_interpreters.values()), get_ordered_interpreters(),
                  'get_interpreters_to_show did not return interpreters in the correct order expected: %s, actual: %s'
-                 % (expected_interpreters.values(), get_ordered_interpreters()))
+                 % (list(expected_interpreters.values()), get_ordered_interpreters()))
   finally:
     for reset in resets:
       reset()
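
Note: dict.values() returns a view object on Python 3 instead of a list, and a view never compares equal to a list, so the assertions above materialize it with list() before comparing. A standalone illustration:

d = {'hive': 1, 'impala': 2}

values = d.values()
print(isinstance(values, list))  # True on Python 2, False on Python 3 (dict view)
print(sorted(list(values)))      # [1, 2] once materialized, on both interpreters
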
@@ -533,7 +538,7 @@ def test_get_interpreters_to_show():
     appmanager.load_apps(APP_BLACKLIST.get())
 
 
-class TestAnalytics():
+class TestAnalytics(object):
 
   def setUp(self):
     self.client = make_logged_in_client(username="test", groupname="default", recreate=True, is_superuser=False)

+ 4 - 3
desktop/libs/notebook/src/notebook/views.py

@@ -15,6 +15,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from builtins import object
 import json
 import logging
 
@@ -284,7 +285,7 @@ def delete(request):
         doc.can_write_or_exception(request.user)
         doc2.trash()
         ctr += 1
-      except FilesystemException, e:
+      except FilesystemException as e:
         failures.append(notebook['uuid'])
         LOG.exception("Failed to delete document with UUID %s that is writable by user %s, skipping." % (notebook['uuid'], request.user.username))
 
@@ -317,7 +318,7 @@ def copy(request):
         doc2 = doc2.copy(name=name, owner=request.user)
 
         doc.copy(content_object=doc2, name=name, owner=request.user)
-      except FilesystemException, e:
+      except FilesystemException as e:
         failures.append(notebook['uuid'])
         LOG.exception("Failed to copy document with UUID %s accessible by user %s, skipping." % (notebook['uuid'], request.user.username))
 
@@ -371,7 +372,7 @@ def install_examples(request):
     try:
       Command().handle(user=request.user)
      response['status'] = 0
-    except Exception, err:
+    except Exception as err:
       LOG.exception(err)
       response['message'] = str(err)
   else: