
[core] Fix LOG.warn to LOG.warning (#1926)

- [core] Fix LOG.warn to LOG.warning in apps
- [core] Fix LOG.warn to LOG.warning in desktop libs
- [core] Fix LOG.warn to LOG.warning in apps core
Romain Rigaux committed 4 years ago
commit be87d2996b
100 changed files with 301 additions and 303 deletions
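For context: `Logger.warn` is an old, undocumented alias of `Logger.warning` in Python's standard `logging` module; it has been deprecated since Python 3.3 and emits a `DeprecationWarning` when called. A minimal sketch of the one-line rename this commit applies across the tree (the logger setup and handle value here are hypothetical, for illustration only):

  import logging

  logging.basicConfig(level=logging.WARNING)
  LOG = logging.getLogger(__name__)

  handle = 'c44f3a56'  # hypothetical value, for illustration only

  # Before: Logger.warn is a deprecated alias of Logger.warning and emits a
  # DeprecationWarning on Python 3 (run with `python -W default` to see it).
  LOG.warn('Invalid query handle: %s' % handle)

  # After: the documented method, as applied throughout this commit.
  LOG.warning('Invalid query handle: %s' % handle)

Both calls log the same record at WARNING level; the rename only silences the deprecation warning and guards against eventual removal of the alias.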
  1. +1 -1   apps/beeswax/src/beeswax/api.py
  2. +3 -3   apps/beeswax/src/beeswax/data_export.py
  3. +1 -1   apps/beeswax/src/beeswax/models.py
  4. +2 -2   apps/beeswax/src/beeswax/server/hive_server2_lib.py
  5. +2 -2   apps/beeswax/src/beeswax/test_base.py
  6. +4 -4   apps/beeswax/src/beeswax/views.py
  7. +1 -1   apps/filebrowser/src/filebrowser/lib/xxd_test.py
  8. +2 -2   apps/hive/src/hive/conf.py
  9. +1 -1   apps/impala/src/impala/dbms.py
  10. +2 -2   apps/impala/src/impala/server.py
  11. +2 -2   apps/jobbrowser/src/jobbrowser/apis/job_api.py
  12. +1 -1   apps/jobbrowser/src/jobbrowser/apis/schedule_api.py
  13. +1 -1   apps/jobbrowser/src/jobbrowser/apis/workflow_api.py
  14. +3 -3   apps/jobbrowser/src/jobbrowser/views.py
  15. +4 -5   apps/jobbrowser/src/jobbrowser/yarn_models.py
  16. +1 -1   apps/jobsub/src/jobsub/old_migrations/0004_hue1_to_hue2.py
  17. +1 -1   apps/jobsub/src/jobsub/tests.py
  18. +1 -1   apps/metastore/src/metastore/views.py
  19. +2 -2   apps/oozie/src/oozie/models.py
  20. +4 -4   apps/oozie/src/oozie/models2.py
  21. +2 -2   apps/oozie/src/oozie/tests.py
  22. +2 -2   apps/oozie/src/oozie/views/api.py
  23. +2 -2   apps/oozie/src/oozie/views/editor2.py
  24. +8 -8   apps/useradmin/src/useradmin/ldap_access.py
  25. +1 -1   apps/useradmin/src/useradmin/middleware.py
  26. +1 -1   apps/useradmin/src/useradmin/models.py
  27. +2 -2   apps/useradmin/src/useradmin/organization.py
  28. +1 -1   apps/useradmin/src/useradmin/organization_tests.py
  29. +31 -31  apps/useradmin/src/useradmin/views.py
  30. +6 -6   desktop/core/src/desktop/api2.py
  31. +1 -1   desktop/core/src/desktop/appmanager.py
  32. +13 -13  desktop/core/src/desktop/auth/backend.py
  33. +4 -4   desktop/core/src/desktop/auth/views.py
  34. +1 -1   desktop/core/src/desktop/conf.py
  35. +4 -4   desktop/core/src/desktop/converters.py
  36. +1 -1   desktop/core/src/desktop/lib/conf.py
  37. +1 -1   desktop/core/src/desktop/lib/connectors/models.py
  38. +1 -1   desktop/core/src/desktop/lib/fs/gc/client.py
  39. +1 -1   desktop/core/src/desktop/lib/idbroker/conf.py
  40. +1 -1   desktop/core/src/desktop/lib/scheduler/api.py
  41. +1 -1   desktop/core/src/desktop/lib/thrift_util.py
  42. +1 -1   desktop/core/src/desktop/lib/view_util.py
  43. +1 -1   desktop/core/src/desktop/manage_entry.py
  44. +1 -1   desktop/core/src/desktop/management/commands/create_user_directories.py
  45. +1 -1   desktop/core/src/desktop/management/commands/desktop_document_cleanup.py
  46. +60 -60  desktop/core/src/desktop/management/commands/ldaptest.py
  47. +4 -4   desktop/core/src/desktop/models.py
  48. +4 -4   desktop/core/src/desktop/supervisor.py
  49. +5 -5   desktop/core/src/desktop/tests.py
  50. +6 -6   desktop/core/src/desktop/views.py
  51. +1 -1   desktop/libs/aws/src/aws/conf.py
  52. +2 -2   desktop/libs/aws/src/aws/s3/s3fs.py
  53. +3 -3   desktop/libs/azure/src/azure/abfs/__init__.py
  54. +4 -4   desktop/libs/azure/src/azure/abfs/abfs.py
  55. +1 -1   desktop/libs/dashboard/src/dashboard/controller.py
  56. +2 -2   desktop/libs/dashboard/src/dashboard/models.py
  57. +1 -1   desktop/libs/hadoop/src/hadoop/cluster.py
  58. +1 -1   desktop/libs/hadoop/src/hadoop/conf.py
  59. +1 -1   desktop/libs/hadoop/src/hadoop/fs/hadoopfs.py
  60. +1 -1   desktop/libs/hadoop/src/hadoop/fs/upload.py
  61. +2 -2   desktop/libs/hadoop/src/hadoop/fs/webhdfs.py
  62. +1 -2   desktop/libs/hadoop/src/hadoop/pseudo_hdfs4.py
  63. +6 -6   desktop/libs/indexer/src/indexer/api3.py
  64. +2 -2   desktop/libs/indexer/src/indexer/conf.py
  65. +3 -3   desktop/libs/indexer/src/indexer/controller.py
  66. +1 -1   desktop/libs/indexer/src/indexer/file_format.py
  67. +1 -1   desktop/libs/indexer/src/indexer/indexers/rdbms.py
  68. +1 -1   desktop/libs/indexer/src/indexer/indexers/sql.py
  69. +1 -1   desktop/libs/indexer/src/indexer/solr_api.py
  70. +2 -2   desktop/libs/indexer/src/indexer/solr_client.py
  71. +2 -2   desktop/libs/liboauth/src/liboauth/backend.py
  72. +1 -1   desktop/libs/liboauth/src/liboauth/views.py
  73. +1 -1   desktop/libs/liboozie/src/liboozie/conf.py
  74. +2 -2   desktop/libs/liboozie/src/liboozie/credentials.py
  75. +8 -8   desktop/libs/liboozie/src/liboozie/submission2.py
  76. +1 -1   desktop/libs/liboozie/src/liboozie/submittion.py
  77. +1 -1   desktop/libs/librdbms/src/librdbms/jdbc.py
  78. +1 -1   desktop/libs/libsaml/src/libsaml/urls.py
  79. +1 -1   desktop/libs/libsentry/src/libsentry/api.py
  80. +1 -1   desktop/libs/libsentry/src/libsentry/api2.py
  81. +2 -2   desktop/libs/libsentry/src/libsentry/privilege_checker.py
  82. +1 -1   desktop/libs/libsentry/src/libsentry/sentry_ha.py
  83. +1 -1   desktop/libs/libsentry/src/libsentry/sentry_site.py
  84. +3 -3   desktop/libs/libsolr/src/libsolr/api.py
  85. +1 -1   desktop/libs/libsolr/src/libsolr/conf.py
  86. +2 -2   desktop/libs/libzookeeper/src/libzookeeper/conf.py
  87. +4 -4   desktop/libs/metadata/src/metadata/manager_client.py
  88. +1 -1   desktop/libs/metadata/src/metadata/optimizer/optimizer_client.py
  89. +1 -1   desktop/libs/metadata/src/metadata/optimizer_api.py
  90. +1 -1   desktop/libs/notebook/src/notebook/api.py
  91. +1 -1   desktop/libs/notebook/src/notebook/connectors/altus.py
  92. +1 -1   desktop/libs/notebook/src/notebook/connectors/base.py
  93. +2 -2   desktop/libs/notebook/src/notebook/connectors/flink_sql.py
  94. +2 -2   desktop/libs/notebook/src/notebook/connectors/hbase.py
  95. +1 -1   desktop/libs/notebook/src/notebook/connectors/hive_metastore.py
  96. +9 -9   desktop/libs/notebook/src/notebook/connectors/hiveserver2.py
  97. +1 -1   desktop/libs/notebook/src/notebook/connectors/kafka.py
  98. +2 -2   desktop/libs/notebook/src/notebook/connectors/oozie_batch.py
  99. +1 -1   desktop/libs/notebook/src/notebook/connectors/spark_shell.py
  100. +1 -1   desktop/libs/notebook/src/notebook/connectors/sql_alchemy.py

+ 1 - 1
apps/beeswax/src/beeswax/api.py

@@ -88,7 +88,7 @@ def error_handler(view_fn):
 
       if re.search('database is locked|Invalid query handle|not JSON serializable', message, re.IGNORECASE):
         response['status'] = 2 # Frontend will not display this type of error
-        LOG.warn('error_handler silencing the exception: %s' % e)
+        LOG.warning('error_handler silencing the exception: %s' % e)
       return JsonResponse(response)
   return decorator
 

+ 3 - 3
apps/beeswax/src/beeswax/data_export.py

@@ -157,7 +157,7 @@ class DataAdapter(object):
 
       # For result sets with high num of columns, fetch in smaller batches to avoid serialization cost
       if self.num_cols > 100:
-        LOG.warn('The query results contain %d columns and may take long time to download, reducing fetch size to 100.' % self.num_cols)
+        LOG.warning('The query results contain %d columns and may take long time to download, reducing fetch size to 100.' % self.num_cols)
         self.fetch_size = 100
 
     if self.has_more and not self.is_truncated:
@@ -167,13 +167,13 @@ class DataAdapter(object):
       for row in results.rows():
         num_bytes = self._getsizeofascii(row)
         if self.limit_rows and self.row_counter + 1 > self.max_rows:
-          LOG.warn('The query results exceeded the maximum row limit of %d and has been truncated to first %d rows.' % (
+          LOG.warning('The query results exceeded the maximum row limit of %d and has been truncated to first %d rows.' % (
               self.max_rows, self.row_counter)
           )
           self.is_truncated = True
           break
         if self.limit_bytes and self.bytes_counter + num_bytes > self.max_bytes:
-          LOG.warn('The query results exceeded the maximum bytes limit of %d and has been truncated to first %d rows.' % (
+          LOG.warning('The query results exceeded the maximum bytes limit of %d and has been truncated to first %d rows.' % (
               self.max_bytes, self.row_counter)
           )
           self.is_truncated = True

+ 1 - 1
apps/beeswax/src/beeswax/models.py

@@ -431,7 +431,7 @@ class SessionManager(models.Manager):
           snippet_data = json.loads(doc.data)['snippets'][0]
         except (KeyError, IndexError):
           # data might not contain a 'snippets' field or it might be empty
-          LOG.warn('No snippets in Document2 object of type query-hive')
+          LOG.warning('No snippets in Document2 object of type query-hive')
           continue
         session_guid = snippet_data.get('result', {}).get('handle', {}).get('session_guid')
         status = snippet_data.get('status')

+ 2 - 2
apps/beeswax/src/beeswax/server/hive_server2_lib.py

@@ -180,7 +180,7 @@ class HiveServerTable(Table):
       end_cols_index = list(map(itemgetter('col_name'), rows[col_row_index:])).index('')
     except ValueError as e:
       end_cols_index = 5000
-      LOG.warn('Could not guess end column index, so defaulting to %s: %s' % (end_cols_index, e))
+      LOG.warning('Could not guess end column index, so defaulting to %s: %s' % (end_cols_index, e))
     return [{
           'col_name': prop['col_name'].strip() if prop['col_name'] else prop['col_name'],
           'data_type': prop['data_type'].strip() if prop['data_type'] else prop['data_type'],
@@ -1321,7 +1321,7 @@ class HiveServerTableCompatible(HiveServerTable):
       self.is_impala_only = 'org.apache.hadoop.hive.kudu.KuduSerDe' in str(hive_table.properties) or \
         'org.apache.kudu.mapreduce.KuduTableOutputFormat' in str(hive_table.properties) # Deprecated since CDP
     except Exception as e:
-      LOG.warn('Autocomplete data fetching error: %s' % e)
+      LOG.warning('Autocomplete data fetching error: %s' % e)
       self.is_impala_only = False
 
   @property

+ 2 - 2
apps/beeswax/src/beeswax/test_base.py

@@ -353,7 +353,7 @@ def verify_history(client, fragment, design=None, reverse=False, server_name='be
     except KeyError:
       pass
 
-  LOG.warn('Cannot find history size. Response context clobbered')
+  LOG.warning('Cannot find history size. Response context clobbered')
   return -1
 
 
@@ -478,7 +478,7 @@ class BeeswaxSampleProvider(object):
       cls._make_table(table_info['name'], CREATE_TABLE % table_info, data_file % 2)
 
       if is_live_cluster():
-        LOG.warn('HUE-2884: We cannot create Hive UTF8 tables when live cluster testing at the moment')
+        LOG.warning('HUE-2884: We cannot create Hive UTF8 tables when live cluster testing at the moment')
       else:
         # Create a "test_utf8" table.
         table_info = {'db': cls.db_name, 'name': 'test_utf8', 'comment': cls.get_i18n_table_comment()}

+ 4 - 4
apps/beeswax/src/beeswax/views.py

@@ -444,7 +444,7 @@ def execute_query(request, design_id=None, query_history_id=None):
     except QueryServerException as e:
       if 'Invalid query handle' in e.message or 'Invalid OperationHandle' in e.message:
         query_history.save_state(QueryHistory.STATE.expired)
-        LOG.warn("Invalid query handle", exc_info=sys.exc_info())
+        LOG.warning("Invalid query handle", exc_info=sys.exc_info())
         action = 'editor-expired-results'
       else:
         raise e
@@ -878,7 +878,7 @@ def _list_designs(user, querydict, page_size, prefix="", is_trashed=False):
       sort_dir, sort_attr = '', sort_key
 
     if sort_attr not in SORT_ATTR_TRANSLATION:
-      LOG.warn('Bad parameter to list_designs: sort=%s' % (sort_key,))
+      LOG.warning('Bad parameter to list_designs: sort=%s' % (sort_key,))
       sort_dir, sort_attr = DEFAULT_SORT
   else:
     sort_dir, sort_attr = DEFAULT_SORT
@@ -1072,7 +1072,7 @@ def _list_query_history(user, querydict, page_size, prefix=""):
   d_type = querydict.get(prefix + 'type')
   if d_type:
     if d_type not in list(SavedQuery.TYPES_MAPPING.keys()):
-      LOG.warn('Bad parameter to list_query_history: type=%s' % (d_type,))
+      LOG.warning('Bad parameter to list_query_history: type=%s' % (d_type,))
     else:
       db_queryset = db_queryset.filter(design__type=SavedQuery.TYPES_MAPPING[d_type])
 
@@ -1089,7 +1089,7 @@ def _list_query_history(user, querydict, page_size, prefix=""):
       sort_dir, sort_attr = '-', sort_key[1:]
 
     if sort_attr not in SORT_ATTR_TRANSLATION:
-      LOG.warn('Bad parameter to list_query_history: sort=%s' % (sort_key,))
+      LOG.warning('Bad parameter to list_query_history: sort=%s' % (sort_key,))
       sort_dir, sort_attr = DEFAULT_SORT
   else:
     sort_dir, sort_attr = DEFAULT_SORT

+ 1 - 1
apps/filebrowser/src/filebrowser/lib/xxd_test.py

@@ -90,7 +90,7 @@ class XxdTest(unittest.TestCase):
     try:
       subprocess.check_output('type xxd', shell=True)
     except subprocess.CalledProcessError as e:
-      LOG.warn('xxd not found')
+      LOG.warning('xxd not found')
       raise SkipTest
     # /dev/random tends to hang on Linux, so we use python instead.
     # It's inefficient, but it's not terrible.

+ 2 - 2
apps/hive/src/hive/conf.py

@@ -84,12 +84,12 @@ def config_validator(user):
         if is_s3_enabled():
           fs.do_as_user(user, fs.stats, warehouse)
         else:
-          LOG.warn("Warehouse is in S3, but no credential available.")
+          LOG.warning("Warehouse is in S3, but no credential available.")
       elif fs_scheme == 'abfs':
         if is_abfs_enabled():
           fs.do_as_user(user, fs.stats, warehouse)
         else:
-          LOG.warn("Warehouse is in ABFS, but no credential available.")
+          LOG.warning("Warehouse is in ABFS, but no credential available.")
       else:
         fs.do_as_superuser(fs.stats, warehouse)
   except Exception:

+ 1 - 1
apps/impala/src/impala/dbms.py

@@ -179,7 +179,7 @@ class ImpalaDbms(HiveServer2Dbms):
         unique_values = set(histogram.split(', '))
         results = list(unique_values)
       except IndexError as e:
-        LOG.warn('Failed to get histogram results, result set has unexpected format: %s' % smart_str(e))
+        LOG.warning('Failed to get histogram results, result set has unexpected format: %s' % smart_str(e))
       finally:
         self.close(handle)
 

+ 2 - 2
apps/impala/src/impala/server.py

@@ -81,7 +81,7 @@ class ImpalaServerClient(HiveServerClient):
     try:
       self.close_operation(operation_handle)
     except QueryServerException as e:
-      LOG.warn('Failed to close operation for query handle, query may be invalid or already closed.')
+      LOG.warning('Failed to close operation for query handle, query may be invalid or already closed.')
 
     resp = self.call(self._client.GetExecSummary, req)
 
@@ -99,7 +99,7 @@ class ImpalaServerClient(HiveServerClient):
     try:
       self.close_operation(operation_handle)
     except QueryServerException as e:
-      LOG.warn('Failed to close operation for query handle, query may be invalid or already closed.')
+      LOG.warning('Failed to close operation for query handle, query may be invalid or already closed.')
 
     resp = self.call(self._client.GetRuntimeProfile, req)
 

+ 2 - 2
apps/jobbrowser/src/jobbrowser/apis/job_api.py

@@ -251,7 +251,7 @@ class YarnApi(Api):
       else:
         logs = None
     except PopupException as e:
-      LOG.warn('No task attempt found for logs: %s' % smart_str(e))
+      LOG.warning('No task attempt found for logs: %s' % smart_str(e))
     return {'logs': logs, 'logsList': logs_list}
 
 
@@ -426,7 +426,7 @@ class YarnMapReduceTaskApi(Api):
       response = job_attempt_logs_json(MockDjangoRequest(self.user), job=self.app_id, name=log_name, is_embeddable=is_embeddable)
       logs = json.loads(response.content)['log']
     except PopupException as e:
-      LOG.warn('No task attempt found for default logs: %s' % e)
+      LOG.warning('No task attempt found for default logs: %s' % e)
       logs = ''
     return {'progress': 0, 'logs': logs}
 

+ 1 - 1
apps/jobbrowser/src/jobbrowser/apis/schedule_api.py

@@ -39,7 +39,7 @@ try:
   from oozie.conf import OOZIE_JOBS_COUNT
   from oozie.views.dashboard import list_oozie_coordinator, get_oozie_job_log, massaged_oozie_jobs_for_json, has_job_edition_permission
 except Exception as e:
-  LOG.warn('Some application are not enabled: %s' % e)
+  LOG.warning('Some application are not enabled: %s' % e)
 
 
 class ScheduleApi(Api):

+ 1 - 1
apps/jobbrowser/src/jobbrowser/apis/workflow_api.py

@@ -38,7 +38,7 @@ try:
   has_oozie_installed = True
   OOZIE_JOBS_COUNT_LIMIT = OOZIE_JOBS_COUNT.get()
 except Exception as e:
-  LOG.warn('Some applications are not enabled for Job Browser v2: %s' % e)
+  LOG.warning('Some applications are not enabled for Job Browser v2: %s' % e)
   has_oozie_installed = False
   OOZIE_JOBS_COUNT_LIMIT = 100
 

+ 3 - 3
apps/jobbrowser/src/jobbrowser/views.py

@@ -61,7 +61,7 @@ LOG = logging.getLogger(__name__)
 try:
   from beeswax.hive_site import hiveserver2_impersonation_enabled
 except:
-  LOG.warn('Hive is not enabled')
+  LOG.warning('Hive is not enabled')
   def hiveserver2_impersonation_enabled(): return True
 
 from jobbrowser.conf import LOG_OFFSET, SHARE_JOBS
@@ -83,7 +83,7 @@ def check_job_permission(view_func):
     try:
       job = get_job(request, job_id=jobid)
     except ApplicationNotRunning as e:
-      LOG.warn('Job %s has not yet been accepted by the RM, will poll for status.' % jobid)
+      LOG.warning('Job %s has not yet been accepted by the RM, will poll for status.' % jobid)
       return job_not_assigned(request, jobid, request.path)
 
     if not SHARE_JOBS.get() and not is_admin(request.user) \
@@ -315,7 +315,7 @@ def kill_job(request, job):
     try:
       job = api.get_job(jobid=job.jobId)
     except Exception as e:
-      LOG.warn('Failed to get job with ID %s: %s' % (job.jobId, e))
+      LOG.warning('Failed to get job with ID %s: %s' % (job.jobId, e))
     else:
       if job.status not in ["RUNNING", "QUEUED"]:
         if request.GET.get("next"):

+ 4 - 5
apps/jobbrowser/src/jobbrowser/yarn_models.py

@@ -153,7 +153,7 @@ class SparkJob(Application):
       self.trackingUrl = actual_url
       LOG.debug("SparkJob tracking URL: %s" % self.trackingUrl)
     except Exception as e:
-      LOG.warn("Failed to resolve Spark Job's actual tracking URL: %s" % e)
+      LOG.warning("Failed to resolve Spark Job's actual tracking URL: %s" % e)
     finally:
       if resp is not None:
         resp.close()
@@ -163,7 +163,7 @@ class SparkJob(Application):
     try:
       response = function(*args, **kwargs)
     except Exception as e:
-      LOG.warn('Spark resolve tracking URL returned a failed response: %s' % e)
+      LOG.warning('Spark resolve tracking URL returned a failed response: %s' % e)
     return response
 
   def _get_metrics(self):
@@ -227,7 +227,7 @@ class Job(object):
     for attr in list(attrs.keys()):
       if attr == 'acls':
         # 'acls' are actually not available in the API
-        LOG.warn('Not using attribute: %s' % attrs[attr])
+        LOG.warning('Not using attribute: %s' % attrs[attr])
       else:
         setattr(self, attr, attrs[attr])
 
@@ -349,7 +349,7 @@ class YarnV2Job(Job):
     for attr in list(attrs.keys()):
       if attr == 'acls':
         # 'acls' are actually not available in the API
-        LOG.warn('Not using attribute: %s' % attrs[attr])
+        LOG.warning('Not using attribute: %s' % attrs[attr])
       else:
         setattr(self, attr, attrs[attr])
 
@@ -716,4 +716,3 @@ class Container(object):
     setattr(self, 'maxMapTasks', None)
     setattr(self, 'maxReduceTasks', None)
     setattr(self, 'taskReports', None)
-

+ 1 - 1
apps/jobsub/src/jobsub/old_migrations/0004_hue1_to_hue2.py

@@ -172,7 +172,7 @@ def hue1_to_hue2_data_migration():
     elif jd.type == 'streaming':
       job_design_migration_for_streaming(jd)
     else:
-      LOG.warn("Unknown JobDesign type '%s' in the old table. Row id: %s" %
+      LOG.warning("Unknown JobDesign type '%s' in the old table. Row id: %s" %
                (jd.type, jd.id))
 
 

+ 1 - 1
apps/jobsub/src/jobsub/tests.py

@@ -55,7 +55,7 @@ class TestJobsubWithHadoop(OozieServerProvider):
       except Exception as e:
         # chmod failure likely do to async processing of resource deletion.
         # If the directory has improper permissions, should fail later in the test case.
-        LOG.warn("Received the following exception while change mode attempt %d of /tmp: %s" % (i, str(e)))
+        LOG.warning("Received the following exception while change mode attempt %d of /tmp: %s" % (i, str(e)))
         time.sleep(1)
 
     self.design = self.create_design()

+ 1 - 1
apps/metastore/src/metastore/views.py

@@ -389,7 +389,7 @@ def alter_table(request, database, table):
 
     # Cannot modify both name and comment at same time, name will get precedence
     if new_table_name and comment:
-      LOG.warn('Cannot alter both table name and comment at the same time, will perform rename.')
+      LOG.warning('Cannot alter both table name and comment at the same time, will perform rename.')
 
     table_obj = db.alter_table(database, table, new_table_name=new_table_name, comment=comment)
 

+ 2 - 2
apps/oozie/src/oozie/models.py

@@ -580,7 +580,7 @@ class Workflow(Job):
         workflow.delete(skip_trash=True)
         return graph, node_list
     except Exception as e:
-      LOG.warn('Workflow %s could not be converted to a graph: %s' % (oozie_workflow.id, e))
+      LOG.warning('Workflow %s could not be converted to a graph: %s' % (oozie_workflow.id, e))
 
     return None, []
 
@@ -1532,7 +1532,7 @@ class Coordinator(Job):
     if mapping is None:
       mapping = {}
     tmpl = "editor/gen/coordinator.xml.mako"
-    return re.sub(re.compile('\s*\n+', re.MULTILINE), '\n', 
+    return re.sub(re.compile('\s*\n+', re.MULTILINE), '\n',
       django_mako.render_to_string(tmpl, {'coord': self, 'mapping': mapping})).encode('utf-8', 'xmlcharrefreplace')
 
   def clone(self, new_owner=None):

+ 4 - 4
apps/oozie/src/oozie/models2.py

@@ -294,7 +294,7 @@ class Workflow(Job):
     try:
       _get_hierarchy_from_adj_list(adj_list, adj_list['start']['ok_to'], node_hierarchy)
     except WorkflowDepthReached:
-      LOG.warn("The Workflow: %s with id: %s, has reached the maximum allowed depth for Graph display " \
+      LOG.warning("The Workflow: %s with id: %s, has reached the maximum allowed depth for Graph display " \
         % (oozie_workflow.appName, oozie_workflow.id))
       # Hide graph same as when total nodes > 30
       return {}
@@ -844,7 +844,7 @@ class Node(object):
     if self.data['type'] == 'fork':
       links = [link for link in self.data['children'] if link['to'] in node_mapping]
       if len(links) != len(self.data['children']):
-        LOG.warn('Fork has some children links that do not exist, ignoring them: links %s, existing links %s, links %s, existing links %s' \
+        LOG.warning('Fork has some children links that do not exist, ignoring them: links %s, existing links %s, links %s, existing links %s' \
                  % (len(links), len(self.data['children']), links, self.data['children']))
         self.data['children'] = links
 
@@ -3105,8 +3105,8 @@ def import_workflow_from_hue_3_7(old_wf):
 
   [<Start: start>, <Pig: Pig>, [<Kill: kill>], [<End: end>]]
   [<Start: start>, <Java: TeraGenWorkflow>, <Java: TeraSort>, [<Kill: kill>], [<End: end>]]
-  [<Start: start>, [<Fork: fork-34>, [[<Mapreduce: Sleep-1>, <Mapreduce: Sleep-10>], 
-  [<Mapreduce: Sleep-5>, [<Fork: fork-38>, [[<Mapreduce: Sleep-3>], [<Mapreduce: Sleep-4>]], 
+  [<Start: start>, [<Fork: fork-34>, [[<Mapreduce: Sleep-1>, <Mapreduce: Sleep-10>],
+  [<Mapreduce: Sleep-5>, [<Fork: fork-38>, [[<Mapreduce: Sleep-3>], [<Mapreduce: Sleep-4>]],
   <Join: join-39>]]], <Join: join-35>], [<Kill: kill>], [<End: end>]]
   """
 

+ 2 - 2
apps/oozie/src/oozie/tests.py

@@ -3339,13 +3339,13 @@ my_prop_not_filtered=10
     # With http pool the http connection is reused and so new connection count is 0
     superuser_client = make_logged_in_client(is_superuser=True)
     start_log = "--START HTTP POOL TEST--"
-    LOG.warn(start_log)
+    LOG.warning(start_log)
     superuser_client.get(reverse('oozie:list_oozie_workflows'))
     superuser_client.get(reverse('oozie:list_oozie_workflows') + "?format=json")
     superuser_client.get(reverse('oozie:list_oozie_workflows') + "?format=json&status=RUNNING&status=PREP&status=SUSPENDED")
     superuser_client.get(reverse('oozie:list_oozie_workflows') + "?format=json&status=KILLED&status=FAILED")
     end_log = "--END HTTP POOL TEST--"
-    LOG.warn(end_log)
+    LOG.warning(end_log)
     response = superuser_client.get(reverse(views.log_view))
 
     s1 = response._container[0].index(start_log)

+ 2 - 2
apps/oozie/src/oozie/views/api.py

@@ -49,7 +49,7 @@ try:
   from jobbrowser.views import job_single_logs
   from jobbrowser.models import LinkJobLogs
 except:
-  LOG.warn('Oozie is not enabled')
+  LOG.warning('Oozie is not enabled')
 
 
 def error_handler(view_fn):
@@ -459,7 +459,7 @@ def get_log(request, oozie_workflow, make_links=True, log_start_pattern=None, lo
             re_log_end = re.compile(log_end_pattern)
             is_really_done = re_log_end.search(action_logs) is not None or oozie_workflow.status == 'KILLED'
             if is_really_done and not action_logs:
-              LOG.warn('Unable to scrape full logs, try increasing the jobbrowser log_offset configuration value.')
+              LOG.warning('Unable to scrape full logs, try increasing the jobbrowser log_offset configuration value.')
 
           if make_links:
             action_logs = LinkJobLogs._make_links(action_logs)

+ 2 - 2
apps/oozie/src/oozie/views/editor2.py

@@ -85,7 +85,7 @@ def open_old_workflow(request):
     _workflow = import_workflow_from_hue_3_7(workflow)
     return _edit_workflow(request, None, _workflow)
   except Exception as e:
-    LOG.warn('Could not open old worklow: %s' % smart_str(e))
+    LOG.warning('Could not open old worklow: %s' % smart_str(e))
     return old_edit_workflow(request, workflow=workflow.id)
 
 
@@ -530,7 +530,7 @@ def edit_coordinator(request):
       except Document2.DoesNotExist as e:
         document = None
         coordinator.data['properties']['workflow'] = ''
-        LOG.warn("Workflow with uuid %s doesn't exist: %s" % (scheduled_uuid, e))
+        LOG.warning("Workflow with uuid %s doesn't exist: %s" % (scheduled_uuid, e))
 
       if document and document.is_trashed:
         raise PopupException(_('Your workflow %s has been trashed!') % (document.name if document.name else ''))

+ 8 - 8
apps/useradmin/src/useradmin/ldap_access.py

@@ -30,7 +30,7 @@ try:
   import ldap.filter
   from ldap import SCOPE_SUBTREE
 except ImportError:
-  LOG.warn('ldap module not found')
+  LOG.warning('ldap module not found')
   SCOPE_SUBTREE = None
 import re
 
@@ -244,7 +244,7 @@ class LdapConnection(object):
 
           # Skip unnamed entries.
           if user_name_attr not in data:
-            LOG.warn('Could not find %s in ldap attributes' % user_name_attr)
+            LOG.warning('Could not find %s in ldap attributes' % user_name_attr)
             continue
 
           ldap_info = {
@@ -255,12 +255,12 @@ class LdapConnection(object):
           if 'givenName' in data:
             first_name = smart_str(data['givenName'][0])
             if len(first_name) > 30:
-              LOG.warn('First name is truncated to 30 characters for [<User: %s>].' % ldap_info['username'])
+              LOG.warning('First name is truncated to 30 characters for [<User: %s>].' % ldap_info['username'])
             ldap_info['first'] = first_name[:30]
           if 'sn' in data:
             last_name = smart_str(data['sn'][0])
             if len(last_name) > 30:
-              LOG.warn('Last name is truncated to 30 characters for [<User: %s>].' % ldap_info['username'])
+              LOG.warning('Last name is truncated to 30 characters for [<User: %s>].' % ldap_info['username'])
             ldap_info['last'] = last_name[:30]
           if 'mail' in data:
             ldap_info['email'] = smart_str(data['mail'][0])
@@ -285,7 +285,7 @@ class LdapConnection(object):
 
           # Skip unnamed entries.
           if group_name_attr not in data:
-            LOG.warn('Could not find %s in ldap attributes' % group_name_attr)
+            LOG.warning('Could not find %s in ldap attributes' % group_name_attr)
             continue
 
           group_name = data[group_name_attr][0]
@@ -302,14 +302,14 @@ class LdapConnection(object):
           if group_member_attr in data and group_member_attr.lower() != 'memberuid':
             ldap_info['members'] = data[group_member_attr]
           else:
-            LOG.warn('Skipping import of non-posix users from group %s since group_member_attr '
+            LOG.warning('Skipping import of non-posix users from group %s since group_member_attr '
                      'is memberUid or group did not contain any members' % group_name)
             ldap_info['members'] = []
 
           if 'posixgroup' in (item.lower() for item in data['objectClass']) and 'memberUid' in data:
             ldap_info['posix_members'] = data['memberUid']
           else:
-            LOG.warn('Skipping import of posix users from group %s since posixGroup '
+            LOG.warning('Skipping import of posix users from group %s since posixGroup '
                      'not an objectClass or no memberUids found' % group_name)
             ldap_info['posix_members'] = []
 
@@ -376,7 +376,7 @@ class LdapConnection(object):
       else:
         return []
     except ldap.LDAPError as e:
-      LOG.warn("LDAP Error: %s" % e)
+      LOG.warning("LDAP Error: %s" % e)
 
     return None
 

+ 1 - 1
apps/useradmin/src/useradmin/middleware.py

@@ -65,7 +65,7 @@ class LdapSynchronizationMiddleware(MiddlewareMixin):
       return
 
     if not User.objects.filter(username=user.username, userprofile__creation_method=UserProfile.CreationMethod.EXTERNAL.name).exists():
-      LOG.warn("User %s is not an Ldap user" % user.username)
+      LOG.warning("User %s is not an Ldap user" % user.username)
       return
 
     # Cache should be cleared when user logs out.

+ 1 - 1
apps/useradmin/src/useradmin/models.py

@@ -357,7 +357,7 @@ def install_sample_user(django_user=None):
         LOG.info('Installed a user "%s"' % lookup)
 
     if user.username != django_username and not ENABLE_ORGANIZATIONS.get():
-      LOG.warn('Sample user does not have username "%s", will attempt to modify the username.' % django_username)
+      LOG.warning('Sample user does not have username "%s", will attempt to modify the username.' % django_username)
       with transaction.atomic():
         user = User.objects.get(id=SAMPLE_USER_ID)
         user.username = django_username

+ 2 - 2
apps/useradmin/src/useradmin/organization.py

@@ -60,7 +60,7 @@ def _fitered_queryset(queryset, by_owner=False):
 def get_organization(email, is_multi_user=False):
   if email is None:
     organization = Organization.objects.first()
-    LOG.warn('Returning first organization: %s' % organization)
+    LOG.warning('Returning first organization: %s' % organization)
   else:
     domain = email.split('@')[1] if is_multi_user else email
 
@@ -68,7 +68,7 @@ def get_organization(email, is_multi_user=False):
       organization, created = Organization.objects.get_or_create(name=domain, domain=domain, is_multi_user=is_multi_user)
       LOG.info("Materializing organization %s in the database, is_multi_user=%s" % (domain, is_multi_user))
     else:
-      LOG.warn('No organization domain found for email %s' % email)  # For Backends without emails or when organization enabled by default
+      LOG.warning('No organization domain found for email %s' % email)  # For Backends without emails or when organization enabled by default
       organization = None
 
   return organization

+ 1 - 1
apps/useradmin/src/useradmin/organization_tests.py

@@ -146,6 +146,6 @@ class TestOrganizationSingleUser(unittest.TestCase):
         User.objects.filter(groups__in=Group.objects.all()).values_list('username', flat=True)
       )
     except FieldError as e:
-      LOG.warn('Test currently skipped')
+      LOG.warning('Test currently skipped')
 
     self.client2.get('/useradmin/groups/edit/default')

+ 31 - 31
apps/useradmin/src/useradmin/views.py

@@ -32,7 +32,7 @@ from axes.utils import reset
 try:
   import ldap
 except ImportError:
-  LOG.warn('ldap module not found')
+  LOG.warning('ldap module not found')
 
 from django.urls import reverse
 from django.forms import ValidationError
@@ -849,7 +849,7 @@ def ensure_home_directory(fs, user):
   Throws IOError, WebHdfsException.
   """
   if fs is None:
-    LOG.warn("Not creating home directory of %s as no file system connector is configured" % user)
+    LOG.warning("Not creating home directory of %s as no file system connector is configured" % user)
     return
 
   userprofile = get_profile(user)
@@ -865,7 +865,7 @@ def ensure_home_directory(fs, user):
       home_directory = home_directory.decode("utf-8")
     fs.do_as_user(username, fs.create_home_dir, home_directory)
   else:
-    LOG.warn("Not creating home directory of %s as his profile is empty" % user)
+    LOG.warning("Not creating home directory of %s as his profile is empty" % user)
 
 def sync_unix_users_and_groups(min_uid, max_uid, min_gid, max_gid, check_shell):
   """
@@ -946,10 +946,10 @@ def _import_ldap_users(connection, username_pattern, sync_groups=False, import_b
   try:
     user_info = connection.find_users(username_pattern, find_by_dn=import_by_dn)
   except LdapSearchException as e:
-    LOG.warn("Failed to find LDAP user: %s" % e)
+    LOG.warning("Failed to find LDAP user: %s" % e)
 
   if not user_info:
-    LOG.warn("Could not get LDAP details for users with pattern %s" % username_pattern)
+    LOG.warning("Could not get LDAP details for users with pattern %s" % username_pattern)
     return None
 
   return _import_ldap_users_info(connection, user_info, sync_groups, import_by_dn, server, failed_users=failed_users)
@@ -994,7 +994,7 @@ def _import_ldap_users_info(connection, user_info, sync_groups=False, import_by_
       profile = get_profile(user)
       if not created and profile.creation_method == UserProfile.CreationMethod.HUE.name:
         # This is a Hue user, and shouldn't be overwritten
-        LOG.warn(_('There was a naming conflict while importing user %(username)s') % {
+        LOG.warning(_('There was a naming conflict while importing user %(username)s') % {
           'username': ldap_info['username']
         })
         return None
@@ -1049,7 +1049,7 @@ def _import_ldap_users_info(connection, user_info, sync_groups=False, import_by_
       if failed_users is None:
         failed_users = []
       failed_users.append(ldap_info['username'])
-      LOG.warn('Could not import %s: %s' % (ldap_info['username'], e.message))
+      LOG.warning('Could not import %s: %s' % (ldap_info['username'], e.message))
 
   return imported_users
 
@@ -1064,12 +1064,12 @@ def _import_ldap_members(connection, group, ldap_info, count=0, max_count=1, fai
   try:
     users_info = connection.find_users_of_group(ldap_info['dn'])
   except LdapSearchException as e:
-    LOG.warn("Failed to find LDAP users of group: %s" % e)
+    LOG.warning("Failed to find LDAP users of group: %s" % e)
 
   try:
     groups_info = connection.find_groups_of_group(ldap_info['dn'])
   except LdapSearchException as e:
-    LOG.warn("Failed to find LDAP groups of group: %s" % e)
+    LOG.warning("Failed to find LDAP groups of group: %s" % e)
 
   posix_members = ldap_info['posix_members']
 
@@ -1084,7 +1084,7 @@ def _import_ldap_members(connection, group, ldap_info, count=0, max_count=1, fai
 
     # Must find all members of subgroups
     if len(groups) > 1:
-      LOG.warn('Found multiple groups for member %s.' % smart_str(group_info['dn']))
+      LOG.warning('Found multiple groups for member %s.' % smart_str(group_info['dn']))
     else:
       for group in groups:
         _import_ldap_members(connection, group, group_info, count+1, max_count, failed_users=failed_users)
@@ -1096,7 +1096,7 @@ def _import_ldap_members(connection, group, ldap_info, count=0, max_count=1, fai
       user_info = connection.find_users(posix_member, search_attr=desktop.conf.LDAP.USERS.USER_NAME_ATTR.get(),
                                         user_name_attr=desktop.conf.LDAP.USERS.USER_NAME_ATTR.get(), find_by_dn=False)
     except LdapSearchException as e:
-      LOG.warn("Failed to find LDAP users: %s" % e)
+      LOG.warning("Failed to find LDAP users: %s" % e)
 
     if user_info:
       users = _import_ldap_users_info(connection, user_info, failed_users=failed_users)
@@ -1116,12 +1116,12 @@ def _sync_ldap_members(connection, group, ldap_info, count=0, max_count=1, faile
   try:
     users_info = connection.find_users_of_group(ldap_info['dn'])
   except LdapSearchException as e:
-    LOG.warn("Failed to find LDAP users of group: %s" % e)
+    LOG.warning("Failed to find LDAP users of group: %s" % e)
 
   try:
     groups_info = connection.find_groups_of_group(ldap_info['dn'])
   except LdapSearchException as e:
-    LOG.warn("Failed to find LDAP groups of group: %s" % e)
+    LOG.warning("Failed to find LDAP groups of group: %s" % e)
 
   posix_members = ldap_info['posix_members']
 
@@ -1131,7 +1131,7 @@ def _sync_ldap_members(connection, group, ldap_info, count=0, max_count=1, faile
       user = ldap_access.get_ldap_user(username=user_info['username'])
       group.user_set.add(user)
     except User.DoesNotExist:
-      LOG.warn("Synchronizing user %s with group %s failed. User does not exist." % (
+      LOG.warning("Synchronizing user %s with group %s failed. User does not exist." % (
       smart_str(user_info['dn']), smart_str(group.name)))
 
   for group_info in groups_info:
@@ -1141,7 +1141,7 @@ def _sync_ldap_members(connection, group, ldap_info, count=0, max_count=1, faile
       group = Group.objects.get(name=group_info['name'])
       _sync_ldap_members(connection, group, group_info, count+1, max_count, failed_users=failed_users)
     except Group.DoesNotExist:
-      LOG.warn("Synchronizing group %s failed. Group does not exist." % smart_str(group.name))
+      LOG.warning("Synchronizing group %s failed. Group does not exist." % smart_str(group.name))
 
   for posix_member in posix_members:
     LOG.debug("Synchronizing posix user %s with group %s" % (smart_str(posix_member), smart_str(group.name)))
@@ -1150,14 +1150,14 @@ def _sync_ldap_members(connection, group, ldap_info, count=0, max_count=1, faile
       users_info = connection.find_users(posix_member, search_attr=desktop.conf.LDAP.USERS.USER_NAME_ATTR.get(),
                                          user_name_attr=desktop.conf.LDAP.USERS.USER_NAME_ATTR.get(), find_by_dn=False)
     except LdapSearchException as e:
-      LOG.warn("Failed to find LDAP users: %s" % e)
+      LOG.warning("Failed to find LDAP users: %s" % e)
 
     for user_info in users_info:
       try:
         user = ldap_access.get_ldap_user(username=user_info['username'])
         group.user_set.add(user)
       except User.DoesNotExist:
-        LOG.warn("Synchronizing posix user %s with group %s failed. User does not exist." % (
+        LOG.warning("Synchronizing posix user %s with group %s failed. User does not exist." % (
         smart_str(posix_member), smart_str(group.name)))
 
 
@@ -1179,10 +1179,10 @@ def _import_ldap_nested_groups(connection, groupname_pattern, import_members=Fal
   try:
     group_info = connection.find_groups(groupname_pattern, find_by_dn=import_by_dn, scope=scope)
   except LdapSearchException as e:
-    LOG.warn("Failed to find LDAP group: %s" % e)
+    LOG.warning("Failed to find LDAP group: %s" % e)
 
   if not group_info:
-    LOG.warn("Could not get LDAP details for group pattern %s" % groupname_pattern)
+    LOG.warning("Could not get LDAP details for group pattern %s" % groupname_pattern)
     return None
 
   groups = []
@@ -1190,7 +1190,7 @@ def _import_ldap_nested_groups(connection, groupname_pattern, import_members=Fal
     group, created = Group.objects.get_or_create(name=ldap_info['name'])
     if not created and not LdapGroup.objects.filter(group=group).exists():
       # This is a Hue group, and shouldn't be overwritten
-      LOG.warn(_('There was a naming conflict while importing group %(groupname)s in pattern %(groupname_pattern)s') % {
+      LOG.warning(_('There was a naming conflict while importing group %(groupname)s in pattern %(groupname_pattern)s') % {
         'groupname': ldap_info['name'],
         'groupname_pattern': groupname_pattern
       })
@@ -1234,10 +1234,10 @@ def _import_ldap_suboordinate_groups(connection, groupname_pattern, import_membe
   try:
     group_info = connection.find_groups(groupname_pattern, find_by_dn=import_by_dn, scope=scope)
   except LdapSearchException as e:
-    LOG.warn("Could not find LDAP group: %s" % e)
+    LOG.warning("Could not find LDAP group: %s" % e)
 
   if not group_info:
-    LOG.warn("Could not get LDAP details for group pattern %s" % groupname_pattern)
+    LOG.warning("Could not get LDAP details for group pattern %s" % groupname_pattern)
     return None
 
   groups = []
@@ -1245,7 +1245,7 @@ def _import_ldap_suboordinate_groups(connection, groupname_pattern, import_membe
     group, created = Group.objects.get_or_create(name=ldap_info['name'])
     if not created and not LdapGroup.objects.filter(group=group).exists():
       # This is a Hue group, and shouldn't be overwritten
-      LOG.warn(_('There was a naming conflict while importing group %(groupname)s in pattern %(groupname_pattern)s') % {
+      LOG.warning(_('There was a naming conflict while importing group %(groupname)s in pattern %(groupname_pattern)s') % {
         'groupname': ldap_info['name'],
         'groupname_pattern': groupname_pattern
       })
@@ -1265,7 +1265,7 @@ def _import_ldap_suboordinate_groups(connection, groupname_pattern, import_membe
         try:
           group_info = connection.find_groups(ldap_info['dn'], find_by_dn=True)
         except LdapSearchException as e:
-          LOG.warn("Failed to find LDAP group: %s" % e)
+          LOG.warning("Failed to find LDAP group: %s" % e)
 
         for sub_ldap_info in group_info:
           members += sub_ldap_info['members']
@@ -1283,13 +1283,13 @@ def _import_ldap_suboordinate_groups(connection, groupname_pattern, import_membe
         try:
           user_info = connection.find_users(member, find_by_dn=True)
         except LdapSearchException as e:
-          LOG.warn("Failed to find LDAP user: %s" % e)
+          LOG.warning("Failed to find LDAP user: %s" % e)
 
         if user_info is None:
           continue
 
         if len(user_info) > 1:
-          LOG.warn('Found multiple users for member %s.' % member)
+          LOG.warning('Found multiple users for member %s.' % member)
         else:
           for ldap_info in user_info:
             try:
@@ -1300,7 +1300,7 @@ def _import_ldap_suboordinate_groups(connection, groupname_pattern, import_membe
               if failed_users is None:
                 failed_users = []
               failed_users.append(ldap_info['username'])
-              LOG.warn('Could not sync %s: %s' % (ldap_info['username'], e.message))
+              LOG.warning('Could not sync %s: %s' % (ldap_info['username'], e.message))
             except User.DoesNotExist:
               pass
 
@@ -1318,7 +1318,7 @@ def _import_ldap_suboordinate_groups(connection, groupname_pattern, import_membe
                                               user_name_attr=desktop.conf.LDAP.USERS.USER_NAME_ATTR.get(),
                                               find_by_dn=False)
           except LdapSearchException as e:
-            LOG.warn("Failed to find LDAP user: %s" % e)
+            LOG.warning("Failed to find LDAP user: %s" % e)
 
           if user_info:
             users = _import_ldap_users_info(connection, user_info, import_by_dn=False, failed_users=failed_users)
@@ -1335,10 +1335,10 @@ def _import_ldap_suboordinate_groups(connection, groupname_pattern, import_membe
                                               user_name_attr=desktop.conf.LDAP.USERS.USER_NAME_ATTR.get(),
                                               find_by_dn=False)
           except LdapSearchException as e:
-            LOG.warn("Failed to find LDAP user: %s" % e)
+            LOG.warning("Failed to find LDAP user: %s" % e)
 
           if len(user_info) > 1:
-            LOG.warn('Found multiple users for member %s.' % posix_member)
+            LOG.warning('Found multiple users for member %s.' % posix_member)
           else:
             for ldap_info in user_info:
               try:
@@ -1349,7 +1349,7 @@ def _import_ldap_suboordinate_groups(connection, groupname_pattern, import_membe
                 if failed_users is None:
                   failed_users = []
                 failed_users.append(ldap_info['username'])
-                LOG.warn('Could not sync %s: %s' % (ldap_info['username'], e.message))
+                LOG.warning('Could not sync %s: %s' % (ldap_info['username'], e.message))
               except User.DoesNotExist:
                 pass
 

+ 6 - 6
desktop/core/src/desktop/api2.py

@@ -851,7 +851,7 @@ def import_documents(request):
       # Replace illegal characters
       if '/' in doc['fields']['name']:
         new_name = doc['fields']['name'].replace('/', '-')
-        LOG.warn("Found illegal slash in document named: %s, renaming to: %s." % (doc['fields']['name'], new_name))
+        LOG.warning("Found illegal slash in document named: %s, renaming to: %s." % (doc['fields']['name'], new_name))
         doc['fields']['name'] = new_name
 
       # Set last modified date to now
@@ -1112,7 +1112,7 @@ def _copy_document_with_owner(doc, owner, uuids_map):
     doc['fields']['parent_directory'] = [uuids_map[parent_uuid], 1, False]
   else:
     if parent_uuid is not None:
-      LOG.warn('Could not find parent directory with UUID: %s in JSON import, will set parent to home directory' %
+      LOG.warning('Could not find parent directory with UUID: %s in JSON import, will set parent to home directory' %
                 parent_uuid)
     doc['fields']['parent_directory'] = [home_dir.uuid, home_dir.version, home_dir.is_history]
 
@@ -1120,7 +1120,7 @@ def _copy_document_with_owner(doc, owner, uuids_map):
   idx = 0
   for dep_uuid, dep_version, dep_is_history in doc['fields']['dependencies']:
     if dep_uuid not in list(uuids_map.keys()):
-      LOG.warn('Could not find dependency UUID: %s in JSON import, may cause integrity errors if not found.' % dep_uuid)
+      LOG.warning('Could not find dependency UUID: %s in JSON import, may cause integrity errors if not found.' % dep_uuid)
     else:
       if uuids_map[dep_uuid] is None:
         uuids_map[dep_uuid] = uuid_default()
@@ -1145,7 +1145,7 @@ def _create_or_update_document_with_owner(doc, owner, uuids_map):
     create_new = True
 
   if create_new:
-    LOG.warn('Could not find document with UUID: %s, will create a new document on import.', doc['fields']['uuid'])
+    LOG.warning('Could not find document with UUID: %s, will create a new document on import.', doc['fields']['uuid'])
     doc['pk'] = None
     doc['fields']['version'] = 1
 
@@ -1154,7 +1154,7 @@ def _create_or_update_document_with_owner(doc, owner, uuids_map):
     uuid, version, is_history = doc['fields']['parent_directory']
     if uuid not in list(uuids_map.keys()) and \
             not Document2.objects.filter(uuid=uuid, version=version, is_history=is_history).exists():
-      LOG.warn('Could not find parent document with UUID: %s, will set parent to home directory' % uuid)
+      LOG.warning('Could not find parent document with UUID: %s, will set parent to home directory' % uuid)
       doc['fields']['parent_directory'] = [home_dir.uuid, home_dir.version, home_dir.is_history]
 
   # Verify that dependencies exist, raise critical error if any dependency not found
@@ -1167,7 +1167,7 @@ def _create_or_update_document_with_owner(doc, owner, uuids_map):
         raise PopupException(_('Cannot import document, dependency with UUID: %s not found.') % uuid)
       elif is_history:
         history_deps_list.insert(0, index) # Insert in decreasing order to facilitate delete
-        LOG.warn('History dependency with UUID: %s ignored while importing document %s' % (uuid, doc['fields']['name']))
+        LOG.warning('History dependency with UUID: %s ignored while importing document %s' % (uuid, doc['fields']['name']))
 
     # Delete history dependencies not found in the DB
     for index in history_deps_list:

+ 1 - 1
desktop/core/src/desktop/appmanager.py

@@ -264,7 +264,7 @@ def load_apps(app_blacklist):
     if sdk_app.name not in app_blacklist:
       # TODO: Remove once pig and jobsub have been migrated to editor
       if 'oozie' in app_blacklist and sdk_app.name in ('pig', 'jobsub'):
-        LOG.warn('%s depends on oozie which is blacklisted, will skip loading %s app.' % (sdk_app.name, sdk_app.name))
+        LOG.warning('%s depends on oozie which is blacklisted, will skip loading %s app.' % (sdk_app.name, sdk_app.name))
       else:
         m = sdk_app.load()
         dmi = DesktopModuleInfo(m)

+ 13 - 13
desktop/core/src/desktop/auth/backend.py

@@ -38,11 +38,11 @@ LOG = logging.getLogger(__name__)
 try:
   import ldap
 except ImportError:
-  LOG.warn('ldap module not found')
+  LOG.warning('ldap module not found')
 try:
   import pam
 except ImportError:
-  LOG.warn('pam module not found')
+  LOG.warning('pam module not found')
 import requests
 
 import django.contrib.auth.backends
@@ -56,7 +56,7 @@ try:
   from django_auth_ldap.backend import LDAPBackend
   from django_auth_ldap.config import LDAPSearch
 except ImportError:
-  LOG.warn('django_auth_ldap module not found')
+  LOG.warning('django_auth_ldap module not found')
  class LDAPBackend: pass
  class LDAPSearch: pass
 from liboauth.metrics import oauth_authentication_time
@@ -64,7 +64,7 @@ try:
   from mozilla_django_oidc.auth import OIDCAuthenticationBackend, default_username_algo
   from mozilla_django_oidc.utils import absolutify, import_from_settings
 except ImportError:
-  LOG.warn('mozilla_django_oidc module not found')
+  LOG.warning('mozilla_django_oidc module not found')
   class OIDCAuthenticationBackend: pass
 
 from desktop import metrics
@@ -112,7 +112,7 @@ def rewrite_user(user):
   We currently only re-write specific attributes, though this could be generalized.
   """
   if user is None:
-    LOG.warn('Failed to rewrite user, user is None.')
+    LOG.warning('Failed to rewrite user, user is None.')
   else:
     augment = get_user_augmentation_class()(user)
     for attr in ('get_groups', 'get_home_directory', 'has_hue_permission', 'get_permissions'):
@@ -484,13 +484,13 @@ class LdapBackend(object):
   def add_ldap_config(self, ldap_config):
     ldap_url = ldap_config.LDAP_URL.get()
     if ldap_url is None:
-      LOG.warn("Could not find LDAP URL required for authentication.")
+      LOG.warning("Could not find LDAP URL required for authentication.")
       return None
     else:
       setattr(self._backend.settings, 'SERVER_URI', ldap_config.LDAP_URL.get())
 
     if ldap_url.lower().startswith('ldaps') and ldap_config.USE_START_TLS.get():
-      LOG.warn("Cannot configure LDAP with SSL and enable STARTTLS.")
+      LOG.warning("Cannot configure LDAP with SSL and enable STARTTLS.")
 
     if ldap_config.SEARCH_BIND_AUTHENTICATION.get():
       # New Search/Bind Auth
@@ -573,7 +573,7 @@ class LdapBackend(object):
       if existing_profile.creation_method == UserProfile.CreationMethod.EXTERNAL.name:
         is_super = User.objects.get(**username_filter_kwargs).is_superuser
     elif not LDAP.CREATE_USERS_ON_LOGIN.get():
-      LOG.warn("Create users when they login with their LDAP credentials is turned off")
+      LOG.warning("Create users when they login with their LDAP credentials is turned off")
       return None
 
     try:
@@ -584,10 +584,10 @@ class LdapBackend(object):
         else:
           user = self._backend.authenticate(username=username, password=password)
       else:
-        LOG.warn("%s not in an allowed login group" % username)
+        LOG.warning("%s not in an allowed login group" % username)
         return None
     except ImproperlyConfigured as detail:
-      LOG.warn("LDAP was not properly configured: %s", detail)
+      LOG.warning("LDAP was not properly configured: %s", detail)
       return None
 
     if user is not None and user.is_active:
@@ -620,10 +620,10 @@ class LdapBackend(object):
       try:
         user_info = connection.find_users(username, find_by_dn=False)
       except Exception as e:
-        LOG.warn("Failed to find LDAP user: %s" % e)
+        LOG.warning("Failed to find LDAP user: %s" % e)
 
       if not user_info:
-        LOG.warn("Could not get LDAP details for users with pattern %s" % username)
+        LOG.warning("Could not get LDAP details for users with pattern %s" % username)
         return False
 
       ldap_info = user_info[0]
@@ -927,7 +927,7 @@ class OIDCBackend(OIDCAuthenticationBackend):
       else:
         LOG.error("OpenID Connect logout failed: %s" % resp.content)
     else:
-      LOG.warn("OpenID Connect tokens are not available, logout skipped!")
+      LOG.warning("OpenID Connect tokens are not available, logout skipped!")
     return None
 
   # def filter_users_by_claims(self, claims):

+ 4 - 4
desktop/core/src/desktop/auth/views.py

@@ -243,9 +243,9 @@ def dt_logout(request, next_page=None):
     try:
       get_api(request, session).close_session(session)
     except PopupException as e:
-      LOG.warn("Error closing %s session: %s" % (session_app, e.message.encode('utf-8')))
+      LOG.warning("Error closing %s session: %s" % (session_app, e.message.encode('utf-8')))
     except Exception as e:
-      LOG.warn("Error closing %s session: %s" % (session_app, e))
+      LOG.warning("Error closing %s session: %s" % (session_app, e))
 
   backends = get_backends()
   if backends:
@@ -256,10 +256,10 @@ def dt_logout(request, next_page=None):
           if response:
             return response
         except Exception as e:
-          LOG.warn('Potential error on logout for user: %s with exception: %s' % (username, e))
+          LOG.warning('Potential error on logout for user: %s with exception: %s' % (username, e))
 
   if len([backend for backend in backends if hasattr(backend, 'logout')]) == len(backends):
-    LOG.warn("Failed to log out from all backends for user: %s" % (username))
+    LOG.warning("Failed to log out from all backends for user: %s" % (username))
 
   response = django.contrib.auth.views.LogoutView.as_view(next_page=next_page)(request)
   response.delete_cookie(LOAD_BALANCER_COOKIE)

+ 1 - 1
desktop/core/src/desktop/conf.py

@@ -2314,7 +2314,7 @@ def config_validator(user):
     if not _is_oozie_mail_enabled(user):
       res.append(('OOZIE_EMAIL_SERVER', new_str(_('Email notifications is disabled for Workflows and Jobs as SMTP server is localhost.'))))
   except Exception as e:
-    LOG.warn('Config check failed because Oozie app not installed %s' % e)
+    LOG.warning('Config check failed because Oozie app not installed %s' % e)
 
   from notebook.models import make_notebook
   from notebook.api import _save_notebook

+ 4 - 4
desktop/core/src/desktop/converters.py

@@ -109,7 +109,7 @@ class DocumentConverter(object):
           self.failed_doc_ids.append(doc.id)
           LOG.exception('Failed to import SavedQuery document id: %d' % doc.id)
     except ImportError:
-      LOG.warn('Cannot convert Saved Query documents: beeswax app is not installed')
+      LOG.warning('Cannot convert Saved Query documents: beeswax app is not installed')
 
 
   def _convert_query_histories(self):
@@ -145,7 +145,7 @@ class DocumentConverter(object):
           self.failed_doc_ids.append(doc.id)
           LOG.exception('Failed to import history document id: %d' % doc.id)
     except ImportError as e:
-      LOG.warn('Cannot convert history documents: beeswax app is not installed')
+      LOG.warning('Cannot convert history documents: beeswax app is not installed')
 
 
   def _convert_job_designs(self):
@@ -191,7 +191,7 @@ class DocumentConverter(object):
           self.failed_doc_ids.append(doc.id)
           LOG.exception('Failed to import Job Designer document id: %d' % doc.id)
     except ImportError as e:
-      LOG.warn('Cannot convert Job Designer documents: oozie app is not installed')
+      LOG.warning('Cannot convert Job Designer documents: oozie app is not installed')
 
 
   def _convert_pig_scripts(self):
@@ -220,7 +220,7 @@ class DocumentConverter(object):
           self.failed_doc_ids.append(doc.id)
           LOG.exception('Failed to import Pig document id: %d' % doc.id)
     except ImportError as e:
-      LOG.warn('Cannot convert Pig documents: pig app is not installed')
+      LOG.warning('Cannot convert Pig documents: pig app is not installed')
 
 
   def _get_unconverted_docs(self, content_type, only_history=False):

+ 1 - 1
desktop/core/src/desktop/lib/conf.py

@@ -229,7 +229,7 @@ class Config(object):
                       % (key, type, default, pytype(default)))
 
     if type == bool:
-      LOG.warn("%s is of type bool. Resetting it as type 'coerce_bool'."
+      LOG.warning("%s is of type bool. Resetting it as type 'coerce_bool'."
                " Please fix it permanently" % (key,))
       type = coerce_bool
 

+ 1 - 1
desktop/core/src/desktop/lib/connectors/models.py

@@ -151,7 +151,7 @@ def _get_installed_connectors(category=None, categories=None, dialect=None, inte
     if full_connector:
       connectors.append(full_connector)
     else:
-      LOG.warn('Skipping connector %(id)s as connector dialect %(dialect)s or interface %(interface)s are not installed' % (
+      LOG.warning('Skipping connector %(id)s as connector dialect %(dialect)s or interface %(interface)s are not installed' % (
           {'id': connector['id'], 'dialect': connector['dialect'], 'interface': connector['interface']}
         )
       )

+ 1 - 1
desktop/core/src/desktop/lib/fs/gc/client.py

@@ -21,7 +21,7 @@ LOG = logging.getLogger(__name__)
 try:
   import gcs_oauth2_boto_plugin
 except ImportError:
-  LOG.warn('gcs_oauth2_boto_plugin module not found')
+  LOG.warning('gcs_oauth2_boto_plugin module not found')
 import json
 
 from aws.s3.s3fs import S3FileSystem

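The hunk above follows the optional-dependency pattern used throughout these files: an import that may be absent is wrapped in try/except and downgraded to a warning at import time. A minimal sketch of the pattern (the None fallback is an illustrative addition, not part of this file):

  import logging

  LOG = logging.getLogger(__name__)

  try:
      import gcs_oauth2_boto_plugin  # optional; only needed for GCS access
  except ImportError:
      gcs_oauth2_boto_plugin = None
      LOG.warning('gcs_oauth2_boto_plugin module not found')
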
+ 1 - 1
desktop/core/src/desktop/lib/idbroker/conf.py

@@ -38,7 +38,7 @@ def validate_fs(fs=None):
   if fs in SUPPORTED_FS:
     return SUPPORTED_FS[fs]
   else:
-    LOG.warn('Selected FS %s is not supported by Hue IDBroker client' % fs)
+    LOG.warning('Selected FS %s is not supported by Hue IDBroker client' % fs)
     return None
 
 def get_cab_address(fs=None):

+ 1 - 1
desktop/core/src/desktop/lib/scheduler/api.py

@@ -35,7 +35,7 @@ try:
   from oozie.forms import ParameterForm
   from oozie.views.editor2 import edit_coordinator, new_coordinator, Coordinator
 except Exception as e:
-  LOG.warn('Oozie application is not enabled: %s' % e)
+  LOG.warning('Oozie application is not enabled: %s' % e)
 
 
 def list_schedules(request):

+ 1 - 1
desktop/core/src/desktop/lib/thrift_util.py

@@ -834,7 +834,7 @@ def is_thrift_struct(o):
 # Same in resource.py for not losing the trace class
 def log_if_slow_call(duration, message):
   if duration >= math.floor(WARN_LEVEL_CALL_DURATION_MS / 1000):
-    LOG.warn('SLOW: %.2f - %s' % (duration, message))
+    LOG.warning('SLOW: %.2f - %s' % (duration, message))
   elif duration >= math.floor(INFO_LEVEL_CALL_DURATION_MS / 1000):
     LOG.info('SLOW: %.2f - %s' % (duration, message))
   else:

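For context, log_if_slow_call() takes the elapsed time in seconds while WARN_LEVEL_CALL_DURATION_MS and INFO_LEVEL_CALL_DURATION_MS are configured in milliseconds, hence the / 1000 conversion. A hedged usage sketch (the timed call is a stand-in, not an API from this file):

  import time

  start = time.time()
  client.FetchResults(request)  # stand-in for any timed Thrift call
  log_if_slow_call(time.time() - start, 'TCLIService.FetchResults')
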
+ 1 - 1
desktop/core/src/desktop/lib/view_util.py

@@ -111,7 +111,7 @@ def location_to_url(location, strict=True, is_embeddable=False):
   try:
     filebrowser_path = reverse("filebrowser:filebrowser.views.view", kwargs=dict(path=path))
   except Exception as e:
-    LOG.warn('No table filesystem link: %s' % e)
+    LOG.warning('No table filesystem link: %s' % e)
     return None
 
   if is_embeddable and not filebrowser_path.startswith('/hue'):

+ 1 - 1
desktop/core/src/desktop/manage_entry.py

@@ -34,7 +34,7 @@ def _deprecation_check(arg0):
     to_use = os.path.join(os.path.dirname(arg0), 'hue')
     msg = "Warning: '%s' has been deprecated. Please use '%s' instead." % (arg0, to_use)
     print(msg, file=sys.stderr)
-    LOG.warn(msg)
+    LOG.warning(msg)
 
 def reload_with_cm_env(cm_managed):
   try:

+ 1 - 1
desktop/core/src/desktop/management/commands/create_user_directories.py

@@ -68,4 +68,4 @@ class Command(BaseCommand):
       except Exception as e:
         msg = 'Failed to create user directories for user %s: %s' % (user.username, str(e))
         self.stdout.write(msg)
-        LOG.warn(msg)
+        LOG.warning(msg)

+ 1 - 1
desktop/core/src/desktop/management/commands/desktop_document_cleanup.py

@@ -117,7 +117,7 @@ class Command(BaseCommand):
         self.deleteRecordsBase = 999  #number of documents to delete in a batch
                                       #to avoid Non Fatal Exception: DatabaseError: too many SQL variables
 
-        LOG.warn("HUE_CONF_DIR: %s" % os.environ['HUE_CONF_DIR'])
+        LOG.warning("HUE_CONF_DIR: %s" % os.environ['HUE_CONF_DIR'])
         LOG.info("DB Engine: %s" % desktop.conf.DATABASE.ENGINE.get())
         LOG.info("DB Name: %s" % desktop.conf.DATABASE.NAME.get())
         LOG.info("DB User: %s" % desktop.conf.DATABASE.USER.get())

+ 60 - 60
desktop/core/src/desktop/management/commands/ldaptest.py

@@ -201,19 +201,19 @@ class Command(BaseCommand):
     ldap_url = ldap_config.LDAP_URL.get()
     if ldap_url is None:
       LOG.info(_(ldap_url_msg))
-      LOG.warn('Could not find LDAP_URL server in hue.ini required for authentication')
+      LOG.warning('Could not find LDAP_URL server in hue.ini required for authentication')
       return err_code
 
     if not ((ldap_url.startswith("ldap") and
           "://" in ldap_url)):
       LOG.info(_(ldap_url_msg))
-      LOG.warn("Check your ldap_url=%s" % ldap_url)
+      LOG.warning("Check your ldap_url=%s" % ldap_url)
       return err_code
 
     ldap_cert = ldap_config.LDAP_CERT.get()
     if ldap_cert is not None and (not os.path.isfile(ldap_cert)):
       LOG.info(_(ldap_cert_msg))
-      LOG.warn("Could not find certificate %s on %s" % (ldap_cert, socket.gethostname()))
+      LOG.warning("Could not find certificate %s on %s" % (ldap_cert, socket.gethostname()))
       return err_code
 
     if ldap_cert is not None:
@@ -223,7 +223,7 @@ class Command(BaseCommand):
     bind_dn = ldap_config.BIND_DN.get()
     if bind_dn is None:
       LOG.info(_(bind_dn_msg))
-      LOG.warn("Could not find bind_dn in hue.ini required for authentication")
+      LOG.warning("Could not find bind_dn in hue.ini required for authentication")
       return err_code
 
     if ldap_config.SEARCH_BIND_AUTHENTICATION.get():
@@ -233,17 +233,17 @@ class Command(BaseCommand):
       bind_password = ldap_config.BIND_PASSWORD.get()
       if user_name_attr=='' or ' ' in user_name_attr:
         LOG.info(_(user_name_attr_msg))
-        LOG.warn("Could not find user_name_attr in hue.ini")
+        LOG.warning("Could not find user_name_attr in hue.ini")
         return err_code
 
       if user_filter=='':
         LOG.info(_(user_filter_msg))
-        LOG.warn("Could not find user_filter in hue.ini required for authentication")
+        LOG.warning("Could not find user_filter in hue.ini required for authentication")
         return err_code
 
       if (not bind_password and not ldap_config.BIND_PASSWORD_SCRIPT.get()):
         LOG.info(_(bind_password_msg))
-        LOG.warn("Could not find bind_password in hue.ini, required for authentication")
+        LOG.warning("Could not find bind_password in hue.ini, required for authentication")
         return err_code
     else:
       # Direct Bind Auth
@@ -253,8 +253,8 @@ class Command(BaseCommand):
         if pattern is None:
           LOG.info(_(nt_domain_msg))
           LOG.info(_(ldap_username_pattern_msg))
-          LOG.warn('Could not find nt_domain in hue.ini')
-          LOG.warn('Could not find ldap_username_pattern in hue.ini, required for authentication')
+          LOG.warning('Could not find nt_domain in hue.ini')
+          LOG.warning('Could not find ldap_username_pattern in hue.ini, required for authentication')
           return err_code
         else:
           pattern = pattern.replace('<username>', bind_dn)
@@ -263,7 +263,7 @@ class Command(BaseCommand):
         if ((',' in bind_dn) or ('@' in bind_dn) or ('=' in bind_dn) or (' ' in bind_dn)):
           LOG.info(_(nt_domain_msg))
           LOG.info(_(ldap_username_pattern_msg))
-          LOG.warn('bind_dn value contains , or @ or = or " " character which is not allowed')
+          LOG.warning('bind_dn value contains , or @ or = or " " character which is not allowed')
           return err_code
         # %(user)s is a special string that will get replaced during the authentication process
         LOG.info('Setting USER_DN_TEMPLATE as %s@%s' % (bind_dn, nt_domain))
@@ -274,28 +274,28 @@ class Command(BaseCommand):
     err_code = 0
     test_ldap_user = ldap_config.TEST_LDAP_USER.get()
     if '*' in test_ldap_user:
-      LOG.warn('Setting test_ldap_user as %s' % test_ldap_user)
-      LOG.warn('This operation can overwhelm the server')
-      LOG.warn('Chances are server may or may not respond')
-      LOG.warn('If you want to test your LDAP Settings please use specific username')
+      LOG.warning('Setting test_ldap_user as %s' % test_ldap_user)
+      LOG.warning('This operation can overwhelm the server')
+      LOG.warning('Chances are server may or may not respond')
+      LOG.warning('If you want to test your LDAP Settings please use specific username')
 
     try:
       users = ldap_obj.find_users(test_ldap_user)
     except ldap.NO_SUCH_OBJECT as err:
-      LOG.warn(str(err))
+      LOG.warning(str(err))
       LOG.info(_(base_dn_msg))
-      LOG.warn('hints: check base_dn')
+      LOG.warning('hints: check base_dn')
       err_code = 1
     except:
       typ, value, traceback = sys.exc_info()
-      LOG.warn("%s %s" % (typ, value))
+      LOG.warning("%s %s" % (typ, value))
       LOG.info(_(base_dn_msg))
-      LOG.warn('hints: check base_dn')
+      LOG.warning('hints: check base_dn')
       err_code = 1
 
     # print ldapsearch command for debugging purpose
     if err_code:
-      LOG.warn(ldap_obj.ldapsearch_cmd())
+      LOG.warning(ldap_obj.ldapsearch_cmd())
       return err_code
     else:
       LOG.info(ldap_obj.ldapsearch_cmd())
@@ -305,14 +305,14 @@ class Command(BaseCommand):
         LOG.info('%s' % user)
         if user.get('username', '')=='':
           LOG.info(_(user_name_attr_msg))
-          LOG.warn('hints: check user_name_attr="%s"' % ldap_config.USERS.USER_NAME_ATTR.get())
+          LOG.warning('hints: check user_name_attr="%s"' % ldap_config.USERS.USER_NAME_ATTR.get())
           err_code = 1
     else:
-      LOG.warn('test_ldap_user %s may not exist' % test_ldap_user)
+      LOG.warning('test_ldap_user %s may not exist' % test_ldap_user)
       LOG.info(_(user_filter_msg))
       LOG.info(_(user_name_attr_msg))
-      LOG.warn('hints: check user_filter="%s"' % ldap_config.USERS.USER_FILTER.get())
-      LOG.warn('hints: check user_name_attr="%s"' % ldap_config.USERS.USER_NAME_ATTR.get())
+      LOG.warning('hints: check user_filter="%s"' % ldap_config.USERS.USER_FILTER.get())
+      LOG.warning('hints: check user_name_attr="%s"' % ldap_config.USERS.USER_NAME_ATTR.get())
       err_code = 1
 
     return err_code
@@ -321,27 +321,27 @@ class Command(BaseCommand):
     err_code = 0
     test_ldap_group = ldap_config.TEST_LDAP_GROUP.get()
     if '*' in test_ldap_group:
-      LOG.warn("Setting test_ldap_group as %s" % test_ldap_group)
-      LOG.warn("This operation can overwhelm the server")
-      LOG.warn("Chances are server may or may not respond")
-      LOG.warn("If you want to test your LDAP Settings please use specific groupname")
+      LOG.warning("Setting test_ldap_group as %s" % test_ldap_group)
+      LOG.warning("This operation can overwhelm the server")
+      LOG.warning("Chances are server may or may not respond")
+      LOG.warning("If you want to test your LDAP Settings please use specific groupname")
 
     try:
       groups = ldap_obj.find_groups(test_ldap_group)
     except ldap.NO_SUCH_OBJECT as err:
-      LOG.warn(str(err))
+      LOG.warning(str(err))
       LOG.info(_(base_dn_msg))
-      LOG.warn("hints: check base_dn")
+      LOG.warning("hints: check base_dn")
       err_code = 1
     except:
       typ, value, traceback = sys.exc_info()
-      LOG.warn("%s %s" % (typ, value))
+      LOG.warning("%s %s" % (typ, value))
       LOG.info(_(base_dn_msg))
-      LOG.warn("hints: check base_dn")
+      LOG.warning("hints: check base_dn")
       err_code = 1
 
     if err_code:
-      LOG.warn(ldap_obj.ldapsearch_cmd())
+      LOG.warning(ldap_obj.ldapsearch_cmd())
       return err_code
     else:
      LOG.info(ldap_obj.ldapsearch_cmd())
@@ -350,11 +350,11 @@ class Command(BaseCommand):
       for grp in groups:
         LOG.info("%s" % grp)
     else:
-      LOG.warn("test_ldap_group %s may not exist" % test_ldap_group)
+      LOG.warning("test_ldap_group %s may not exist" % test_ldap_group)
       LOG.info(_(group_filter_msg))
       LOG.info(_(group_name_attr_msg))
-      LOG.warn("hints: check group_filter=\"%s\"" % ldap_config.GROUPS.GROUP_FILTER.get())
-      LOG.warn("hints: check group_name_attr=\"%s\"" % ldap_config.GROUPS.GROUP_NAME_ATTR.get())
+      LOG.warning("hints: check group_filter=\"%s\"" % ldap_config.GROUPS.GROUP_FILTER.get())
+      LOG.warning("hints: check group_name_attr=\"%s\"" % ldap_config.GROUPS.GROUP_NAME_ATTR.get())
       err_code = 1
 
     return err_code
@@ -366,20 +366,20 @@ class Command(BaseCommand):
     try:
       groups = ldap_obj.find_users_of_group(test_ldap_group)
     except ldap.NO_SUCH_OBJECT as err:
-      LOG.warn(str(err))
+      LOG.warning(str(err))
       LOG.info(_(base_dn_msg))
-      LOG.warn('hints: check base_dn')
+      LOG.warning('hints: check base_dn')
       err_code = 1
     except:
       typ, value, traceback = sys.exc_info()
-      LOG.warn("%s %s" % (typ, value))
+      LOG.warning("%s %s" % (typ, value))
       LOG.info(_(base_dn_msg))
-      LOG.warn('hints: check base_dn')
+      LOG.warning('hints: check base_dn')
       err_code = 1
 
     # print ldapsearch command for debugging purpose
     if err_code:
-      LOG.warn(ldap_obj.ldapsearch_cmd())
+      LOG.warning(ldap_obj.ldapsearch_cmd())
       return err_code
     else:
       LOG.info(ldap_obj.ldapsearch_cmd())
@@ -389,14 +389,14 @@ class Command(BaseCommand):
         LOG.info('%s' % grp)
         if grp.get('members', [])==[]:
           LOG.info(_(group_member_attr_msg))
-          LOG.warn('hints: check group_member_attr="%s"' % ldap_config.GROUPS.GROUP_MEMBER_ATTR.get())
+          LOG.warning('hints: check group_member_attr="%s"' % ldap_config.GROUPS.GROUP_MEMBER_ATTR.get())
           err_code = 1
     else:
-      LOG.warn('find_users_of_group %s may not exist' % test_ldap_group)
+      LOG.warning('find_users_of_group %s may not exist' % test_ldap_group)
       LOG.info(_(group_filter_msg))
       LOG.info(_(group_name_attr_msg))
-      LOG.warn('hints: check group_filter="%s"' % ldap_config.GROUPS.GROUP_FILTER.get())
-      LOG.warn('hints: check group_name_attr="%s"' % ldap_config.GROUPS.GROUP_NAME_ATTR.get())
+      LOG.warning('hints: check group_filter="%s"' % ldap_config.GROUPS.GROUP_FILTER.get())
+      LOG.warning('hints: check group_name_attr="%s"' % ldap_config.GROUPS.GROUP_NAME_ATTR.get())
       err_code = 1
 
     return err_code
@@ -408,20 +408,20 @@ class Command(BaseCommand):
     try:
       groups = ldap_obj.find_groups_of_group(test_ldap_group)
     except ldap.NO_SUCH_OBJECT as err:
-      LOG.warn(err.args)
+      LOG.warning(err.args)
       LOG.info(_(base_dn_msg))
-      LOG.warn('hints: check base_dn')
+      LOG.warning('hints: check base_dn')
       err_code = 1
     except:
       typ, value, traceback = sys.exc_info()
-      LOG.warn("%s %s" % (typ, value))
+      LOG.warning("%s %s" % (typ, value))
       LOG.info(_(base_dn_msg))
-      LOG.warn('hints: check base_dn')
+      LOG.warning('hints: check base_dn')
       err_code = 1
 
     # print ldapsearch command for debugging purpose
     if err_code:
-      LOG.warn(ldap_obj.ldapsearch_cmd())
+      LOG.warning(ldap_obj.ldapsearch_cmd())
       return err_code
     else:
       LOG.info(ldap_obj.ldapsearch_cmd())
@@ -431,7 +431,7 @@ class Command(BaseCommand):
         LOG.info('%s' % grp)
         if grp.get('members',[])==[]:
           LOG.info(_(group_member_attr_msg))
-          LOG.warn('hints: check group_member_attr="%s"' % ldap_config.GROUPS.GROUP_MEMBER_ATTR.get())
+          LOG.warning('hints: check group_member_attr="%s"' % ldap_config.GROUPS.GROUP_MEMBER_ATTR.get())
           err_code = 1
     else:
       LOG.info('find_groups_of_group %s may not exist' % test_ldap_group)
@@ -440,7 +440,7 @@ class Command(BaseCommand):
 
   def sys_exit(self, exit_code):
     if exit_code!=0:
-      LOG.warn('LDAP Test Command failed')
+      LOG.warning('LDAP Test Command failed')
     sys.exit(exit_code)
 
   def handle(self, *args, **options):
@@ -482,28 +482,28 @@ class Command(BaseCommand):
       try:
         connection = ldap_access.get_connection(ldap_config)
       except ldap_access.LdapBindException as err:
-        LOG.warn(str(err))
+        LOG.warning(str(err))
         LOG.info(_(ldap_url_msg))
         LOG.info(_(bind_dn_msg))
-        LOG.warn('hints: check bind_dn, bind_password and ldap_url')
-        LOG.warn('ldap_url="%s"' % ldap_config.LDAP_URL.get())
-        LOG.warn('bind_dn="%s"' % ldap_config.BIND_DN.get())
+        LOG.warning('hints: check bind_dn, bind_password and ldap_url')
+        LOG.warning('ldap_url="%s"' % ldap_config.LDAP_URL.get())
+        LOG.warning('bind_dn="%s"' % ldap_config.BIND_DN.get())
         err_code = 1
       except:
         typ, value, traceback = sys.exc_info()
-        LOG.warn("%s %s" % (typ, value))
+        LOG.warning("%s %s" % (typ, value))
         LOG.info(_(ldap_url_msg))
         LOG.info(_(bind_dn_msg))
-        LOG.warn('hints: check bind_dn, bind_password and ldap_url')
-        LOG.warn('ldap_url="%s"' % ldap_config.LDAP_URL.get())
-        LOG.warn('bind_dn="%s"' % ldap_config.BIND_DN.get())
+        LOG.warning('hints: check bind_dn, bind_password and ldap_url')
+        LOG.warning('ldap_url="%s"' % ldap_config.LDAP_URL.get())
+        LOG.warning('bind_dn="%s"' % ldap_config.BIND_DN.get())
         err_code = 1
 
       if err_code:
         cfg = ldap_access.get_auth(ldap_config)
         ldapsearch = 'ldapsearch -x -LLL -H {ldap_url} -D "{binddn}" -w "********" -b "" ' \
                      ' -s base'.format(ldap_url=cfg[0], binddn=cfg[1])
-        LOG.warn(ldapsearch)
+        LOG.warning(ldapsearch)
         self.sys_exit(err_code)
 
       LOG.info('LDAP whoami_s() %s' % (connection.ldap_handle.whoami_s()))

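The connectivity checks above amount to a simple bind followed by whoami_s(). A rough standalone equivalent using python-ldap, with placeholder values for ldap_url, bind_dn and the password:

  import ldap  # python-ldap

  conn = ldap.initialize('ldap://ldap.example.com:389')  # placeholder URL
  try:
      conn.simple_bind_s('uid=hue,ou=people,dc=example,dc=com', 'secret')
      print(conn.whoami_s())
  except ldap.LDAPError as e:
      print('bind failed: %s' % e)
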
+ 4 - 4
desktop/core/src/desktop/models.py

@@ -423,7 +423,7 @@ class DocumentManager(models.Manager):
       doc.tags.add(tag)
       return doc
     else:
-      LOG.warn('Object %s already has documents: %s' % (content_object, content_object.doc.all()))
+      LOG.warning('Object %s already has documents: %s' % (content_object, content_object.doc.all()))
       return content_object.doc.all()[0]
 
   def sync(self, doc2_only=True):
@@ -784,7 +784,7 @@ class Document(models.Model):
       else:
         return staticfiles_storage.url('desktop/art/icon_hue_48.png')
     except Exception as e:
-      LOG.warn(force_unicode(e))
+      LOG.warning(force_unicode(e))
       return staticfiles_storage.url('desktop/art/icon_hue_48.png')
 
   def share(self, users, groups, name='read'):
@@ -1284,7 +1284,7 @@ class Document2(models.Model):
       else:
         url = reverse('oozie:edit_workflow') + '?workflow=' + str(self.id)
     except NoReverseMatch:
-      LOG.warn('Could not perform reverse lookup for type %s, app may be blacklisted.' % self.type)
+      LOG.warning('Could not perform reverse lookup for type %s, app may be blacklisted.' % self.type)
     return url
 
   def to_dict(self):
@@ -2213,7 +2213,7 @@ def get_user_preferences(user, key=None):
       return None
     except UserPreferences.MultipleObjectsReturned:
       for dup in UserPreferences.objects.filter(user=user, key=key)[1:]:
-        LOG.warn('Deleting UserPreferences duplicate %s' % dup)
+        LOG.warning('Deleting UserPreferences duplicate %s' % dup)
         dup.delete()
       x = UserPreferences.objects.get(user=user, key=key)
       return {key: x.value}

+ 4 - 4
desktop/core/src/desktop/supervisor.py

@@ -158,7 +158,7 @@ class Supervisor(threading.Thread):
           self.state = Supervisor.FINISHED
           return
         if exitcode != 0:
-          LOG.warn("Exit code for %s: %d" % (proc_str, exitcode))
+          LOG.warning("Exit code for %s: %d" % (proc_str, exitcode))
           self.state = Supervisor.ERROR
         et = time.time()
 
@@ -188,7 +188,7 @@ def shutdown(sups):
   global SHOULD_STOP
   SHOULD_STOP = True
 
-  LOG.warn("Supervisor shutting down!")
+  LOG.warning("Supervisor shutting down!")
 
   for pid in CHILD_PIDS:
     try:
@@ -196,7 +196,7 @@ def shutdown(sups):
     except OSError:
       pass
 
-  LOG.warn("Waiting for children to exit for %d seconds..." % WAIT_FOR_DEATH)
+  LOG.warning("Waiting for children to exit for %d seconds..." % WAIT_FOR_DEATH)
   t = time.time()
   still_alive = False
   while time.time() < t + WAIT_FOR_DEATH:
@@ -207,7 +207,7 @@ def shutdown(sups):
     if not still_alive:
       break
   if still_alive:
-    LOG.warn("Children have not exited after %d seconds. Killing them with SIGKILL." %
+    LOG.warning("Children have not exited after %d seconds. Killing them with SIGKILL." %
              WAIT_FOR_DEATH)
     for pid in CHILD_PIDS:
       try:

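The shutdown() hunk shows a standard two-phase stop: SIGTERM, a bounded wait of WAIT_FOR_DEATH seconds, then SIGKILL for stragglers. A self-contained sketch of the same pattern (pid bookkeeping simplified):

  import os
  import signal
  import time

  def stop_children(child_pids, wait_for_death=5):
      for pid in child_pids:
          try:
              os.kill(pid, signal.SIGTERM)   # polite request first
          except OSError:
              pass                           # already exited
      deadline = time.time() + wait_for_death
      while time.time() < deadline and any(_alive(p) for p in child_pids):
          time.sleep(0.2)
      for pid in child_pids:
          if _alive(pid):
              os.kill(pid, signal.SIGKILL)   # force-kill survivors

  def _alive(pid):
      try:
          os.kill(pid, 0)                    # signal 0 only checks existence
          return True
      except OSError:
          return False
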
+ 5 - 5
desktop/core/src/desktop/tests.py

@@ -225,7 +225,7 @@ def test_log_view():
   URL = reverse(views.log_view)
 
   LOG = logging.getLogger(__name__)
-  LOG.warn('une voix m’a réveillé')
+  LOG.warning('une voix m’a réveillé')
 
   # UnicodeDecodeError: 'ascii' codec can't decode byte... should not happen
   response = c.get(URL)
@@ -236,7 +236,7 @@ def test_log_view():
   URL = reverse(views.log_view)
 
   LOG = logging.getLogger(__name__)
-  LOG.warn('Got response: PK\x03\x04\n\x00\x00\x08\x00\x00\xad\x0cN?\x00\x00\x00\x00')
+  LOG.warning('Got response: PK\x03\x04\n\x00\x00\x08\x00\x00\xad\x0cN?\x00\x00\x00\x00')
 
   # DjangoUnicodeDecodeError: 'utf8' codec can't decode byte 0xad in position 75: invalid start byte... should not happen
   response = c.get(URL)
@@ -248,7 +248,7 @@ def test_download_log_view():
   URL = reverse(views.download_log_view)
 
   LOG = logging.getLogger(__name__)
-  LOG.warn(u'une voix m’a réveillé')
+  LOG.warning(u'une voix m’a réveillé')
 
   # UnicodeDecodeError: 'ascii' codec can't decode byte... should not happen
   response = c.get(URL)
@@ -1519,7 +1519,7 @@ def test_db_migrations_mysql():
   try:
     subprocess.check_output('type mysql', shell=True)
   except subprocess.CalledProcessError as e:
-    LOG.warn('mysql not found')
+    LOG.warning('mysql not found')
     raise SkipTest
   for version in versions:
     file_name = 'hue_' + version + '_mysql.sql'
@@ -1547,7 +1547,7 @@ def test_db_migrations_mysql():
       )
       call_command('migrate', '--fake-initial', '--database=%(SCHEMA)s' % DATABASES[name])
     except subprocess.CalledProcessError as e:
-      LOG.warn('stderr: {}'.format(e.output))
+      LOG.warning('stderr: {}'.format(e.output))
       raise e
     finally:
       del DATABASES[name]

+ 6 - 6
desktop/core/src/desktop/views.py

@@ -546,13 +546,13 @@ def get_banner_message(request):
       url = request.build_absolute_uri("/hue")
       link = '<a href="%s" style="color: #FFF; font-weight: bold">%s</a>' % (url, url)
       message = _('You are accessing an older version of Hue, please switch to the latest version: %s.') % link
-      LOG.warn('User %s is using Hue 3 UI' % request.user.username)
+      LOG.warning('User %s is using Hue 3 UI' % request.user.username)
 
     if HUE_LOAD_BALANCER.get() and HUE_LOAD_BALANCER.get() != [''] and \
       (not forwarded_host or not any(forwarded_host in lb for lb in HUE_LOAD_BALANCER.get())):
       message = _('You are accessing a non-optimized Hue, please switch to one of the available addresses: %s') % \
         (", ".join(['<a href="%s" style="color: #FFF; font-weight: bold">%s</a>' % (host, host) for host in HUE_LOAD_BALANCER.get()]))
-      LOG.warn('User %s is bypassing the load balancer' % request.user.username)
+      LOG.warning('User %s is bypassing the load balancer' % request.user.username)
 
     if message:
       banner_message = '<div style="padding: 4px; text-align: center; background-color: #003F6C; height: 24px; color: #DBE8F1">%s</div>' \
@@ -629,7 +629,7 @@ def _get_config_errors(request, cache=True):
         continue
 
       if not callable(validator):
-        LOG.warn("Auto config validation: %s.%s is not a function" % (module.conf.__name__, CONFIG_VALIDATOR))
+        LOG.warning("Auto config validation: %s.%s is not a function" % (module.conf.__name__, CONFIG_VALIDATOR))
         continue
 
       try:
@@ -651,7 +651,7 @@ def _get_config_errors(request, cache=True):
     _CONFIG_ERROR_LIST = error_list
 
   if _CONFIG_ERROR_LIST:
-    LOG.warn("Errors in config : %s" % _CONFIG_ERROR_LIST)
+    LOG.warning("Errors in config : %s" % _CONFIG_ERROR_LIST)
 
   return _CONFIG_ERROR_LIST
 
@@ -724,14 +724,14 @@ def collect_validation_messages(conf, error_list):
         hierarchy_sections_string += "[" * the_section.depth + section + "]" * the_section.depth + " "
         parent = the_section
     except KeyError as ex:
-      LOG.warn("Section %s not found: %s" % (section, str(ex)))
+      LOG.warning("Section %s not found: %s" % (section, str(ex)))
 
     the_value = ''
     try:
       # the_value may be a section or a value
       the_value = the_section[name]
     except KeyError as ex:
-      LOG.warn("Error in accessing Section or Value %s: %s" % (name, str(ex)))
+      LOG.warning("Error in accessing Section or Value %s: %s" % (name, str(ex)))
 
     section_or_value = 'keyvalue'
     if isinstance(the_value, dict):

+ 1 - 1
desktop/libs/aws/src/aws/conf.py

@@ -140,7 +140,7 @@ def get_region(conf=None):
 
   # If the parsed out region is not in the list of supported regions, fallback to the default
   if region not in get_locations():
-    LOG.warn("Region, %s, not found in the list of supported regions: %s" % (region, ', '.join(get_locations())))
+    LOG.warning("Region, %s, not found in the list of supported regions: %s" % (region, ', '.join(get_locations())))
     region = ''
 
   REGION_CACHED = region

+ 2 - 2
desktop/libs/aws/src/aws/s3/s3fs.py

@@ -109,7 +109,7 @@ class S3FileSystem(object):
       resp = self._s3_connection.make_request('HEAD', name)
       return resp.getheader('x-amz-bucket-region')
     except Exception as e:
-      LOG.warn('Failed to fetch bucket "%s" location with "%s"' % (name, e.message or e.reason))
+      LOG.warning('Failed to fetch bucket "%s" location with "%s"' % (name, e.message or e.reason))
       return None
 
   def _get_or_create_bucket(self, name):
@@ -528,7 +528,7 @@ class S3FileSystem(object):
       else:
         self.open(path)
     except Exception as e:
-      LOG.warn('S3 check_access encountered error verifying %s permission at path "%s": %s' % (permission, path, str(e)))
+      LOG.warning('S3 check_access encountered error verifying %s permission at path "%s": %s' % (permission, path, str(e)))
       return False
     return True
 

+ 3 - 3
desktop/libs/azure/src/azure/abfs/__init__.py

@@ -74,7 +74,7 @@ def strip_scheme(path):
   assert_not_equal(filesystem, '', 'File System must be Specified')
   path = filesystem + '/' + file_path
   return path
-  
+
 def strip_path(path):
   """
   Return only the end of a path given another path
@@ -147,7 +147,7 @@ def abfspath(path, fs_defaultfs = None):
     try:
       fs_defaultfs = get_default_abfs_fs()
     except:
-      LOG.warn("Configuration for ABFS is not set, may run into errors")
+      LOG.warning("Configuration for ABFS is not set, may run into errors")
       return path
   filesystem, dir_name = ("","")
   try:
@@ -166,7 +166,7 @@ def abfspath(path, fs_defaultfs = None):
 
 def get_home_dir_for_ABFS():
   """
-  Attempts to go to the directory set by the user in the configuration file. If not defaults to abfs:// 
+  Attempts to go to the directory set by the user in the configuration file. If not defaults to abfs://
   """
   try:
     filesystem = parse_uri(get_default_abfs_fs())[0]

+ 4 - 4
desktop/libs/azure/src/azure/abfs/abfs.py

@@ -374,7 +374,7 @@ class ABFS(object):
   # --------------------------------
   def append(self, path, data, offset=0):
     if not data:
-      LOG.warn("There is no data to append to")
+      LOG.warning("There is no data to append to")
       return
     self._append(path, data)
     return self.flush(path, {'position' : int(len(data)) + int(offset)})
@@ -385,7 +385,7 @@ class ABFS(object):
     """
     path = Init_ABFS.strip_scheme(path)
     if params is None:
-      LOG.warn("Params not specified, Append will take longer")
+      LOG.warning("Params not specified, Append will take longer")
       resp = self._stats(path)
       params = {'position' : int(resp['Content-Length']) + offset, 'action' : 'append'}
     else:
@@ -405,10 +405,10 @@ class ABFS(object):
     """
     path = Init_ABFS.strip_scheme(path)
     if params is None:
-      LOG.warn("Params not specified")
+      LOG.warning("Params not specified")
       params = {'position' : 0}
     if 'position' not in params:
-      LOG.warn("Position is not specified")
+      LOG.warning("Position is not specified")
       params['position'] = 0
     params['action'] = 'flush'
     if headers is None:

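Background on why append() and flush() warn when params are missing: an ADLS Gen2 write is two REST calls, an append at the current offset followed by a flush at offset + len(data), so without a caller-supplied position the code must first stat the file to find it. Sketch of the underlying calls (paths illustrative):

  # PATCH /{filesystem}/{path}?action=append&position=<offset>       upload bytes
  # PATCH /{filesystem}/{path}?action=flush&position=<offset+size>   commit them
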
+ 1 - 1
desktop/libs/dashboard/src/dashboard/controller.py

@@ -85,7 +85,7 @@ class DashboardController(object):
           doc2.delete()
       result['status'] = 0
     except Exception as e:
-      LOG.warn('Error deleting collection: %s' % e)
+      LOG.warning('Error deleting collection: %s' % e)
       result['message'] = str(e)
 
     return result

+ 2 - 2
desktop/libs/dashboard/src/dashboard/models.py

@@ -290,7 +290,7 @@ class Collection2(object):
       schema_fields = api.fields(name)
       schema_fields = schema_fields['schema']['fields']
     except Exception as e:
-      LOG.warn('/luke call did not succeed: %s' % e)
+      LOG.warning('/luke call did not succeed: %s' % e)
       try:
         fields = api.schema_fields(name)
         schema_fields = Collection2._make_luke_from_schema_fields(fields)
@@ -1006,7 +1006,7 @@ def extract_solr_exception_message(e):
     msg = message['error'].get('msg')
     response['error'] = msg if msg else message['error']['trace']
   except ValueError as e:
-    LOG.warn('Failed to parse json response: %s' % force_unicode(e))
+    LOG.warning('Failed to parse json response: %s' % force_unicode(e))
     response['error'] = force_unicode(e)
   except Exception as e:
     LOG.exception('Failed to extract json message: %s' % force_unicode(e))

+ 1 - 1
desktop/libs/hadoop/src/hadoop/cluster.py

@@ -150,7 +150,7 @@ def get_next_ha_yarncluster(current_user=None):
               LOG.info('RM %s has failed back to %s server' % (MR_NAME_CACHE, name))
               rm.from_failover = True
             MR_NAME_CACHE = name
-            LOG.warn('Picking RM HA: %s' % name)
+            LOG.warning('Picking RM HA: %s' % name)
             return (config, rm)
           else:
             LOG.info('RM %s is not RUNNING, skipping it: %s' % (name, cluster_info))

+ 1 - 1
desktop/libs/hadoop/src/hadoop/conf.py

@@ -321,7 +321,7 @@ def test_yarn_configurations(user):
   try:
     from jobbrowser.api import get_api # Required for cluster HA testing
   except Exception as e:
-    LOG.warn('Jobbrowser is disabled, skipping test_yarn_configurations')
+    LOG.warning('Jobbrowser is disabled, skipping test_yarn_configurations')
     return result
 
   try:

+ 1 - 1
desktop/libs/hadoop/src/hadoop/fs/hadoopfs.py

@@ -486,7 +486,7 @@ class FileUpload(object):
 
     self.closed = True
     if stderr:
-      LOG.warn("HDFS FileUpload (cmd='%s', env='%s') outputted stderr:\n%s" %
+      LOG.warning("HDFS FileUpload (cmd='%s', env='%s') outputted stderr:\n%s" %
                    (repr(self.subprocess_cmd), repr(self.subprocess_env), stderr))
     if stdout:
       LOG.info("HDFS FileUpload (cmd='%s', env='%s') outputted stdout:\n%s" %

+ 1 - 1
desktop/libs/hadoop/src/hadoop/fs/upload.py

@@ -148,7 +148,7 @@ class HDFSfileUploadHandler(FileUploadHandler):
     self.request = request
     fs = fsmanager.get_filesystem('default')
     if not fs:
-      LOG.warn('No HDFS set for HDFS upload')
+      LOG.warning('No HDFS set for HDFS upload')
     else:
       fs.setuser(request.user.username)
       FileUploadHandler.chunk_size = fs.get_upload_chuck_size(self._destination) if self._destination else UPLOAD_CHUNK_SIZE.get()

+ 2 - 2
desktop/libs/hadoop/src/hadoop/fs/webhdfs.py

@@ -203,7 +203,7 @@ class WebHdfs(Hdfs):
     except WebHdfsException as e:
       exceptions = ['IllegalArgumentException', 'UnsupportedOperationException']
       if any(x in e.message for x in exceptions):
-        LOG.warn('WebHDFS operation GETTRASHROOT is not implemented, returning default trash path: %s' % trash_path)
+        LOG.warning('WebHDFS operation GETTRASHROOT is not implemented, returning default trash path: %s' % trash_path)
       else:
         raise e
     return trash_path
@@ -712,7 +712,7 @@ class WebHdfs(Hdfs):
       return self._root.get(path, params, headers)
     except WebHdfsException as ex:
       if ex.code == 500 or ex.code == 400:
-        LOG.warn('Failed to check access to path %s, CHECKACCESS operation may not be supported.' % path)
+        LOG.warning('Failed to check access to path %s, CHECKACCESS operation may not be supported.' % path)
         return None
       else:
         raise ex

+ 1 - 2
desktop/libs/hadoop/src/hadoop/pseudo_hdfs4.py

@@ -170,7 +170,7 @@ class PseudoHdfs4(object):
   def fs(self):
     if self._fs is None:
       if self._dfs_http_address is None:
-        LOG.warn("Attempt to access uninitialized filesystem")
+        LOG.warning("Attempt to access uninitialized filesystem")
         return None
       self._fs = hadoop.fs.webhdfs.WebHdfs("http://%s/webhdfs/v1" % (self._dfs_http_address,), self.fs_default_name)
     return self._fs
@@ -620,4 +620,3 @@ def main():
   IPShellEmbed()()
 
   cluster.stop()
-

+ 6 - 6
desktop/libs/indexer/src/indexer/api3.py

@@ -34,7 +34,7 @@ try:
   from simple_salesforce.api import Salesforce
   from simple_salesforce.exceptions import SalesforceRefusedRequest
 except ImportError:
-  LOG.warn('simple_salesforce module not found')
+  LOG.warning('simple_salesforce module not found')
 
 from desktop.lib.django_util import JsonResponse
 from desktop.lib.exceptions_renderable import PopupException
@@ -74,17 +74,17 @@ else:
 try:
   from beeswax.server import dbms
 except ImportError as e:
-  LOG.warn('Hive and HiveServer2 interfaces are not enabled')
+  LOG.warning('Hive and HiveServer2 interfaces are not enabled')
 
 try:
   from filebrowser.views import detect_parquet
 except ImportError as e:
-  LOG.warn('File Browser interface is not enabled')
+  LOG.warning('File Browser interface is not enabled')
 
 try:
   from search.conf import SOLR_URL
 except ImportError as e:
-  LOG.warn('Solr Search interface is not enabled')
+  LOG.warning('Solr Search interface is not enabled')
 
 
 def _escape_white_space_characters(s, inverse=False):
@@ -263,7 +263,7 @@ def guess_field_types(request):
       try:
         sample = db.fetch_result(notebook, snippet, 4, start_over=True)['rows'][:4]
       except Exception as e:
-        LOG.warn('Skipping sample data as query handle might be expired: %s' % e)
+        LOG.warning('Skipping sample data as query handle might be expired: %s' % e)
         sample = [[], [], [], [], []]
       columns = db.autocomplete(snippet=snippet, database='', table='')
       columns = [
@@ -595,7 +595,7 @@ def _small_indexing(user, fs, client, source, destination, index_name):
     try:
       client.delete_index(index_name, keep_config=False)
     except Exception as e2:
-      LOG.warn('Error while cleaning-up config of failed collection creation %s: %s' % (index_name, e2))
+      LOG.warning('Error while cleaning-up config of failed collection creation %s: %s' % (index_name, e2))
     raise e
 
   return {

+ 2 - 2
desktop/libs/indexer/src/indexer/conf.py

@@ -63,14 +63,14 @@ def zkensemble():
     if clusters['default'].HOST_PORTS.get() != 'localhost:2181':
       return '%s/solr' % clusters['default'].HOST_PORTS.get()
   except:
-    LOG.warn('Failed to get Zookeeper ensemble')
+    LOG.warning('Failed to get Zookeeper ensemble')
 
   try:
     from search.conf import SOLR_URL
     parsed = urlparse(SOLR_URL.get())
     return "%s:2181/solr" % (parsed.hostname or 'localhost')
   except:
-    LOG.warn('Failed to get Solr url')
+    LOG.warning('Failed to get Solr url')
 
 
 # Deprecated as always on

+ 3 - 3
desktop/libs/indexer/src/indexer/controller.py

@@ -95,7 +95,7 @@ class CollectionManagerController(object):
       for name in solr_cores:
         solr_cores[name]['isCoreOnly'] = True
     except Exception as e:
-      LOG.warn('No Zookeeper servlet running on Solr server: %s' % e)
+      LOG.warning('No Zookeeper servlet running on Solr server: %s' % e)
 
     solr_cores.update(solr_collections)
     solr_cores.update(solr_aliases)
@@ -109,7 +109,7 @@ class CollectionManagerController(object):
       autocomplete['configs'] = api.configs()
 
     except Exception as e:
-      LOG.warn('No Zookeeper servlet running on Solr server: %s' % e)
+      LOG.warning('No Zookeeper servlet running on Solr server: %s' % e)
 
     return autocomplete
 
@@ -120,7 +120,7 @@ class CollectionManagerController(object):
       field_data = api.fields(collection_or_core_name)
       fields = self._format_flags(field_data['schema']['fields'])
     except Exception as e:
-      LOG.warn('/luke call did not succeed: %s' % e)
+      LOG.warning('/luke call did not succeed: %s' % e)
       try:
         fields = api.schema_fields(collection_or_core_name)
         fields = Collection2._make_luke_from_schema_fields(fields)

+ 1 - 1
desktop/libs/indexer/src/indexer/file_format.py

@@ -596,7 +596,7 @@ class CSVFormat(FileFormat):
       fields = [Field(header[i], types[i]) for i in range(len(header))]
     else:
       # likely failed to guess correctly
-      LOG.warn("Guess field types failed - number of headers didn't match number of predicted types.")
+      LOG.warning("Guess field types failed - number of headers didn't match number of predicted types.")
       fields = []
 
     return fields

+ 1 - 1
desktop/libs/indexer/src/indexer/indexers/rdbms.py

@@ -60,7 +60,7 @@ def get_db_component(request):
     format_['status'] = 0
   except Exception as e:
     message = _('Error accessing the database: %s') % e
-    LOG.warn(message)
+    LOG.warning(message)
     format_['message'] = message
 
   return JsonResponse(format_)

+ 1 - 1
desktop/libs/indexer/src/indexer/indexers/sql.py

@@ -49,7 +49,7 @@ LOG = logging.getLogger(__name__)
 try:
   from beeswax.server import dbms
 except ImportError as e:
-  LOG.warn('Hive and HiveServer2 interfaces are not enabled')
+  LOG.warning('Hive and HiveServer2 interfaces are not enabled')
 
 
 class SQLIndexer(object):

+ 1 - 1
desktop/libs/indexer/src/indexer/solr_api.py

@@ -140,7 +140,7 @@ def delete_indexes(request):
       elif index['type'] == 'alias':
         client.delete_alias(index['name'])
       else:
-        LOG.warn('We could not delete: %s' % index)
+        LOG.warning('We could not delete: %s' % index)
 
     response['status'] = 0
     response['message'] = _('Indexes removed!')

+ 2 - 2
desktop/libs/indexer/src/indexer/solr_client.py

@@ -91,7 +91,7 @@ class SolrClient(object):
 
     except Exception as e:
       msg = _('Solr server could not be contacted properly: %s') % e
-      LOG.warn(msg)
+      LOG.warning(msg)
       raise PopupException(msg, detail=smart_str(e))
 
     return sorted(indexes, key=lambda index: index['name'])
@@ -285,7 +285,7 @@ class SolrClient(object):
         if not zc.path_exists(root_node):
           zc.copy_path(root_node, config_root_path)
         else:
-          LOG.warn('Config %s already existing.' % name)
+          LOG.warning('Config %s already existing.' % name)
       except Exception as e:
         if zc.path_exists(root_node):
           zc.delete_path(root_node)

+ 2 - 2
desktop/libs/liboauth/src/liboauth/backend.py

@@ -37,11 +37,11 @@ import liboauth.metrics
 try:
   import httplib2
 except ImportError:
-  LOG.warn('httplib2 module not found')
+  LOG.warning('httplib2 module not found')
 try:
   import oauth2 as oauth
 except ImportError:
-  LOG.warn('oauth2 module not found')
+  LOG.warning('oauth2 module not found')
   oauth = None
 
 if sys.version_info[0] > 2:

+ 1 - 1
desktop/libs/liboauth/src/liboauth/views.py

@@ -27,7 +27,7 @@ import urllib.request, urllib.parse, urllib.error
 try:
   import httplib2
 except ImportError:
-  LOG.warn('httplib2 module not found')
+  LOG.warning('httplib2 module not found')
 
 import django.contrib.auth.views
 from django.core.exceptions import SuspiciousOperation

+ 1 - 1
desktop/libs/liboozie/src/liboozie/conf.py

@@ -103,7 +103,7 @@ def config_validator(user):
   try:
     from oozie.conf import REMOTE_SAMPLE_DIR
   except Exception as e:
-    LOG.warn('Config check failed because Oozie app not installed: %s' % e)
+    LOG.warning('Config check failed because Oozie app not installed: %s' % e)
     return res
 
   apps = appmanager.get_apps_dict(user)

+ 2 - 2
desktop/libs/liboozie/src/liboozie/credentials.py

@@ -57,7 +57,7 @@ class Credentials(object):
     from beeswax import hive_site, conf
 
     if not hasattr(conf.HIVE_SERVER_HOST, 'get') or not conf.HIVE_SERVER_HOST.get():
-      LOG.warn('Could not get all the Oozie credentials: beeswax app is blacklisted.')
+      LOG.warning('Could not get all the Oozie credentials: beeswax app is blacklisted.')
     else:
       if hive_properties is None:
         hive_properties = hive_site.get_metastore()
@@ -66,7 +66,7 @@ class Credentials(object):
 
       if not hive_properties:
         hive_properties = {}
-        LOG.warn('Could not get all the Oozie credentials: hive-site.xml required on the Hue host.')
+        LOG.warning('Could not get all the Oozie credentials: hive-site.xml required on the Hue host.')
 
       credentials[self.hive_name] = {
         'xml_name': self.hive_name,

+ 8 - 8
desktop/libs/liboozie/src/liboozie/submission2.py

@@ -274,7 +274,7 @@ python altus.py
                   auth_key_id=ALTUS.AUTH_KEY_ID.get(),
                   auth_key_secret=ALTUS.AUTH_KEY_SECRET.get().replace('\\n', '\n')
               )
-            
+
           self._create_file(deployment_dir, 'altus.py', shell_script)
 
           ext_py_lib_path = os.path.join(get_desktop_root(), 'core', 'ext-py')
@@ -595,7 +595,7 @@ STORED AS TEXTFILE %s""" % (self.properties.get('send_result_path'), '\n\n\n'.jo
       if self._do_as(self.user.username , self.fs.exists, path):
         self._do_as(self.user.username , self.fs.rmtree, path)
     except Exception as ex:
-      LOG.warn("Failed to clean up workflow deployment directory for %s (owner %s). Caused by: %s", self.job.name, self.user, ex)
+      LOG.warning("Failed to clean up workflow deployment directory for %s (owner %s). Caused by: %s", self.job.name, self.user, ex)
 
   def _is_workflow(self):
     from oozie.models2 import Workflow
@@ -697,9 +697,9 @@ def _exec(service, command, parameters=None):
     raise e
 
 
-try:    
+try:
   handle = _exec('%(service)s', '%(command)s', arguments)
-  
+
   if 'create' in '%(command)s':
     handle = _exec('%(service)s', 'listJobs', {'clusterCrn': handle['cluster']['crn']})
 
@@ -707,13 +707,13 @@ try:
     job = handle['jobs'].pop(0)
     status = 'QUEUED'
     print 'Job submitted: %%(jobId)s' %% job
-  
+
     while status in ('QUEUED', 'RUNNING', 'SUBMITTING'):
       time.sleep(5)
-  
+
       print 'Checking status...'
       status = _exec('%(service)s', 'describeJob', {'jobId': job['jobId']})['job']['status']
-  
+
     if status != 'COMPLETED':
       raise Exception('Job %%s failed %%s' %% (job['jobId'], status))
     else:
@@ -724,7 +724,7 @@ except Exception, e:
 
 """ % {
       'service': service,
-      'hostname': hostname,      
+      'hostname': hostname,
       'command': command,
       'arguments': repr(arguments),
       'auth_key_id': auth_key_id,

+ 1 - 1
desktop/libs/liboozie/src/liboozie/submittion.py

@@ -316,7 +316,7 @@ class Submission(object):
       if self._do_as(self.user.username , self.fs.exists, path):
         self._do_as(self.user.username , self.fs.rmtree, path)
     except Exception as ex:
-      LOG.warn("Failed to clean up workflow deployment directory for "
+      LOG.warning("Failed to clean up workflow deployment directory for "
                "%s (owner %s). Caused by: %s",
                self.job.name, self.user, ex)
 

+ 1 - 1
desktop/libs/librdbms/src/librdbms/jdbc.py

@@ -30,7 +30,7 @@ LOG = logging.getLogger(__name__)
 try:
   from py4j.java_gateway import JavaGateway, JavaObject
 except:
-  LOG.warn('Failed to import py4j')
+  LOG.warning('Failed to import py4j')
 
 
 def query_and_fetch(db, statement, n=None):

+ 1 - 1
desktop/libs/libsaml/src/libsaml/urls.py

@@ -29,7 +29,7 @@ try:
   from djangosaml2 import views as djangosaml2_views
   from libsaml import views as libsaml_views
 except ImportError:
-  LOG.warn('djangosaml2 module not found')
+  LOG.warning('djangosaml2 module not found')
   djangosaml2_views = None
 
 try:

+ 1 - 1
desktop/libs/libsentry/src/libsentry/api.py

@@ -48,7 +48,7 @@ def ha_error_handler(func):
       if not is_ha_enabled():
         raise PopupException(_('Failed to connect to Sentry server %s, and Sentry HA is not enabled.') % args[0].client.host, detail=e)
       else:
-        LOG.warn("Failed to connect to Sentry server %s, will attempt to find next available host." % args[0].client.host)
+        LOG.warning("Failed to connect to Sentry server %s, will attempt to find next available host." % args[0].client.host)
         server, attempts = get_next_available_server(SentryClient, args[0].client.username, args[0].client.host)
         if server is not None:
           args[0].client = create_client(SentryClient, args[0].client.username, server)

+ 1 - 1
desktop/libs/libsentry/src/libsentry/api2.py

@@ -48,7 +48,7 @@ def ha_error_handler(func):
       if not is_ha_enabled():
         raise PopupException(_('Failed to connect to Sentry server %s, and Sentry HA is not enabled.') % args[0].client.host, detail=e)
       else:
-        LOG.warn("Failed to connect to Sentry server %s, will attempt to find next available host." % args[0].client.host)
+        LOG.warning("Failed to connect to Sentry server %s, will attempt to find next available host." % args[0].client.host)
         server, attempts = get_next_available_server(client_class=SentryClient, username=args[0].client.username, failed_host=args[0].client.host, component=args[0].client.component)
         if server is not None:
           args[0].client = create_client(SentryClient, args[0].client.username, server, args[0].client.component)

+ 2 - 2
desktop/libs/libsentry/src/libsentry/privilege_checker.py

@@ -116,7 +116,7 @@ class PrivilegeChecker(object):
           if self._is_object_action_authorized_v1(hierarchy=self.privilege_hierarchy_v1, object=authorizable, action=action):
             yield object
         except KeyError as e:
-          LOG.warn('Skipping %s: %s' % (authorizable, e))
+          LOG.warning('Skipping %s: %s' % (authorizable, e))
 
     if v2_authorizables:
       for (object, authorizable) in v2_authorizables:
@@ -124,7 +124,7 @@ class PrivilegeChecker(object):
           if self._is_object_action_authorized_v2(hierarchy=self.privilege_hierarchy_v2, object=authorizable, action=action):
             yield object
         except KeyError as e:
-          LOG.warn('Skipping %s: %s' % (authorizable, e))
+          LOG.warning('Skipping %s: %s' % (authorizable, e))
 
 
   def _to_sentry_authorizables(self, objects, key):

+ 1 - 1
desktop/libs/libsentry/src/libsentry/sentry_ha.py

@@ -60,7 +60,7 @@ def get_next_available_server(client_class, username, failed_host=None, componen
   attempted_hosts = []
 
   while has_next:
-    LOG.warn('Could not connect to Sentry server %s, attempting to fetch next available client.' % current_host)
+    LOG.warning('Could not connect to Sentry server %s, attempting to fetch next available client.' % current_host)
     next_server = get_sentry_server(current_host=current_host)
     time.sleep(1)
     try:

+ 1 - 1
desktop/libs/libsentry/src/libsentry/sentry_site.py

@@ -121,7 +121,7 @@ def get_sentry_server(current_host=None):
         LOG.debug("Current Sentry host, %s, index is: %d." % (current_host, current_idx))
         next_idx = (current_idx + 1) % len(servers)
       except ValueError as e:
-        LOG.warn("Current host: %s not found in list of servers: %s" % (current_host, ','.join(hosts)))
+        LOG.warning("Current host: %s not found in list of servers: %s" % (current_host, ','.join(hosts)))
 
     server = servers[next_idx]
     LOG.debug("Returning Sentry host, %s, at next index: %d." % (server['hostname'], next_idx))

+ 3 - 3
desktop/libs/libsolr/src/libsolr/api.py

@@ -57,7 +57,7 @@ LOG = logging.getLogger(__name__)
 try:
   from search.conf import EMPTY_QUERY, SECURITY_ENABLED, SOLR_URL
 except ImportError as e:
-  LOG.warn('Solr Search is not enabled')
+  LOG.warning('Solr Search is not enabled')
 
 
 def utf_quoter(what):
@@ -565,7 +565,7 @@ class SolrApi(object):
         return False
     except RestException as e:
       if 'already exists' in e.message:
-        LOG.warn("Could not create collection.", exc_info=True)
+        LOG.warning("Could not create collection.", exc_info=True)
         return False
       else:
         raise PopupException(e, title=_('Error while accessing Solr'))
@@ -1204,4 +1204,4 @@ GAPS = {
         'facet-widget': {'coeff': '+1', 'unit': 'YEARS'},
         'pie-widget': {'coeff': '+1', 'unit': 'YEARS'}
     }
-}
+}

+ 1 - 1
desktop/libs/libsolr/src/libsolr/conf.py

@@ -52,7 +52,7 @@ def zkensemble_path():
     if parsed.port == 9983: # Standalone Solr cloud
       return ''
   except:
-    LOG.warn('Failed to get Zookeeper ensemble path')
+    LOG.warning('Failed to get Zookeeper ensemble path')
 
   return '/solr'
 

+ 2 - 2
desktop/libs/libzookeeper/src/libzookeeper/conf.py

@@ -46,7 +46,7 @@ def zkensemble():
       if clusters['default'].HOST_PORTS.get() != 'localhost:2181':
         return '%s' % clusters['default'].HOST_PORTS.get()
     except:
-      LOG.warn('Could not get zookeeper ensemble from the zookeeper app')
+      LOG.warning('Could not get zookeeper ensemble from the zookeeper app')
 
   if 'search' in settings.INSTALLED_APPS:
     try:
@@ -54,7 +54,7 @@ def zkensemble():
       parsed = urlparse(SOLR_URL.get())
       return "%s:2181" % (parsed.hostname or 'localhost')
     except:
-      LOG.warn('Could not get zookeeper ensemble from the search app')
+      LOG.warning('Could not get zookeeper ensemble from the search app')
 
   return "localhost:2181"
 

+ 4 - 4
desktop/libs/metadata/src/metadata/manager_client.py

@@ -125,7 +125,7 @@ class ManagerApi(object):
           }, params={'view': 'full'})['items']
           return shs_server_hostId, shs_server_configs
     except Exception as e:
-      LOG.warn("Check Spark History Server via ManagerApi: %s" % e)
+      LOG.warning("Check Spark History Server via ManagerApi: %s" % e)
 
     return None, None
 
@@ -164,7 +164,7 @@ class ManagerApi(object):
 
   def assemble_shs_url(self, shs_ui_hostname, shs_ui_port=None, shs_ssl_port=None, shs_ssl_enabled=None):
     if not shs_ui_hostname or not shs_ui_port or not shs_ssl_port or not shs_ssl_enabled:
-      LOG.warn("Spark conf not found!")
+      LOG.warning("Spark conf not found!")
       return None
 
     protocol = 'https' if shs_ssl_enabled.lower() == 'true' else 'http'
@@ -271,7 +271,7 @@ class ManagerApi(object):
     cluster = self._get_cluster(cluster_name)
     roles = [role['name'] for role in self._get_roles(cluster['name'], service, 'AGENT')]
 
-    if restart:    
+    if restart:
       return self.restart_services(cluster['name'], service, roles)
     else:
       return self.refresh_configs(cluster['name'], service, roles)
@@ -376,6 +376,6 @@ class ManagerApi(object):
                 return config['value']
 
     except Exception as e:
-      LOG.warn("Get Impala Daemon API configurations via ManangerAPI: %s" % e)
+      LOG.warning("Get Impala Daemon API configurations via ManangerAPI: %s" % e)
 
     return None

+ 1 - 1
desktop/libs/metadata/src/metadata/optimizer/optimizer_client.py

@@ -53,7 +53,7 @@ OPTIMIZER_TENANT_ID_CACHE_KEY = 'navopt-tenant-id'
 try:
   from navoptapi.api_lib import ApiLib
 except Exception as e:
-  LOG.warn('NavOpt module is not installed: %s' % e)
+  LOG.warning('NavOpt module is not installed: %s' % e)
 
 
 class NavOptException(Exception):

+ 1 - 1
desktop/libs/metadata/src/metadata/optimizer_api.py

@@ -51,7 +51,7 @@ try:
 
   from metastore.views import _get_db
 except ImportError as e:
-  LOG.warn("Hive lib not enabled")
+  LOG.warning("Hive lib not enabled")
 
 
 def error_handler(view_fn):

+ 1 - 1
desktop/libs/notebook/src/notebook/api.py

@@ -753,7 +753,7 @@ def autocomplete(request, server=None, database=None, table=None, column=None, n
     autocomplete_data = get_api(request, snippet).autocomplete(snippet, database, table, column, nested, action)
     response.update(autocomplete_data)
   except QueryExpired as e:
-    LOG.warn('Expired query seen: %s' % e)
+    LOG.warning('Expired query seen: %s' % e)
 
   response['status'] = 0
 

+ 1 - 1
desktop/libs/notebook/src/notebook/connectors/altus.py

@@ -41,7 +41,7 @@ DATE_FORMAT = "%Y-%m-%d"
 try:
   from navoptapi.api_lib import ApiLib
 except Exception as e:
-  LOG.warn('NavOpt module is not installed: %s' % e)
+  LOG.warning('NavOpt module is not installed: %s' % e)
 
 
 def _exec(service, command, parameters=None):

+ 1 - 1
desktop/libs/notebook/src/notebook/connectors/base.py

@@ -667,7 +667,7 @@ class Api(object):
       try:
         self.close_statement(notebook, snippet)  # Close all the time past multi queries
       except:
-        LOG.warn('Could not close previous multiquery query')
+        LOG.warning('Could not close previous multiquery query')
 
     return resp
 

+ 2 - 2
desktop/libs/notebook/src/notebook/connectors/flink_sql.py

@@ -96,7 +96,7 @@ class FlinkSqlApi(Api):
       self.db.session_heartbeat(session_id=SESSIONS[session_key]['session_id'])
     except Exception as e:
       if 'Session: %(id)s does not exist' % SESSIONS[session_key] in str(e):
-        LOG.warn('Session: %(id)s does not exist, opening a new one' % SESSIONS[session_key])
+        LOG.warning('Session: %(id)s does not exist, opening a new one' % SESSIONS[session_key])
         SESSIONS[session_key] = self.db.create_session()
       else:
         raise e
@@ -174,7 +174,7 @@ class FlinkSqlApi(Api):
               status = 'expired'
           except Exception as e:
             if '%s does not exist in current session' % statement_id in str(e):
-              LOG.warn('Job: %s does not exist' % statement_id)
+              LOG.warning('Job: %s does not exist' % statement_id)
             else:
               raise e
 

+ 2 - 2
desktop/libs/notebook/src/notebook/connectors/hbase.py

@@ -39,7 +39,7 @@ LOG = logging.getLogger(__name__)
 try:
   from hbase.api import HbaseApi
 except ImportError as e:
-  LOG.warn("HBase app is not enabled: %s" % e)
+  LOG.warning("HBase app is not enabled: %s" % e)
 
 
 def query_error_handler(func):
@@ -73,7 +73,7 @@ class HBaseApi(Api):
       else:
         raise PopupException('Could not find column `%s`.`%s`.`%s`' % (database, table, column))
     except Exception as e:
-      LOG.warn('Autocomplete data fetching error: %s' % e)
+      LOG.warning('Autocomplete data fetching error: %s' % e)
       response['code'] = 500
       response['error'] = e.message
 

+ 1 - 1
desktop/libs/notebook/src/notebook/connectors/hive_metastore.py

@@ -41,7 +41,7 @@ try:
   from beeswax.server import dbms
   from beeswax.server.dbms import get_query_server_config, QueryServerException
 except ImportError as e:
-  LOG.warn('Hive and HiveMetastoreServer interfaces are not enabled: %s' % e)
+  LOG.warning('Hive and HiveMetastoreServer interfaces are not enabled: %s' % e)
   hive_settings = None
 
 

+ 9 - 9
desktop/libs/notebook/src/notebook/connectors/hiveserver2.py

@@ -65,7 +65,7 @@ try:
   from beeswax.server.dbms import get_query_server_config, QueryServerException
   from beeswax.views import parse_out_jobs, parse_out_queries
 except ImportError as e:
-  LOG.warn('Hive and HiveServer2 interfaces are not enabled: %s' % e)
+  LOG.warning('Hive and HiveServer2 interfaces are not enabled: %s' % e)
   hive_settings = None
 
 try:
@@ -73,7 +73,7 @@ try:
   from impala.conf import CONFIG_WHITELIST as impala_settings
   from impala.server import get_api as get_impalad_api, ImpalaDaemonApiException, _get_impala_server_url
 except ImportError as e:
-  LOG.warn("Impala app is not enabled")
+  LOG.warning("Impala app is not enabled")
   impala_settings = None
 
 try:
@@ -84,7 +84,7 @@ try:
   has_hive_query_browser = ENABLE_HIVE_QUERY_BROWSER.get()
   has_jobbrowser = True
 except (AttributeError, ImportError, RuntimeError) as e:
-  LOG.warn("Job Browser app is not enabled")
+  LOG.warning("Job Browser app is not enabled")
   has_jobbrowser = False
   has_query_browser = False
   has_hive_query_browser = False
@@ -217,7 +217,7 @@ class HS2Api(Api):
       decoded_guid = session.get_handle().sessionId.guid
       response['session_id'] = unpack_guid(decoded_guid)
     except Exception as e:
-      LOG.warn('Failed to decode session handle: %s' % e)
+      LOG.warning('Failed to decode session handle: %s' % e)
 
     if lang == 'impala' and session:
       http_addr = _get_impala_server_url(session)
@@ -665,7 +665,7 @@ DROP TABLE IF EXISTS `%(table)s`;
     if not isinstance(properties, list) or \
       not all(isinstance(prop, dict) for prop in properties) or \
       not all('key' in prop for prop in properties) or not all('value' in prop for prop in properties):
-      LOG.warn('Current properties are not formatted correctly, will replace with defaults.')
+      LOG.warning('Current properties are not formatted correctly, will replace with defaults.')
       return upgraded_properties
 
     valid_props_dict = dict((prop["key"], prop) for prop in upgraded_properties)
@@ -705,7 +705,7 @@ DROP TABLE IF EXISTS `%(table)s`;
     if not settings:
       session = self._get_session(notebook, 'hive')
       if not session:
-        LOG.warn('Cannot get jobs, failed to find active HS2 session for user: %s' % self.user.username)
+        LOG.warning('Cannot get jobs, failed to find active HS2 session for user: %s' % self.user.username)
       elif session.get('configuration') and session['configuration'].get('hive.execution.engine'):
         return session['configuration'].get('hive.execution.engine')
       else:
@@ -769,7 +769,7 @@ DROP TABLE IF EXISTS `%(table)s`;
     except KeyError:
       raise Exception('Operation has no valid handle attached')
     except binascii.Error:
-      LOG.warn('Handle already base 64 decoded')
+      LOG.warning('Handle already base 64 decoded')
 
     for key in list(handle.keys()):
       if key not in ('log_context', 'secret', 'has_result_set', 'operation_type', 'modified_row_count', 'guid'):
@@ -894,9 +894,9 @@ DROP TABLE IF EXISTS `%(table)s`;
       try:
         guid = unpack_guid_base64(snippet['result']['handle']['guid'])
       except Exception as e:
-        LOG.warn('Failed to decode operation handle guid: %s' % e)
+        LOG.warning('Failed to decode operation handle guid: %s' % e)
     else:
-      LOG.warn('Snippet does not contain a valid result handle, cannot extract Impala query ID.')
+      LOG.warning('Snippet does not contain a valid result handle, cannot extract Impala query ID.')
     return guid
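
The `binascii.Error` branch above ("Handle already base 64 decoded") implements an idempotent decode: attempt the base64 decode and treat a failure as "already decoded". A minimal sketch of the same idea (the helper name is hypothetical):

  import base64
  import binascii

  def ensure_decoded(guid):
    # validate=True makes b64decode raise binascii.Error on non-base64
    # input instead of silently dropping invalid characters.
    try:
      return base64.b64decode(guid, validate=True)
    except binascii.Error:
      return guid  # already raw bytes, leave untouched

  print(ensure_decoded(base64.b64encode(b'\x01\x02')))  # b'\x01\x02'
  print(ensure_decoded(b'\x01\x02'))                    # unchanged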
 
 

+ 1 - 1
desktop/libs/notebook/src/notebook/connectors/kafka.py

@@ -105,7 +105,7 @@ class KafkaApi(Api):
         }
 
     except Exception as e:
-      LOG.warn('Autocomplete data fetching error: %s' % e)
+      LOG.warning('Autocomplete data fetching error: %s' % e)
       response['code'] = 500
       response['error'] = e.message
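
Unrelated to the warn/warning rename: the unchanged context here still reads `response['error'] = e.message`, and exception objects no longer carry a `message` attribute on Python 3, so that line would itself raise `AttributeError`. A Python 3-safe sketch (not part of this commit):

  try:
    raise ValueError('fetch failed')
  except Exception as e:
    response = {'code': 500, 'error': str(e)}  # str(e) works on Python 2 and 3

  print(response)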
 

+ 2 - 2
desktop/libs/notebook/src/notebook/connectors/oozie_batch.py

@@ -43,7 +43,7 @@ try:
   from oozie.views.dashboard import check_job_access_permission, check_job_edition_permission
   from oozie.views.editor2 import _submit_workflow
 except Exception as e:
-  LOG.warn('Oozie application is not enabled: %s' % e)
+  LOG.warning('Oozie application is not enabled: %s' % e)
 
 
 class OozieApi(Api):
@@ -109,7 +109,7 @@ class OozieApi(Api):
         if results:
           response['status'] = 'available'
         else:
-          LOG.warn('No log result could be matched for %s' % job_id)
+          LOG.warning('No log result could be matched for %s' % job_id)
       else:
         response['status'] = 'failed'
 

+ 1 - 1
desktop/libs/notebook/src/notebook/connectors/spark_shell.py

@@ -304,7 +304,7 @@ class SparkApi(Api):
     spark_ui_url = self.SPARK_UI_RE.search(logs)
 
     if not spark_ui_url:
-      LOG.warn('Could not find the Spark UI URL in the session logs.')
+      LOG.warning('Could not find the Spark UI URL in the session logs.')
       return []
     else:
       spark_ui_url = spark_ui_url.group(1)

+ 1 - 1
desktop/libs/notebook/src/notebook/connectors/sql_alchemy.py

@@ -344,7 +344,7 @@ class SqlAlchemyApi(Api):
         if handle and handle['result'].cursor:
           stats = handle['result'].cursor.poll()
       except AssertionError as e:
-        LOG.warn('Query probably not running anymore: %s' % e)
+        LOG.warning('Query probably not running anymore: %s' % e)
       if stats:
         stats = stats.get('stats', {})
         progress = stats.get('completedSplits', 0) * 100 // stats.get('totalSplits', 1)
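
Since the diff viewer notes below that not every changed file is shown, one way to check that no `LOG.warn(` call sites remain is a small scan of the tree. A hypothetical helper, not part of the commit:

  import os
  import re

  PATTERN = re.compile(r'\bLOG\.warn\(')  # matches .warn( but not .warning(

  def find_warn_calls(root):
    # Walk the tree and yield (path, line_number, line) for each match.
    for dirpath, _dirnames, filenames in os.walk(root):
      for name in filenames:
        if not name.endswith('.py'):
          continue
        path = os.path.join(dirpath, name)
        with open(path, encoding='utf-8', errors='replace') as fh:
          for lineno, line in enumerate(fh, 1):
            if PATTERN.search(line):
              yield path, lineno, line.rstrip()

  for hit in find_warn_calls('.'):
    print('%s:%d: %s' % hit)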

Too many files were changed in this diff, so some files are not shown.