
HUE-5553 [metadata] Move to REST API instead of exec

Romain Rigaux 9 years ago
commit
7f861bb
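
The change swaps the subprocess-based ccs navopt CLI calls in the Optimizer client for direct REST calls through the navoptapi ApiLib client, and updates the metadata API views and tests to match. A minimal before/after sketch of the call pattern, condensed from the hunks below; the endpoint, tenant and secrets are placeholders standing in for the OPTIMIZER configuration values:

    # Placeholder values; in the client they come from the OPTIMIZER config section.
    api_url = 'https://navoptapi.example.com'      # hypothetical endpoint
    tenant = 'my-tenant'                           # hypothetical tenant/workload name
    product_secret, product_auth_secret = 'ACCESS_KEY', 'PRIVATE_KEY'

    # Before: shell out to the ccs navopt CLI and parse its stdout.
    import json
    import subprocess
    data = subprocess.check_output(['ccs', 'navopt', '--endpoint-url=%s' % api_url,
                                    'get-top-tables', '--tenant', tenant])
    response = json.loads(data)

    # After: call the REST operation through navoptapi's ApiLib and read the parsed JSON.
    from urlparse import urlparse
    from navoptapi.api_lib import ApiLib

    api = ApiLib('navopt', urlparse(api_url).hostname, product_secret, product_auth_secret)
    response = api.call_api('getTopTables', {'tenant': tenant}).json()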

+ 10 - 10
desktop/libs/metadata/src/metadata/optimizer_api.py

@@ -69,7 +69,7 @@ def get_tenant(request):
   api = OptimizerApi()
   data = api.get_tenant(email=email)
 
-  if data['status'] == 'success':
+  if data:
     response['status'] = 0
     response['data'] = data['tenant']
   else:
@@ -128,7 +128,7 @@ def table_details(request):
 
   data = api.table_details(database_name=database_name, table_name=table_name)
 
-  if data['status'] == 'success':
+  if data:
     response['status'] = 0
     response['details'] = data
   else:
@@ -150,7 +150,7 @@ def query_compatibility(request):
 
   data = api.query_compatibility(source_platform=source_platform, target_platform=target_platform, query=query)
 
-  if data['status'] == 'success':
+  if data:
     response['status'] = 0
     response['query_compatibility'] = data
   else:
@@ -170,7 +170,7 @@ def query_risk(request):
 
   data = api.query_risk(query=query)
 
-  if data['status'] == 'success':
+  if data:
     response['status'] = 0
     response['query_risk'] = data
   else:
@@ -191,7 +191,7 @@ def similar_queries(request):
 
   data = api.similar_queries(source_platform=source_platform, query=query)
 
-  if data['status'] == 'success':
+  if data:
     response['status'] = 0
     response['similar_queries'] = data
   else:
@@ -211,7 +211,7 @@ def top_filters(request):
   api = OptimizerApi()
   data = api.top_filters(db_tables=db_tables)
 
-  if data['status'] == 'success':
+  if data:
     response['status'] = 0
     response['values'] = data['results']
   else:
@@ -230,7 +230,7 @@ def top_joins(request):
   api = OptimizerApi()
   data = api.top_joins(db_tables=db_tables)
 
-  if data['status'] == 'success':
+  if data:
     response['status'] = 0
     response['values'] = data['results']
   else:
@@ -249,7 +249,7 @@ def top_aggs(request):
   api = OptimizerApi()
   data = api.top_aggs(db_tables=db_tables)
 
-  if data['status'] == 'success':
+  if data:
     response['status'] = 0
     response['values'] = data['results']
   else:
@@ -266,7 +266,7 @@ def top_databases(request):
   api = OptimizerApi()
   data = api.top_databases()
 
-  if data['status'] == 'success':
+  if data:
     response['status'] = 0
     response['values'] = data['results']
   else:
@@ -285,7 +285,7 @@ def top_columns(request):
   api = OptimizerApi()
   data = api.top_columns(db_tables=db_tables)
 
-  if data['status'] == 'success':
+  if data:
     response['status'] = 0
     response['values'] = data
   else:
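
The views stop testing for a 'status' == 'success' key: that key was filled in by the removed _exec() wrapper in optimizer_client.py whenever the CLI output lacked one (see the hunks below), while the REST client hands each view the endpoint's parsed JSON directly, so any non-empty payload now counts as success. A short sketch of the resulting handler pattern, using the same names as the view code above and a hypothetical db_tables value:

    from metadata.optimizer_client import OptimizerApi

    api = OptimizerApi()
    response = {'status': -1}

    data = api.top_filters(db_tables=['default.web_logs'])  # hypothetical table list

    if data:  # non-empty parsed JSON means the call succeeded
      response['status'] = 0
      response['values'] = data['results']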

+ 199 - 235
desktop/libs/metadata/src/metadata/optimizer_client.py

@@ -22,7 +22,9 @@ import os
 import subprocess
 import uuid
 
+from subprocess import CalledProcessError
 from tempfile import NamedTemporaryFile
+from urlparse import urlparse
 
 from django.utils.translation import ugettext as _
 
@@ -30,9 +32,9 @@ from desktop.lib.exceptions_renderable import PopupException
 from desktop.lib import export_csvxls
 from desktop.lib.rest.http_client import HttpClient, RestException
 from desktop.lib.rest import resource
+from navoptapi.api_lib import ApiLib
 
 from metadata.conf import OPTIMIZER, get_optimizer_url
-from subprocess import CalledProcessError
 
 
 LOG = logging.getLogger(__name__)
@@ -41,9 +43,6 @@ LOG = logging.getLogger(__name__)
 _JSON_CONTENT_TYPE = 'application/json'
 
 
-def is_optimizer_enabled():
-  return get_optimizer_url() and OPTIMIZER.PRODUCT_NAME.get()
-
 
 class OptimizerApiException(PopupException):
   pass
@@ -51,184 +50,66 @@ class OptimizerApiException(PopupException):
 
 class OptimizerApi(object):
 
-  UPLOAD = {
-    'queries': {
-      'headers': ['SQL_ID', 'ELAPSED_TIME', 'SQL_FULLTEXT'],
-      'file_headers': """{
-    "fileLocation": "%(query_file)s",
-    "tenant": "%(tenant)s",
-    "fileName": "%(query_file_name)s",
-    "sourcePlatform": "%(source_platform)s",
-    "colDelim": ",",
-    "rowDelim": "\\n",
-    "headerFields": [
-        {
-            "count": 0,
-            "coltype": "SQL_ID",
-            "use": true,
-            "tag": "",
-            "name": "SQL_ID"
-        },
-        {
-            "count": 0,
-            "coltype": "NONE",
-            "use": true,
-            "tag": "",
-            "name": "ELAPSED_TIME"
-        },
-        {
-            "count": 0,
-            "coltype": "SQL_QUERY",
-            "use": true,
-            "tag": "",
-            "name": "SQL_FULLTEXT"
-        }
-    ]
-}"""
-    },
-    'table_stats': {
-        'headers': ['TABLE_NAME', 'NUM_ROWS'],
-        'file_headers': """{
-    "fileLocation": "%(query_file)s",
-    "tenant": "%(tenant)s",
-    "fileName": "%(query_file_name)s",
-    "sourcePlatform": "%(source_platform)s",
-    "colDelim": ",",
-    "rowDelim": "\\n",
-    "headerFields": [
-        {
-            "count": 0,
-            "coltype": "NONE",
-            "use": true,
-            "tag": "",
-            "name": "TABLE_NAME"
-        },
-        {
-            "count": 0,
-            "coltype": "NONE",
-            "use": true,
-            "tag": "",
-            "name": "NUM_ROWS"
-        }
-    ]
-}"""
-    },
-    'cols_stats': {
-        'headers': ['table_name', 'column_name', 'data_type', 'num_distinct', 'num_nulls', 'avg_col_len'], # Lower case for some reason
-        'file_headers': """{
-    "fileLocation": "%(query_file)s",
-    "tenant": "%(tenant)s",
-    "fileName": "%(query_file_name)s",
-    "sourcePlatform": "%(source_platform)s",
-    "colDelim": ",",
-    "rowDelim": "\\n",
-    "headerFields": [
-        {
-            "count": 0,
-            "coltype": "NONE",
-            "use": true,
-            "tag": "",
-            "name": "table_name"
-        },
-        {
-            "count": 0,
-            "coltype": "NONE",
-            "use": true,
-            "tag": "",
-            "name": "column_name"
-        },
-        {
-            "count": 0,
-            "coltype": "NONE",
-            "use": true,
-            "tag": "",
-            "name": "data_type"
-        },
-        {
-            "count": 0,
-            "coltype": "NONE",
-            "use": true,
-            "tag": "",
-            "name": "num_distinct"
-        },
-        {
-            "count": 0,
-            "coltype": "NONE",
-            "use": true,
-            "tag": "",
-            "name": "num_nulls"
-        },
-        {
-            "count": 0,
-            "coltype": "NONE",
-            "use": true,
-            "tag": "",
-            "name": "avg_col_len"
-        }
-    ]
-}"""
-    }
-  }
-
   def __init__(self, api_url=None, product_name=None, product_secret=None, ssl_cert_ca_verify=OPTIMIZER.SSL_CERT_CA_VERIFY.get(), product_auth_secret=None):
     self._api_url = (api_url or get_optimizer_url()).strip('/')
     self._email = OPTIMIZER.EMAIL.get()
     self._email_password = OPTIMIZER.EMAIL_PASSWORD.get()
     self._product_secret = product_secret if product_secret else OPTIMIZER.PRODUCT_SECRET.get()
-    self._product_auth_secret = product_auth_secret if product_auth_secret else OPTIMIZER.PRODUCT_AUTH_SECRET.get()
+    self._product_auth_secret = product_auth_secret if product_auth_secret else (OPTIMIZER.PRODUCT_AUTH_SECRET.get() and OPTIMIZER.PRODUCT_AUTH_SECRET.get().replace('\\n', '\n'))
     self._product_name = product_name if product_name else (OPTIMIZER.PRODUCT_NAME.get() or self.get_tenant()['tenant']) # Aka "workload"
 
-    self._client = HttpClient(self._api_url, logger=LOG)
-    self._client.set_verify(ssl_cert_ca_verify)
-
-    self._root = resource.Resource(self._client)
-    self._token = None
+#     self._client = HttpClient(self._api_url, logger=LOG)
+#     self._client.set_verify(ssl_cert_ca_verify)
+# 
+#     self._root = resource.Resource(self._client)
+#     self._token = None
 
+    self._api = ApiLib("navopt", urlparse(self._api_url).hostname, self._product_secret, self._product_auth_secret)
 
   def _authenticate(self, force=False):
     if self._token is None or force:
       self._token = self.authenticate()['token']
 
     return self._token
-
-  def _exec(self, command, args):
-    data = None
-    response = {'status': 'error'}
-
-    try:
-      cmd_args = [
-          'ccs',
-          'navopt',
-          '--endpoint-url=%s' % self._api_url,
-          command
-      ]
-      if self._product_secret:
-        cmd_args += ['--auth-config', self._product_secret]
-
-      LOG.info(' '.join(cmd_args + args))
-      data = subprocess.check_output(cmd_args + args)
-    except CalledProcessError, e:
-      if command == 'upload' and e.returncode == 1:
-        LOG.info('Upload command is successful despite return code of 1: %s' % e.output)
-        data = '\n'.join(e.output.split('\n')[3:]) # Beware removing of {"url":...}
-      else:
-        raise OptimizerApiException(e, title=_('Error while accessing Optimizer'))
-    except RestException, e:
-      raise OptimizerApiException(e, title=_('Error while accessing Optimizer'))
-
-    if data:
-      response = json.loads(data)
-      if 'status' not in response:
-        response['status'] = 'success'
-    return response
+# 
+#   def _exec(self, command, args):
+#     data = None
+#     response = {'status': 'error'}
+# 
+#     try:
+#       cmd_args = [
+#           'ccs',
+#           'navopt',
+#           '--endpoint-url=%s' % self._api_url,
+#           command
+#       ]
+#       if self._product_secret:
+#         cmd_args += ['--auth-config', self._product_secret]
+# 
+#       LOG.info(' '.join(cmd_args + args))
+#       data = subprocess.check_output(cmd_args + args)
+#     except CalledProcessError, e:
+#       if command == 'upload' and e.returncode == 1:
+#         LOG.info('Upload command is successful despite return code of 1: %s' % e.output)
+#         data = '\n'.join(e.output.split('\n')[3:]) # Beware removing of {"url":...}
+#       else:
+#         raise OptimizerApiException(e, title=_('Error while accessing Optimizer'))
+#     except RestException, e:
+#       raise OptimizerApiException(e, title=_('Error while accessing Optimizer'))
+# 
+#     if data:
+#       response = json.loads(data)
+#       if 'status' not in response:
+#         response['status'] = 'success'
+#     return response
 
 
   def get_tenant(self, email=None):
-    return self._exec('get-tenant', ['--email', email or self._email])
+    return self._api.call_api("getTenant", {"email" : email or self._email}).json()
 
 
   def create_tenant(self, group):
-    return self._exec('create-tenant', ['--user-group', group])
+    return self._api.call_api('createTenant', {'userGroup' : group}).json()
 
 
   def authenticate(self):
@@ -253,17 +134,6 @@ class OptimizerApi(object):
       raise PopupException(e, title=_('Error while accessing Optimizer'))
 
 
-  def get_status(self, token, email=None):
-    try:
-      data = {
-          'email': email if email is not None else self._email,
-          'token': token,
-      }
-      return self._root.post('/api/getStatus', data=json.dumps(data), contenttype=_JSON_CONTENT_TYPE)
-    except RestException, e:
-      raise PopupException(e, title=_('Error while accessing Optimizer'))
-
-
   def upload(self, data, data_type='queries', source_platform='generic', workload_id=None):
     data_headers = OptimizerApi.UPLOAD[data_type]['file_headers']
 
@@ -299,13 +169,13 @@ class OptimizerApi(object):
         f_queries.close()
         f_format.close()
 
-      args = [
-          '--cli-input-json', 'file://%s' % f_format.name
-      ]
+      args = {
+          'cliInputJson': 'file://%s' % f_format.name
+      }
       if workload_id:
-        args += ['--workload-id', workload_id]
+        args['workloadId'] = workload_id
 
-      return self._exec('upload', args)
+      return self._api.call_api('upload', {'tenant' : self._product_name, 'workfloadId': workload_id}).json()
 
     except RestException, e:
       raise PopupException(e, title=_('Error while accessing Optimizer'))
@@ -315,101 +185,195 @@ class OptimizerApi(object):
 
 
   def upload_status(self, workload_id):
-    return self._exec('upload-status', [
-        '--tenant', self._product_name,
-        '--workload-id', workload_id
-    ])
+    return self._api.call_api('uploadStatus', {'tenant' : self._product_name, 'workfloadId': workload_id}).json()
 
 
-  def top_tables(self, workfloadId=None, database_name='default'):        
-    return self._exec('get-top-tables', [
-        '--tenant', self._product_name,
-        '--db-name', database_name.lower()
-    ])
+  def top_tables(self, workfloadId=None, database_name='default'):    
+    return self._api.call_api('getTopTables', {'tenant' : self._product_name}).json()
 
 
   def table_details(self, database_name, table_name):
-    return self._exec('get-tables-detail', [
-        '--tenant', self._product_name,
-        '--db-name', database_name.lower(),
-        '--table-name', table_name.lower()
-    ])
+    return self._api.call_api('getTablesDetail', {'tenant' : self._product_name, 'dbName': database_name.lower(), 'tableName': table_name.lower()}).json()
 
 
   def query_compatibility(self, source_platform, target_platform, query):
-    return self._exec('get-query-compatible', [
-        '--tenant', self._product_name,
-        '--source-platform', source_platform,
-        '--target-platform', target_platform,
-        '--query', query,
-    ])
+    return self._api.call_api('getQueryCompatible', {'tenant' : self._product_name, 'query': query, 'sourcePlatform': source_platform, 'targetPlatform': target_platform}).json()
 
 
   def query_risk(self, query):
-    return self._exec('get-query-risk', [
-        '--tenant', self._product_name,
-        '--query', query
-    ])
+    return self._api.call_api('getQueryRisk', {'tenant' : self._product_name, 'query': query}).json()
 
 
   def similar_queries(self, source_platform, query):
-    return self._exec('get-similar-queries', [
-        '--tenant', self._product_name,
-        '--source-platform', source_platform,
-        '--query', query
-    ])
+    return self._api.call_api('getSimilarQueries', {'tenant' : self._product_name, 'sourcePlatform': source_platform, 'query': query}).json()
 
 
   def top_filters(self, db_tables=None):
-    args = [
-        '--tenant', self._product_name,
-    ]
+    args = {
+      'tenant' : self._product_name
+    }
     if db_tables:
-      args += ['--db-table-list']
-      args.extend([db_table.lower() for db_table in db_tables])
+      args['dbTableList'] = [db_table.lower() for db_table in db_tables]
 
-    return self._exec('get-top-filters', args)
+    return self._api.call_api('getTopFilters', args).json()
 
 
   def top_aggs(self, db_tables=None):
-    args = [
-        '--tenant', self._product_name
-    ]
+    args = {
+      'tenant' : self._product_name
+    }
     if db_tables:
-      args += ['--db-table-list']
-      args.extend([db_table.lower() for db_table in db_tables])
+      args['dbTableList'] = [db_table.lower() for db_table in db_tables]
 
-    return self._exec('get-top-aggs', args)
+    return self._api.call_api('getTopAggs', args).json()
 
 
   def top_columns(self, db_tables=None):
-    args = [
-        '--tenant', self._product_name
-    ]
+    args = {
+      'tenant' : self._product_name
+    }
     if db_tables:
-      args += ['--db-table-list']
-      args.extend([db_table.lower() for db_table in db_tables])
+      args['dbTableList'] = [db_table.lower() for db_table in db_tables]
 
-    return self._exec('get-top-columns', args)
+    return self._api.call_api('getTopColumns', args).json()
 
 
   def top_joins(self, db_tables=None):
-    args = [
-        '--tenant', self._product_name,
-    ]
+    args = {
+      'tenant' : self._product_name
+    }
     if db_tables:
-      args += ['--db-table-list']
-      args.extend([db_table.lower() for db_table in db_tables])
+      args['dbTableList'] = [db_table.lower() for db_table in db_tables]
 
-    return self._exec('get-top-joins', args)
+    return self._api.call_api('getTopJoins', args).json()
 
 
   def top_databases(self, db_tables=None):
-    args = [
-        '--tenant', self._product_name,
-    ]
+    args = {
+      'tenant' : self._product_name
+    }
+
+    return self._api.call_api('getTopDataBases', args).json()
 
-    return self._exec('get-top-data-bases', args)
+
+  UPLOAD = {
+    'queries': {
+      'headers': ['SQL_ID', 'ELAPSED_TIME', 'SQL_FULLTEXT'],
+      'file_headers': """{
+    "fileLocation": "%(query_file)s",
+    "tenant": "%(tenant)s",
+    "fileName": "%(query_file_name)s",
+    "sourcePlatform": "%(source_platform)s",
+    "colDelim": ",",
+    "rowDelim": "\\n",
+    "headerFields": [
+        {
+            "count": 0,
+            "coltype": "SQL_ID",
+            "use": true,
+            "tag": "",
+            "name": "SQL_ID"
+        },
+        {
+            "count": 0,
+            "coltype": "NONE",
+            "use": true,
+            "tag": "",
+            "name": "ELAPSED_TIME"
+        },
+        {
+            "count": 0,
+            "coltype": "SQL_QUERY",
+            "use": true,
+            "tag": "",
+            "name": "SQL_FULLTEXT"
+        }
+    ]
+}"""
+    },
+    'table_stats': {
+        'headers': ['TABLE_NAME', 'NUM_ROWS'],
+        'file_headers': """{
+    "fileLocation": "%(query_file)s",
+    "tenant": "%(tenant)s",
+    "fileName": "%(query_file_name)s",
+    "sourcePlatform": "%(source_platform)s",
+    "colDelim": ",",
+    "rowDelim": "\\n",
+    "headerFields": [
+        {
+            "count": 0,
+            "coltype": "NONE",
+            "use": true,
+            "tag": "",
+            "name": "TABLE_NAME"
+        },
+        {
+            "count": 0,
+            "coltype": "NONE",
+            "use": true,
+            "tag": "",
+            "name": "NUM_ROWS"
+        }
+    ]
+}"""
+    },
+    'cols_stats': {
+        'headers': ['table_name', 'column_name', 'data_type', 'num_distinct', 'num_nulls', 'avg_col_len'], # Lower case for some reason
+        'file_headers': """{
+    "fileLocation": "%(query_file)s",
+    "tenant": "%(tenant)s",
+    "fileName": "%(query_file_name)s",
+    "sourcePlatform": "%(source_platform)s",
+    "colDelim": ",",
+    "rowDelim": "\\n",
+    "headerFields": [
+        {
+            "count": 0,
+            "coltype": "NONE",
+            "use": true,
+            "tag": "",
+            "name": "table_name"
+        },
+        {
+            "count": 0,
+            "coltype": "NONE",
+            "use": true,
+            "tag": "",
+            "name": "column_name"
+        },
+        {
+            "count": 0,
+            "coltype": "NONE",
+            "use": true,
+            "tag": "",
+            "name": "data_type"
+        },
+        {
+            "count": 0,
+            "coltype": "NONE",
+            "use": true,
+            "tag": "",
+            "name": "num_distinct"
+        },
+        {
+            "count": 0,
+            "coltype": "NONE",
+            "use": true,
+            "tag": "",
+            "name": "num_nulls"
+        },
+        {
+            "count": 0,
+            "coltype": "NONE",
+            "use": true,
+            "tag": "",
+            "name": "avg_col_len"
+        }
+    ]
+}"""
+    }
+  }
 
 
 def OptimizerDataAdapter(data, data_type='queries'):
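
In optimizer_client.py each former CLI subcommand maps onto a camelCase REST operation and each flag becomes a key in the argument dict: get-tables-detail with --db-name/--table-name becomes getTablesDetail with dbName/tableName, --db-table-list becomes a dbTableList list, get-top-data-bases becomes getTopDataBases, and so on. A sketch of that mapping with the new client, using a placeholder host, tenant and secrets:

    from urlparse import urlparse
    from navoptapi.api_lib import ApiLib

    api = ApiLib('navopt', urlparse('https://navoptapi.example.com').hostname,  # hypothetical host
                 'ACCESS_KEY', 'PRIVATE_KEY')                                   # placeholder secrets

    # ccs navopt get-tables-detail --tenant my-tenant --db-name default --table-name web_logs
    details = api.call_api('getTablesDetail',
                           {'tenant': 'my-tenant', 'dbName': 'default', 'tableName': 'web_logs'}).json()

    # ccs navopt get-top-filters --tenant my-tenant --db-table-list default.web_logs
    filters = api.call_api('getTopFilters',
                           {'tenant': 'my-tenant', 'dbTableList': ['default.web_logs']}).json()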

+ 3 - 3
desktop/libs/metadata/src/metadata/optimizer_client_tests.py

@@ -21,14 +21,14 @@ from nose.plugins.skip import SkipTest
 from nose.tools import assert_equal, assert_true
 
 from django.contrib.auth.models import User
-from django.core.urlresolvers import reverse
 
 from desktop.auth.backend import rewrite_user
 from desktop.lib.django_test_util import make_logged_in_client
 from desktop.lib.test_utils import add_to_group, grant_access
 from hadoop.pseudo_hdfs4 import is_live_cluster
 
-from metadata.optimizer_client import OptimizerApi, is_optimizer_enabled
+from metadata.optimizer_client import OptimizerApi
+from metadata.conf import has_optimizer
 
 
 LOG = logging.getLogger(__name__)
@@ -38,7 +38,7 @@ class TestOptimizerApi(object):
 
   @classmethod
   def setup_class(cls):
-    if not is_live_cluster() or not is_optimizer_enabled():
+    if not is_live_cluster() or not has_optimizer():
       raise SkipTest
 
     cls.client = make_logged_in_client(username='test', is_superuser=False)
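
The tests now gate on metadata.conf.has_optimizer() instead of the removed optimizer_client.is_optimizer_enabled(), and the unused django reverse import is dropped. A sketch of the gate, assuming has_optimizer() mirrors the removed check (an Optimizer URL plus product name configured under [metadata]):

    from nose.plugins.skip import SkipTest

    from hadoop.pseudo_hdfs4 import is_live_cluster
    from metadata.conf import has_optimizer

    # Skip the whole test class unless a live cluster and an Optimizer configuration are available.
    if not is_live_cluster() or not has_optimizer():
      raise SkipTest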