
Adjust flink sql connector to Apache Flink SQL Gateway

Grzegorz Kołakowski 10 months ago
parent commit 5d4b059e0c

+ 114 - 0
desktop/core/src/desktop/js/sql/reference/flink/udfReference.ts

@@ -17,6 +17,8 @@
 import { UdfCategory, UdfCategoryFunctions } from 'sql/reference/types';
 import I18n from 'utils/i18n';
 
+// TODO: add more UDFs
+
 const AGGREGATE_FUNCTIONS: UdfCategoryFunctions = {
   avg: {
     name: 'avg',
@@ -102,7 +104,35 @@ const AGGREGATE_FUNCTIONS: UdfCategoryFunctions = {
     description: 'Returns the unbiased sample variance of a numeric column in the group.'
   }
 };
+
 const STRING_FUNCTIONS: UdfCategoryFunctions = {
+  char_length: {
+    name: 'char_length',
+    returnTypes: ['INTEGER'],
+    arguments: [[{ type: 'STRING' }]],
+    signature: 'char_length(STRING value)',
+    draggable: 'char_length()',
+    description:
+      "Returns the number of characters in STRING."
+  },
+  character_length: {
+    name: 'character_length',
+    returnTypes: ['INTEGER'],
+    arguments: [[{ type: 'STRING' }]],
+    signature: 'character_length(STRING value)',
+    draggable: 'character_length()',
+    description:
+      "Returns the number of characters in STRING."
+  },
+  lower: {
+    name: 'lower',
+    returnTypes: ['STRING'],
+    arguments: [[{ type: 'STRING' }]],
+    signature: 'lower(STRING value)',
+    draggable: 'lower()',
+    description:
+      "Returns string in lowercase."
+  },
   regexp_extract: {
     name: 'regexp_extract',
     returnTypes: ['STRING'],
@@ -111,8 +141,18 @@ const STRING_FUNCTIONS: UdfCategoryFunctions = {
     draggable: 'regexp_extract()',
     description:
       "Returns the string extracted using the pattern. For example, regexp_extract('foothebar', 'foo(.*?)(bar)', 2) returns 'bar.' Note that some care is necessary in using predefined character classes: using '\\s' as the second argument will match the letter s; '\\\\s' is necessary to match whitespace, etc. The 'index' parameter is the Java regex Matcher group() method index."
+  },
+  upper: {
+    name: 'upper',
+    returnTypes: ['STRING'],
+    arguments: [[{ type: 'STRING' }]],
+    signature: 'upper(STRING value)',
+    draggable: 'upper()',
+    description:
+      "Returns string in uppercase."
   }
 };
+
 const DATE_FUNCTIONS: UdfCategoryFunctions = {
   current_date: {
     name: 'current_date',
@@ -389,6 +429,7 @@ const GROUP_WINDOW_FUNCTIONS: UdfCategoryFunctions = {
       'Returns a proctime attribute that can be used in subsequent time-based operations such as interval joins and group window or over window aggregations.'
   }
 };
+
 const ANALYTIC_FUNCTIONS: UdfCategoryFunctions = {
   dense_rank: {
     name: 'dense_rank',
@@ -472,9 +513,82 @@ const ANALYTIC_FUNCTIONS: UdfCategoryFunctions = {
   }
 };
 
+const ARITHMETIC_FUNCTIONS: UdfCategoryFunctions = {
+  abs: {
+    name: 'abs',
+    returnTypes: ['NUMERIC'],
+    arguments: [[{ type: 'NUMERIC' }]],
+    signature: 'abs(NUMERIC numeric)',
+    draggable: 'abs()',
+    description:
+      'Returns the absolute value of numeric.'
+  },
+  ln: {
+    name: 'ln',
+    returnTypes: ['NUMERIC'],
+    arguments: [[{ type: 'NUMERIC' }]],
+    signature: 'ln(NUMERIC numeric)',
+    draggable: 'ln()',
+    description:
+      'Returns the natural logarithm (base e) of numeric.'
+  },
+  log: {
+    name: 'log',
+    returnTypes: ['NUMERIC'],
+    arguments: [[{ type: 'NUMERIC' }], [{ type: 'NUMERIC', optional: true }]],
+    signature: 'log([NUMERIC numeric1,] NUMERIC numeric2)',
+    draggable: 'log()',
+    description:
+      'When called with one argument, returns the natural logarithm of numeric2. When called with two arguments, this function returns the logarithm of numeric2 to the base numeric1. Currently, numeric2 must be greater than 0 and numeric1 must be greater than 1.'
+  },
+  log10: {
+    name: 'log10',
+    returnTypes: ['NUMERIC'],
+    arguments: [[{ type: 'NUMERIC' }]],
+    signature: 'log10(NUMERIC numeric)',
+    draggable: 'log10()',
+    description:
+      'Returns the base 10 logarithm of numeric.'
+  },
+  log2: {
+    name: 'log2',
+    returnTypes: ['NUMERIC'],
+    arguments: [[{ type: 'NUMERIC' }]],
+    signature: 'log2(NUMERIC numeric)',
+    draggable: 'log2()',
+    description:
+      'Returns the base 2 logarithm of numeric.'
+  },
+  power: {
+    name: 'power',
+    returnTypes: ['NUMERIC'],
+    arguments: [[{ type: 'NUMERIC' }], [{ type: 'NUMERIC' }]],
+    signature: 'power(NUMERIC numeric1, NUMERIC numeric2)',
+    draggable: 'power()',
+    description:
+      'Returns numeric1 raised to the power of numeric2 (numeric1^numeric2).'
+  },
+  sqrt: {
+    name: 'sqrt',
+    returnTypes: ['NUMERIC'],
+    arguments: [[{ type: 'NUMERIC' }]],
+    signature: 'sqrt(NUMERIC numeric)',
+    draggable: 'sqrt()',
+    description:
+      'Returns the square root of numeric.'
+  },
+};
+
 export const UDF_CATEGORIES: UdfCategory[] = [
   { name: I18n('Aggregate'), isAggregate: true, functions: AGGREGATE_FUNCTIONS },
   { name: I18n('Analytic'), isAnalytic: true, functions: ANALYTIC_FUNCTIONS },
+  { name: I18n('Arithmetic'), functions: ARITHMETIC_FUNCTIONS },
   { name: I18n('Date'), functions: DATE_FUNCTIONS },
   { name: I18n('Group Window Functions'), functions: GROUP_WINDOW_FUNCTIONS },
   { name: I18n('String'), functions: STRING_FUNCTIONS }

+ 315 - 150
desktop/libs/notebook/src/notebook/connectors/flink_sql.py

@@ -17,25 +17,24 @@
 
 from __future__ import absolute_import
 
-import sys
 import json
 import logging
 import posixpath
+import re
+import time
 
-from django.utils.translation import gettext as _
-
+from desktop.auth.backend import rewrite_user
 from desktop.lib.i18n import force_unicode
 from desktop.lib.rest.http_client import HttpClient, RestException
 from desktop.lib.rest.resource import Resource
 from notebook.connectors.base import Api, QueryError
 
 LOG = logging.getLogger()
+
 _JSON_CONTENT_TYPE = 'application/json'
-_API_VERSION = 'v1'
-SESSIONS = {}
+_API_VERSION = 'v3'
 SESSION_KEY = '%(username)s-%(connector_name)s'
-
-n = 0
+OPERATION_TOKEN = '%(username)s-%(connector_name)s-operation-token'
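
For illustration, with a hypothetical user `alice` and a connector named `flink`, these templates render to the per-user profile keys used throughout the API below:

```python
SESSION_KEY % {'username': 'alice', 'connector_name': 'flink'}
# -> 'alice-flink'
OPERATION_TOKEN % {'username': 'alice', 'connector_name': 'flink'}
# -> 'alice-flink-operation-token'
```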
 
 
 def query_error_handler(func):
@@ -48,13 +47,24 @@ def query_error_handler(func):
       except Exception:
         message = e.message
       message = force_unicode(message)
-      raise QueryError(message)
+      raise QueryError(parse_error(message))
     except Exception as e:
       message = force_unicode(str(e))
       raise QueryError(message)
+
   return decorator
 
 
+def parse_error(error):
+  # Split on literal '\n' sequences (the error text carries escaped newlines).
+  lines = re.split(r'\\n', error)
+  caused_by = [line for line in lines if 'Caused by:' in line]
+
+  # The innermost (last) 'Caused by:' line usually carries the root cause.
+  if caused_by:
+    return caused_by[-1]
+  return error
+
+
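A quick sketch of what parse_error yields on a made-up gateway error string with escaped newlines:

```python
err = ('org.apache.flink.util.FlinkException: failed to run statement\\n'
       'Caused by: org.apache.flink.table.api.ValidationException: Table not found')
parse_error(err)
# -> 'Caused by: org.apache.flink.table.api.ValidationException: Table not found'
```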
 class FlinkSqlApi(Api):
 
   def __init__(self, user, interpreter=None):
@@ -71,76 +81,123 @@ class FlinkSqlApi(Api):
 
     response = {
       'type': lang,
-      'id': session['session_id']
+      'id': session['sessionHandle']
     }
-
     return response
 
-  def _get_session(self):
-    session_key = SESSION_KEY % {
-      'username': self.user.username,
+  def _get_session_key(self):
+    return SESSION_KEY % {
+      'username': self.user.username if hasattr(self.user, 'username') else self.user,
+      'connector_name': self.interpreter['name']
+    }
+
+  def _get_session_info_from_user(self):
+    self.user = rewrite_user(self.user)
+    session_key = self._get_session_key()
+
+    if self.user.profile.data.get(session_key):
+      return self.user.profile.data[session_key]
+
+  def _set_session_info_to_user(self, session_info):
+    self.user = rewrite_user(self.user)
+    session_key = self._get_session_key()
+
+    self.user.profile.update_data({session_key: session_info})
+    self.user.profile.save()
+
+  def _remove_session_info_from_user(self):
+    self.user = rewrite_user(self.user)
+    session_key = self._get_session_key()
+    operation_token_key = self._get_operation_token_key()
+
+    if self.user.profile.data.get(session_key):
+      json_data = self.user.profile.data
+      json_data.pop(session_key)
+      json_data.pop(operation_token_key)
+      self.user.profile.json_data = json.dumps(json_data)
+
+    self.user.profile.save()
+
+  def _get_operation_token_key(self):
+    return OPERATION_TOKEN % {
+      'username': self.user.username if hasattr(self.user, 'username') else self.user,
       'connector_name': self.interpreter['name']
     }
 
-    if session_key not in SESSIONS:
-      SESSIONS[session_key] = self.db.create_session()
+  def _get_operation_token_info_from_user(self, operation_handle):
+    self.user = rewrite_user(self.user)
+    operation_token_key = self._get_operation_token_key()
+
+    if self.user.profile.data.get(operation_token_key):
+      return self.user.profile.data[operation_token_key][operation_handle]
+
+  def _set_operation_token_info_to_user(self, operation_handle, token):
+    self.user = rewrite_user(self.user)
+    operation_token_key = self._get_operation_token_key()
+
+    json_data = self.user.profile.data
+
+    if self.user.profile.data.get(operation_token_key) is None:
+      json_data[operation_token_key] = {}
+
+    json_data[operation_token_key][operation_handle] = token
+    self.user.profile.update_data(json_data)
+
+    self.user.profile.save()
+
+  def _remove_operation_token_info_from_user(self, operation_handle):
+    self.user = rewrite_user(self.user)
+    operation_token_key = self._get_operation_token_key()
+
+    if self.user.profile.data.get(operation_token_key):
+      json_data = self.user.profile.data
+      json_data[operation_token_key].pop(operation_handle)
+      self.user.profile.json_data = json.dumps(json_data)
+
+    self.user.profile.save()
+
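Taken together, these helpers persist roughly the following shape in the user profile (handles abbreviated; purely illustrative):

```python
{
  'alice-flink': {'sessionHandle': '7df29f4a-...', 'id': '7df29f4a-...'},
  'alice-flink-operation-token': {'b61b27ec-...': 4}
}
```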
+  def _get_session(self):
+    session = self._get_session_info_from_user()
+
+    if not session:
+      session = self.db.create_session()
 
     try:
-      self.db.session_heartbeat(session_id=SESSIONS[session_key]['session_id'])
+      self.db.session_heartbeat(session_handle=session['sessionHandle'])
     except Exception as e:
-      if 'Session: %(id)s does not exist' % SESSIONS[session_key] in str(e):
-        LOG.warning('Session: %(id)s does not exist, opening a new one' % SESSIONS[session_key])
-        SESSIONS[session_key] = self.db.create_session()
+      if "Session '%(sessionHandle)s' does not exist" % session in str(e):
+        LOG.warning('Session %(sessionHandle)s does not exist, opening a new one' % session)
+        session = self.db.create_session()
       else:
         raise e
 
-    SESSIONS[session_key]['id'] = SESSIONS[session_key]['session_id']
+    session['id'] = session['sessionHandle']
+    self._set_session_info_to_user(session)
 
-    return SESSIONS[session_key]
+    return session
 
   @query_error_handler
   def execute(self, notebook, snippet):
-    global n
-    n = 0
     session = self._get_session()
-    session_id = session['id']
-    job_id = None
+    session_handle = session['id']
 
     statement = snippet['statement'].strip().rstrip(';')
 
-    resp = self.db.execute_statement(session_id=session_id, statement=statement)
-
-    if resp['statement_types'][0] == 'SELECT':
-      job_id = resp['results'][0]['data'][0][0]
-      data, description = [], []
-      # TODO: change_flags
-    else:
-      data, description = resp['results'][0]['data'], resp['results'][0]['columns']
-
-    has_result_set = data is not None
+    # TODO: statements such as ADD, ALTER, CREATE, DROP, USE, LOAD and UNLOAD could instead use the simpler
+    # /sessions/:session_handle/configure-session endpoint
+    operation_handle = self.db.execute_statement(session_handle=session_handle, statement=statement)
+    self._set_operation_token_info_to_user(operation_handle['operationHandle'], 0)
 
     return {
-      'sync': job_id is None,
-      'has_result_set': has_result_set,
-      'guid': job_id,
-      'result': {
-        'has_more': job_id is not None,
-        'data': data if job_id is None else [],
-        'meta': [{
-            'name': col['name'],
-            'type': col['type'],
-            'comment': ''
-          }
-          for col in description
-        ]
-        if has_result_set else [],
-        'type': 'table'
-      }
+      'has_result_set': True,
+      'guid': operation_handle['operationHandle'],
     }
 
+  def _is_sync_statement(self, statement):
+    return bool(re.match(r'^(add|alter|create|drop|load|unload|use)\b', statement, re.IGNORECASE))
+
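This helper is not wired up yet; one way the TODO in execute() could use it (a sketch only, under the assumption that configure-session accepts these statements):

```python
# Hypothetical routing inside execute(): session-level statements go through
# configure-session, queries keep the regular statements endpoint.
if self._is_sync_statement(statement):
    self.db.configure_session(session_handle=session_handle, statement=statement)
else:
    operation_handle = self.db.execute_statement(session_handle=session_handle, statement=statement)
```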
   @query_error_handler
   def check_status(self, notebook, snippet):
-    global n
     response = {}
     session = self._get_session()
 
@@ -155,55 +212,79 @@ class FlinkSqlApi(Api):
           try:
             resp = self.db.fetch_status(session['id'], statement_id)
             if resp.get('status') == 'RUNNING':
-              status = 'streaming'
-              response['result'] = self.fetch_result(notebook, snippet, n, False)
+              status = 'running'
             elif resp.get('status') == 'FINISHED':
               status = 'available'
-            elif resp.get('status') == 'FAILED':
-              status = 'failed'
             elif resp.get('status') == 'CANCELED':
               status = 'expired'
+            elif resp.get('status') == 'CLOSED':
+              status = 'closed'
+            elif resp.get('status') == 'ERROR':
+              status = 'error'
+              self._remove_operation_token_info_from_user(statement_id)
+              result_resp = self.db.fetch_results(session['id'], statement_id, 0)
+              raise QueryError(parse_error(result_resp['errors'][-1]))
+
           except Exception as e:
-            if '%s does not exist in current session' % statement_id in str(e):
-              LOG.warning('Job: %s does not exist' % statement_id)
+            if 'Can not find the submitted operation in the OperationManager with the %s' % statement_id in str(e):
+              LOG.warning('Operation Handle: %s does not exist' % statement_id)
             else:
               raise e
 
     response['status'] = status
-
     return response
 
   @query_error_handler
   def fetch_result(self, notebook, snippet, rows, start_over):
-    global n
     session = self._get_session()
     statement_id = snippet['result']['handle']['guid']
-    token = n  # rows
 
-    resp = self.db.fetch_results(session['id'], job_id=statement_id, token=token)
+    token = self._get_operation_token_info_from_user(statement_id)
+
+    # Is a race condition between cancel and fetch possible?
+    resp = self.db.fetch_results(session['id'], operation_handle=statement_id, token=token)
+
+    next_result = None if resp['resultType'] == 'EOS' else resp.get('nextResultUri')
 
-    next_result = resp.get('next_result_uri')
     if next_result:
-      n = int(next_result.rsplit('/', 1)[-1])
+      # nextResultUri format:
+      #   /sessions/:session_handle/operations/:operation_handle/result/:token?rowFormat=JSON
+      # Step 1: Drop URL query part ("?rowFormat=JSON")
+      url_path = next_result.rsplit('?', 1)[0]
+      # Step 2: Extract "token" from URL path
+      n = int(url_path.rsplit('/', 1)[-1])
+      self._set_operation_token_info_to_user(statement_id, n)
+
+    results = resp.get('results') if resp else None
+    data = [row['fields'] for row in results['data']] if results and results.get('data') else []
+
+    if not next_result:
+      # This will not be needed once close_statement starts working.
+      self._remove_operation_token_info_from_user(statement_id)
 
     return {
-        'has_more': bool(next_result),
-        'data': resp and resp['results'][0]['data'] or [],  # No escaping...
-        'meta': [{
-            'name': column['name'],
-            'type': column['type'],
-            'comment': ''
-          }
-          for column in resp['results'][0]['columns'] if resp
-        ],
-        'type': 'table'
+      'has_more': bool(next_result),
+      'data': data,  # No escaping...
+      'meta': [{
+        'name': column['name'],
+        'type': column['logicalType']['type'],
+        'comment': column['comment']
+      }
+        for column in resp['results']['columns']
+      ],
+      'type': 'table'
     }
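For reference, the token extraction above applied to a concrete (made-up) nextResultUri:

```python
next_result = '/sessions/7df29f4a/operations/b61b27ec/result/4?rowFormat=JSON'
url_path = next_result.rsplit('?', 1)[0]  # '/sessions/7df29f4a/operations/b61b27ec/result/4'
int(url_path.rsplit('/', 1)[-1])          # 4
```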
 
   @query_error_handler
   def autocomplete(self, snippet, database=None, table=None, column=None, nested=None, operation=None):
+    LOG.debug(f"Autocomplete: '{database}'; '{table}'; '{column}'; '{nested}', '{operation}'.")
     response = {}
 
-    if database is None:
+    if operation == 'functions':
+      response['functions'] = self._show_functions(database)
+    elif database is None:
       response['databases'] = self._show_databases()
     elif table is None:
       response['tables_meta'] = self._show_tables(database)
@@ -211,10 +292,10 @@ class FlinkSqlApi(Api):
       columns = self._get_columns(database, table)
       response['columns'] = [col['name'] for col in columns]
       response['extended_columns'] = [{
-          'comment': col.get('comment'),
-          'name': col.get('name'),
-          'type': col['type']
-        }
+        'comment': col.get('comment'),
+        'name': col.get('name'),
+        'type': col['type']
+      }
         for col in columns
       ]
 
@@ -224,27 +305,74 @@ class FlinkSqlApi(Api):
   def get_sample_data(self, snippet, database=None, table=None, column=None, is_async=False, operation=None):
     if operation == 'hello':
       snippet['statement'] = "SELECT 'Hello World!'"
+    else:
+      snippet['statement'] = "SELECT * FROM `%(database)s`.`%(table)s` LIMIT 25;" % {
+        'database': database,
+        'table': table
+      }
 
-    notebook = {}
-    sample = self.execute(notebook, snippet)
+    session = self._get_session()
+    session_id = session['id']
+    operation_handle = self.db.execute_statement(session_handle=session_id, statement=snippet['statement'])
+    statement_id = operation_handle['operationHandle']
 
-    response = {
+    resp = self.db.fetch_results(session_id, statement_id, 0)
+    while resp['resultType'] == 'NOT_READY':
+      time.sleep(0.1)
+      resp = self.db.fetch_results(session_id, statement_id, 0)
+
+    results = resp.get('results')
+    sample = [row['fields'] for row in results['data']] if results and results.get('data') else []
+    n = 0
+
+    while resp['resultType'] != 'EOS':
+      resp = self.db.fetch_results(session_id, statement_id, n)
+      if resp['resultType'] == 'PAYLOAD':
+        n += 1
+      results = resp.get('results')
+      if results and results.get('data'):
+        sample += [row['fields'] for row in results['data']]
+      time.sleep(1)
+      if sample:
+        break
+
+    return {
       'status': 0,
-      'result': {}
+      'result': {
+        'handle': {
+          'guid': statement_id
+        }
+      },
+      'rows': sample,
+      'full_headers': [
+        {
+          'name': column['name'],
+          'type': column['logicalType']['type'],
+          'comment': column['comment']
+        }
+        for column in resp['results']['columns']
+      ]
     }
 
-    response['rows'] = sample['result']['data']
-    response['full_headers'] = sample['result']['meta']
+  @query_error_handler
+  def cancel(self, notebook, snippet):
+    session = self._get_session()
+    operation_handle = snippet['result']['handle']['guid']
 
-    return response
+    try:
+      self.db.close_statement(session['id'], operation_handle)
+    except Exception as e:
+      message = force_unicode(str(e)).lower()
+      LOG.debug(message)
 
-  def cancel(self, notebook, snippet):
+    return {'status': 0}
+
+  @query_error_handler
+  def close_statement(self, notebook, snippet):
     session = self._get_session()
     statement_id = snippet['result']['handle']['guid']
 
     try:
       if session and statement_id:
-        self.db.close_statement(session_id=session['id'], job_id=statement_id)
+        self.db.close_statement(session_handle=session['id'], operation_handle=statement_id)
+        # self._remove_operation_token_info_from_user(statement_id)  # TODO: check why the Hue DB is not getting updated
       else:
         return {'status': -1}  # missing operation ids
     except Exception as e:
@@ -256,54 +384,87 @@ class FlinkSqlApi(Api):
     return {'status': 0}
 
   def close_session(self, session):
-    # Avoid closing session on page refresh or editor close for now
-    pass
-    # session = self._get_session()
-    # self.db.close_session(session['id'])
+    if self._get_session_info_from_user():
+      self._remove_session_info_from_user()
+      self.db.close_session(session['id'])
+
+    return {
+      'status': 0,
+      'session': session['id']
+    }
+
+  def _check_status_and_fetch_result(self, session_handle, operation_handle):
+    resp = self.db.fetch_results(session_handle, operation_handle, 0)
+
+    while resp['resultType'] == 'NOT_READY':
+      time.sleep(0.1)  # Avoid busy-polling while the operation is still being prepared
+      resp = self.db.fetch_results(session_handle, operation_handle, 0)
+
+    results = resp.get('results')
+    return [row['fields'] for row in results['data']] if results and results.get('data') else []
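
The polling here relies on the gateway's result-type protocol, summarized below (per the SQL Gateway REST docs):

```python
# resultType values returned by /result/:token, as handled in this module:
#   'NOT_READY' -> result not produced yet; retry the same token after a pause
#   'PAYLOAD'   -> rows available; advance to the next token / nextResultUri
#   'EOS'       -> end of stream; no further results
```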
 
   def _show_databases(self):
     session = self._get_session()
-    session_id = session['id']
+    session_handle = session['id']
 
-    resp = self.db.execute_statement(session_id=session_id, statement='SHOW DATABASES')
+    operation_handle = self.db.execute_statement(session_handle=session_handle, statement='SHOW DATABASES')
+    db_list = self._check_status_and_fetch_result(session_handle, operation_handle['operationHandle'])
 
-    return [db[0] for db in resp['results'][0]['data']]
+    return [db[0] for db in db_list]
 
   def _show_tables(self, database):
     session = self._get_session()
-    session_id = session['id']
+    session_handle = session['id']
 
-    resp = self.db.execute_statement(session_id=session_id, statement='USE %(database)s' % {'database': database})
-    resp = self.db.execute_statement(session_id=session_id, statement='SHOW TABLES')
+    operation_handle = self.db.execute_statement(session_handle=session_handle,
+                                                 statement='SHOW TABLES IN `%(database)s`' % {'database': database})
+    table_list = self._check_status_and_fetch_result(session_handle, operation_handle['operationHandle'])
 
-    return [table[0] for table in resp['results'][0]['data']]
+    return [{
+      'name': table[0],
+      'type': 'Table',
+      'comment': '',
+    }
+      for table in table_list
+    ]
 
   def _get_columns(self, database, table):
     session = self._get_session()
-    session_id = session['id']
+    session_handle = session['id']
 
-    resp = self.db.execute_statement(session_id=session_id, statement='USE %(database)s' % {'database': database})
-    resp = self.db.execute_statement(session_id=session_id, statement='DESCRIBE %(table)s' % {'table': table})
-    columns = resp['results'][0]['data']
+    operation_handle = self.db.execute_statement(
+      session_handle=session_handle,
+      statement='DESCRIBE `%(database)s`.`%(table)s`' % {'database': database, 'table': table})
+    column_list = self._check_status_and_fetch_result(session_handle, operation_handle['operationHandle'])
 
     return [{
-        'name': col[0],
-        'type': col[1],  # Types to unify
-        'comment': '',
-      }
-      for col in columns
+      'name': col[0],
+      'type': col[1],  # Types to unify
+      'comment': '',
+    }
+      for col in column_list
     ]
 
+  def _show_functions(self, database):
+    session = self._get_session()
+    session_handle = session['id']
+
+    operation_handle = self.db.execute_statement(
+      session_handle=session_handle,
+      statement='SHOW FUNCTIONS IN `%(database)s`' % {'database': database})
+    function_list = self._check_status_and_fetch_result(session_handle, operation_handle['operationHandle'])
+
+    return [{'name': function[0]} for function in function_list]
 
-class FlinkSqlClient():
-  '''
-  Implements https://github.com/ververica/flink-sql-gateway
+
+class FlinkSqlClient:
+  """
+  Implements https://nightlies.apache.org/flink/flink-docs-master/docs/dev/table/sql-gateway/rest/.
   Could be a pip module or sqlalchemy dialect in the future.
-  '''
+  """
 
   def __init__(self, user, api_url):
     self.user = user
-    self._url = posixpath.join(api_url + '/' + _API_VERSION + '/')
+    self._url = posixpath.join(api_url.rstrip('/'), _API_VERSION) + '/'  # e.g. http://host:8083 -> http://host:8083/v3/
     self._client = HttpClient(self._url, logger=LOG)
     self._root = Resource(self._client)
 
@@ -315,62 +476,66 @@ class FlinkSqlClient():
 
   def create_session(self, **properties):
     data = {
-        "session_name": "test",  # optional
-        "planner": "blink",  # required, "old"/"blink"
-        "execution_type": "streaming",  # required, "batch"/"streaming"
-        "properties": {  # optional
-            "key": "value"
-        }
+      "sessionName": self.user.username + "-flink-sql",
     }
     data.update(properties)
-
     return self._root.post('sessions', data=json.dumps(data), contenttype=_JSON_CONTENT_TYPE)
 
-  def session_heartbeat(self, session_id):
-    return self._root.post('sessions/%(session_id)s/heartbeat' % {'session_id': session_id})
+  def close_session(self, session_handle):
+    return self._root.delete('sessions/%(session_handle)s' % {'session_handle': session_handle})
 
-  def execute_statement(self, session_id, statement):
+  def get_session_conf(self, session_handle):
+    return self._root.get('sessions/%(session_handle)s' % {'session_handle': session_handle})
+
+  def session_heartbeat(self, session_handle):
+    return self._root.post('sessions/%(session_handle)s/heartbeat' % {'session_handle': session_handle})
+
+  def configure_session(self, session_handle, statement):
     data = {
-        "statement": statement,  # required
-        "execution_timeout": ""  # execution time limit in milliseconds, optional, but required for stream SELECT ?
+      "statement": statement,
     }
+    json_data = json.dumps(data)
 
-    return self._root.post(
-        'sessions/%(session_id)s/statements' % {
-        'session_id': session_id
-      },
-      data=json.dumps(data),
-      contenttype=_JSON_CONTENT_TYPE
-    )
+    path = 'sessions/%(session_handle)s/configure-session' % {'session_handle': session_handle}
+    self._root.post(path, data=json_data, contenttype=_JSON_CONTENT_TYPE)
 
-  def fetch_status(self, session_id, job_id):
+  def execute_statement(self, session_handle, statement):
+    data = {
+      "statement": statement,
+    }
+    json_data = json.dumps(data)
+
+    path = 'sessions/%(session_handle)s/statements' % {'session_handle': session_handle}
+    return self._root.post(path, data=json_data, contenttype=_JSON_CONTENT_TYPE)
+
+  def fetch_status(self, session_handle, operation_handle):
     return self._root.get(
-      'sessions/%(session_id)s/jobs/%(job_id)s/status' % {
-        'session_id': session_id,
-        'job_id': job_id
+      'sessions/%(session_handle)s/operations/%(operation_handle)s/status' % {
+        'session_handle': session_handle,
+        'operation_handle': operation_handle,
       }
     )
 
-  def fetch_results(self, session_id, job_id, token=0):
+  def fetch_results(self, session_handle, operation_handle, token=0):
     return self._root.get(
-      'sessions/%(session_id)s/jobs/%(job_id)s/result/%(token)s' % {
-        'session_id': session_id,
-        'job_id': job_id,
+      'sessions/%(session_handle)s/operations/%(operation_handle)s/result/%(token)s' % {
+        'session_handle': session_handle,
+        'operation_handle': operation_handle,
         'token': token
-      }
-    )
+      })
 
-  def close_statement(self, session_id, job_id):
+  def close_statement(self, session_handle, operation_handle):
     return self._root.delete(
-      'sessions/%(session_id)s/jobs/%(job_id)s' % {
-        'session_id': session_id,
-        'job_id': job_id,
+      'sessions/%(session_handle)s/operations/%(operation_handle)s/close' % {
+        'session_handle': session_handle,
+        'operation_handle': operation_handle,
       }
     )
 
-  def close_session(self, session_id):
-    return self._root.delete(
-      'sessions/%(session_id)s' % {
-        'session_id': session_id,
+  def cancel(self, session_handle, operation_handle):
+    return self._root.post(
+      'sessions/%(session_handle)s/operations/%(operation_handle)s/cancel' % {
+        'session_handle': session_handle,
+        'operation_handle': operation_handle
       }
     )
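
A minimal end-to-end usage sketch of the client (gateway URL hypothetical; in practice the FlinkSqlApi layer above drives these calls):

```python
client = FlinkSqlClient(user, 'http://localhost:8083')  # assumes a reachable SQL Gateway
session = client.create_session()                       # -> {'sessionHandle': '...'}
op = client.execute_statement(session['sessionHandle'], 'SHOW DATABASES')
rows = client.fetch_results(session['sessionHandle'], op['operationHandle'], token=0)
client.close_session(session['sessionHandle'])
```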

+ 1 - 1
desktop/libs/notebook/src/notebook/templates/editor_components.mako

@@ -1727,7 +1727,7 @@ else:
       <div class="pull-left" data-bind="text: (result.statement_id() + 1)"></div><div class="pull-left">/</div><div class="pull-left" data-bind="text: result.statements_count()"></div>
     </div>
     <!-- ko if: !isCanceling() -->
-    <a class="snippet-side-btn red" data-bind="click: cancel, visible: status() == 'running' || status() == 'starting'" title="${ _('Cancel operation') }">
+    <a class="snippet-side-btn red" data-bind="click: cancel, visible: status() == 'running' || status() == 'starting' || status() == 'available'" title="${ _('Cancel operation') }">
       <i class="fa fa-fw fa-stop snippet-side-single"></i>
     </a>
     <!-- /ko -->

+ 2 - 0
tools/docker/hue/Dockerfile

@@ -6,6 +6,8 @@ LABEL description="Hue SQL Assistant - gethue.com"
 ENV DEBIAN_FRONTEND=noninteractive
 
 RUN apt-get update -y && apt-get install -y \
+  pkg-config \
+  build-essential \
   python3-pip \
   libkrb5-dev  \
   libsasl2-modules-gssapi-mit \