
HUE-7738 [editor] Cache the UDF categories instead of the API response to support individual describe

This also moves the remaining UDF ref js to ts and takes care of some bugs found in the process.
Johan Ahlen 5 years ago
parent
commit
c9a2789a51

+ 40 - 0
desktop/core/src/desktop/js/sql/reference/apiCache.ts

@@ -0,0 +1,40 @@
+// Licensed to Cloudera, Inc. under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  Cloudera, Inc. licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+import localForage from 'localforage';
+import { UdfCategory } from 'sql/reference/types';
+import { Connector } from 'types';
+
+const GLOBAL_UDF_CACHE_KEY = 'HUE_GLOBAL_UDF_KEY';
+const VERSION = '0';
+
+const getStore = (connector: Connector) =>
+  localForage.createInstance({
+    name: `HueUdfCatalog_${VERSION}_${connector.id}`
+  });
+
+export const clearUdfCache = async (connector: Connector) => await getStore(connector).clear();
+
+export const getCachedUdfCategories = async (
+  connector: Connector,
+  database: string | undefined
+): Promise<UdfCategory[]> => await getStore(connector).getItem(database || GLOBAL_UDF_CACHE_KEY);
+
+export const setCachedUdfCategories = async (
+  connector: Connector,
+  database: string | undefined,
+  categories: UdfCategory[]
+) => await getStore(connector).setItem(database || GLOBAL_UDF_CACHE_KEY, categories);
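
The new apiCache module stores whole UdfCategory arrays per connector, keyed by database name or a global fallback key. A minimal usage sketch, with an invented connector id and database name (not taken from the commit):

```typescript
import {
  clearUdfCache,
  getCachedUdfCategories,
  setCachedUdfCategories
} from 'sql/reference/apiCache';
import { UdfCategory } from 'sql/reference/types';
import { Connector } from 'types';

// Hypothetical connector, for illustration only.
const connector: Connector = { id: 'hive-1', dialect: 'hive' };

const cacheExample = async (categories: UdfCategory[]): Promise<void> => {
  // Store the merged categories for the 'default' database of this connector.
  await setCachedUdfCategories(connector, 'default', categories);

  // Subsequent lookups read from localForage instead of hitting the API again.
  const cached = await getCachedUdfCategories(connector, 'default');
  console.log(cached);

  // Clearing wipes the whole per-connector store (global key and all databases).
  await clearUdfCache(connector);
};
```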

+ 1 - 1
desktop/core/src/desktop/js/sql/reference/apiUtils.test.ts

@@ -14,7 +14,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-import { adaptApiFunctions, ApiUdf, extractArgumentTypes, mergeArgumentTypes } from './apiUtils';
+import { adaptApiFunctions, ApiUdf, extractArgumentTypes, mergeArgumentTypes} from './apiUtils';
 
 describe('apiUtils.js', () => {
   it('should return the default signature when not defined', () => {

+ 8 - 10
desktop/core/src/desktop/js/sql/reference/apiUtils.ts

@@ -16,7 +16,8 @@
 
 import { simplePostAsync } from 'api/apiUtils';
 import { AUTOCOMPLETE_API_PREFIX } from 'api/urls';
-import { Argument, Connector, UdfDetails } from './sqlReferenceRepository';
+import { UdfArgument, UdfDetails } from 'sql/reference/types';
+import { Connector } from 'types';
 import I18n from 'utils/i18n';
 
 export interface ApiUdf {
@@ -52,7 +53,7 @@ const adaptApiUdf = (apiUdf: ApiUdf): UdfDetails => {
 const extractReturnTypes = (apiUdf: ApiUdf): string[] =>
   apiUdf.return_type ? [stripPrecision(apiUdf.return_type)] : DEFAULT_RETURN_TYPES;
 
-export const extractArgumentTypes = (apiUdf: ApiUdf): Argument[][] => {
+export const extractArgumentTypes = (apiUdf: ApiUdf): UdfArgument[][] => {
   if (apiUdf.signature) {
     const cleanSignature = stripPrecision(apiUdf.signature);
     if (cleanSignature === '()') {
@@ -63,7 +64,7 @@ export const extractArgumentTypes = (apiUdf: ApiUdf): Argument[][] => {
       return match.map(argString => {
         const typeMatch = argString.match(TYPE_REGEX);
         if (typeMatch && typeMatch.groups) {
-          const arg: Argument = { type: typeMatch.groups.type };
+          const arg: UdfArgument = { type: typeMatch.groups.type };
           if (typeMatch.groups.multiple) {
             arg.multiple = true;
           }
@@ -77,7 +78,7 @@ export const extractArgumentTypes = (apiUdf: ApiUdf): Argument[][] => {
   return DEFAULT_ARGUMENTS;
 };
 
-export const mergeArgumentTypes = (target: Argument[][], additional: Argument[][]) => {
+export const mergeArgumentTypes = (target: UdfArgument[][], additional: UdfArgument[][]) => {
   for (let i = 0; i < target.length; i++) {
     if (i >= additional.length) {
       break;
@@ -123,7 +124,7 @@ export const fetchUdfs = async (options: {
   connector: Connector;
   database?: string;
   silenceErrors: boolean;
-}): Promise<ApiUdf[]> => {
+}): Promise<UdfDetails[]> => {
   let url = AUTOCOMPLETE_API_PREFIX;
   if (options.database) {
     url += '/' + options.database;
@@ -139,12 +140,9 @@ export const fetchUdfs = async (options: {
 
   try {
     const response = await simplePostAsync(url, data, options);
-
     if (response && response.functions) {
       return adaptApiFunctions(response.functions);
     }
-    return (response && response.functions) || [];
-  } catch (err) {
-    return [];
-  }
+  } catch (err) {}
+  return [];
 };
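
With this change fetchUdfs resolves with adapted UdfDetails objects (or an empty array on any error) instead of raw ApiUdf responses. A small sketch of the adjusted contract, using an invented connector:

```typescript
import { fetchUdfs } from 'sql/reference/apiUtils';
import { UdfDetails } from 'sql/reference/types';
import { Connector } from 'types';

// Invented connector for illustration; real ids come from the Hue config.
const connector: Connector = { id: 'impala-1', dialect: 'impala' };

const listUdfNames = async (): Promise<string[]> => {
  // Errors are swallowed inside fetchUdfs, so this always resolves with an array.
  const udfs: UdfDetails[] = await fetchUdfs({
    connector,
    database: 'default',
    silenceErrors: true
  });
  return udfs.map(udf => udf.name);
};
```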

+ 3 - 1
desktop/core/src/desktop/js/sql/reference/impala/typeConversion.js → desktop/core/src/desktop/js/sql/reference/generic/typeConversion.ts

@@ -14,7 +14,9 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-export const TYPE_CONVERSION = {
+import { TypeConversion } from 'sql/reference/types';
+
+export const TYPE_CONVERSION: TypeConversion = {
   BOOLEAN: {
     BOOLEAN: true,
     TIMESTAMP: false,

+ 7 - 2
desktop/core/src/desktop/js/sql/reference/generic/udfReference.js → desktop/core/src/desktop/js/sql/reference/generic/udfReference.ts

@@ -14,10 +14,12 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+import { UdfCategory, UdfCategoryFunctions } from 'sql/reference/types';
 import I18n from 'utils/i18n';
 
-export const AGGREGATE_FUNCTIONS = {
+export const AGGREGATE_FUNCTIONS: UdfCategoryFunctions = {
   count: {
+    name: 'count',
     returnTypes: ['BIGINT'],
     arguments: [[{ type: 'T' }]],
     signature: 'count(col)',
@@ -26,6 +28,7 @@ export const AGGREGATE_FUNCTIONS = {
       'count(*) - Returns the total number of retrieved rows, including rows containing NULL values. count(expr) - Returns the number of rows for which the supplied expression is non-NULL.'
   },
   sum: {
+    name: 'sum',
     returnTypes: ['DOUBLE'],
     arguments: [[{ type: 'T' }]],
     signature: 'sum(col)',
@@ -34,6 +37,7 @@ export const AGGREGATE_FUNCTIONS = {
       'Returns the sum of the elements in the group or the sum of the distinct values of the column in the group.'
   },
   max: {
+    name: 'max',
     returnTypes: ['DOUBLE'],
     arguments: [[{ type: 'T' }]],
     signature: 'max(col)',
@@ -41,6 +45,7 @@ export const AGGREGATE_FUNCTIONS = {
     description: 'Returns the maximum value of the column in the group.'
   },
   min: {
+    name: 'min',
     returnTypes: ['DOUBLE'],
     arguments: [[{ type: 'T' }]],
     signature: 'min(col)',
@@ -49,6 +54,6 @@ export const AGGREGATE_FUNCTIONS = {
   }
 };
 
-export const UDF_CATEGORIES = [
+export const UDF_CATEGORIES: UdfCategory[] = [
   { name: I18n('Aggregate'), isAggregate: true, functions: AGGREGATE_FUNCTIONS }
 ];

+ 3 - 1
desktop/core/src/desktop/js/sql/reference/hive/typeConversion.js → desktop/core/src/desktop/js/sql/reference/hive/typeConversion.ts

@@ -14,7 +14,9 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-export const TYPE_CONVERSION = {
+import { TypeConversion } from 'sql/reference/types';
+
+export const TYPE_CONVERSION: TypeConversion = {
   BOOLEAN: {
     BOOLEAN: true,
     TIMESTAMP: false,

File diff suppressed because it is too large
+ 131 - 8
desktop/core/src/desktop/js/sql/reference/hive/udfReference.ts


+ 3 - 1
desktop/core/src/desktop/js/sql/reference/impala/setReference.js → desktop/core/src/desktop/js/sql/reference/impala/setReference.ts

@@ -14,7 +14,9 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-export const SET_OPTIONS = {
+import { SetOptions } from 'sql/reference/types';
+
+export const SET_OPTIONS: SetOptions = {
   ALLOW_ERASURE_CODED_FILES: {
     description:
       'Use the ALLOW_ERASURE_CODED_FILES query option to enable or disable the support of erasure coded files in Impala. Until Impala is fully tested and certified with erasure coded files, this query option is set to FALSE by default.',

+ 3 - 1
desktop/core/src/desktop/js/sql/reference/generic/typeConversion.js → desktop/core/src/desktop/js/sql/reference/impala/typeConversion.ts

@@ -14,7 +14,9 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-export const TYPE_CONVERSION = {
+import { TypeConversion } from 'sql/reference/types';
+
+export const TYPE_CONVERSION: TypeConversion = {
   BOOLEAN: {
     BOOLEAN: true,
     TIMESTAMP: false,

File diff suppressed because it is too large
+ 129 - 4
desktop/core/src/desktop/js/sql/reference/impala/udfReference.ts


+ 0 - 175
desktop/core/src/desktop/js/sql/reference/pig/udfReference.js

@@ -1,175 +0,0 @@
-// Licensed to Cloudera, Inc. under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  Cloudera, Inc. licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-import I18n from 'utils/i18n';
-
-const EVAL_FUNCTIONS = {
-  avg: { signature: 'AVG(%VAR%)', draggable: 'AVG()' },
-  concat: { signature: 'CONCAT(%VAR1%, %VAR2%)', draggable: 'CONCAT()' },
-  count: { signature: 'COUNT(%VAR%)', draggable: 'COUNT()' },
-  count_start: { signature: 'COUNT_START(%VAR%)', draggable: 'COUNT_START()' },
-  is_empty: { signature: 'IsEmpty(%VAR%)', draggable: 'IsEmpty()' },
-  diff: { signature: 'DIFF(%VAR1%, %VAR2%)', draggable: 'DIFF()' },
-  max: { signature: 'MAX(%VAR%)', draggable: 'MAX()' },
-  min: { signature: 'MIN(%VAR%)', draggable: 'MIN()' },
-  size: { signature: 'SIZE(%VAR%)', draggable: 'SIZE()' },
-  sum: { signature: 'SUM(%VAR%)', draggable: 'SUM()' },
-  tokenize: { signature: 'TOKENIZE(%VAR%, %DELIM%)', draggable: 'TOKENIZE()' }
-};
-
-const RELATIONAL_OPERATORS = {
-  cogroup: { signature: 'COGROUP %VAR% BY %VAR%', draggable: 'COGROUP %VAR% BY %VAR%' },
-  cross: { signature: 'CROSS %VAR1%, %VAR2%;', draggable: 'CROSS %VAR1%, %VAR2%;' },
-  distinct: { signature: 'DISTINCT %VAR%;', draggable: 'DISTINCT %VAR%;' },
-  filter: { signature: 'FILTER %VAR% BY %COND%', draggable: 'FILTER %VAR% BY %COND%' },
-  flatten: { signature: 'FLATTEN(%VAR%)', draggable: 'FLATTEN()' },
-  foreach_generate: {
-    signature: 'FOREACH %DATA% GENERATE %NEW_DATA%;',
-    draggable: 'FOREACH %DATA% GENERATE %NEW_DATA%;'
-  },
-  foreach: {
-    signature: 'FOREACH %DATA% {%NESTED_BLOCK%};',
-    draggable: 'FOREACH %DATA% {%NESTED_BLOCK%};'
-  },
-  group_by: { signature: 'GROUP %VAR% BY %VAR%', draggable: 'GROUP %VAR% BY %VAR%' },
-  group_all: { signature: 'GROUP %VAR% ALL', draggable: 'GROUP %VAR% ALL' },
-  join: { signature: 'JOIN %VAR% BY ', draggable: 'JOIN %VAR% BY ' },
-  limit: { signature: 'LIMIT %VAR% %N%', draggable: 'LIMIT %VAR% %N%' },
-  order: { signature: 'ORDER %VAR% BY %FIELD%', draggable: 'ORDER %VAR% BY %FIELD%' },
-  sample: { signature: 'SAMPLE %VAR% %SIZE%', draggable: 'SAMPLE %VAR% %SIZE%' },
-  split: {
-    signature: 'SPLIT %VAR1% INTO %VAR2% IF %EXPRESSIONS%',
-    draggable: 'SPLIT %VAR1% INTO %VAR2% IF %EXPRESSIONS%'
-  },
-  union: { signature: 'UNION %VAR1%, %VAR2%', draggable: 'UNION %VAR1%, %VAR2%' }
-};
-
-const INPUT_OUTPUT = {
-  load: { signature: "LOAD '%FILE%';", draggable: "LOAD '%FILE%';" },
-  dump: { signature: 'DUMP %VAR%;', draggable: 'DUMP %VAR%;' },
-  store: { signature: 'STORE %VAR% INTO %PATH%;', draggable: 'STORE %VAR% INTO %PATH%;' }
-};
-
-const DEBUG = {
-  explain: { signature: 'EXPLAIN %VAR%;', draggable: 'EXPLAIN %VAR%;' },
-  illustrate: { signature: 'ILLUSTRATE %VAR%;', draggable: 'ILLUSTRATE %VAR%;' },
-  describe: { signature: 'DESCRIBE %VAR%;', draggable: 'DESCRIBE %VAR%;' }
-};
-
-const HCATALOG = {
-  LOAD: {
-    signature: "LOAD '%TABLE%' USING org.apache.hcatalog.pig.HCatLoader();",
-    draggable: "LOAD '%TABLE%' USING org.apache.hcatalog.pig.HCatLoader();"
-  }
-};
-
-const MATH_FUNCTIONS = {
-  abs: { signature: 'ABS(%VAR%)', draggable: 'ABS()' },
-  acos: { signature: 'ACOS(%VAR%)', draggable: 'ACOS()' },
-  asin: { signature: 'ASIN(%VAR%)', draggable: 'ASIN()' },
-  atan: { signature: 'ATAN(%VAR%)', draggable: 'ATAN()' },
-  cbrt: { signature: 'CBRT(%VAR%)', draggable: 'CBRT()' },
-  ceil: { signature: 'CEIL(%VAR%)', draggable: 'CEIL()' },
-  cos: { signature: 'COS(%VAR%)', draggable: 'COS()' },
-  cosh: { signature: 'COSH(%VAR%)', draggable: 'COSH()' },
-  exp: { signature: 'EXP(%VAR%)', draggable: 'EXP()' },
-  floor: { signature: 'FLOOR(%VAR%)', draggable: 'FLOOR()' },
-  log: { signature: 'LOG(%VAR%)', draggable: 'LOG()' },
-  log10: { signature: 'LOG10(%VAR%)', draggable: 'LOG10()' },
-  random: { signature: 'RANDOM(%VAR%)', draggable: 'RANDOM()' },
-  round: { signature: 'ROUND(%VAR%)', draggable: 'ROUND()' },
-  sin: { signature: 'SIN(%VAR%)', draggable: 'SIN()' },
-  sinh: { signature: 'SINH(%VAR%)', draggable: 'SINH()' },
-  sqrt: { signature: 'SQRT(%VAR%)', draggable: 'SQRT()' },
-  tan: { signature: 'TAN(%VAR%)', draggable: 'TAN()' },
-  tanh: { signature: 'TANH(%VAR%)', draggable: 'TANH()' }
-};
-
-const TUPLE_BAG_MAP = {
-  totuple: { signature: 'TOTUPLE(%VAR%)', draggable: 'TOTUPLE()' },
-  tobag: { signature: 'TOBAG(%VAR%)', draggable: 'TOBAG()' },
-  tomap: { signature: 'TOMAP(%KEY%, %VALUE%)', draggable: 'TOMAP()' },
-  top: { signature: 'TOP(%topN%, %COLUMN%, %RELATION%)', draggable: 'TOP()' }
-};
-
-const STRING_FUNCTIONS = {
-  indexof: {
-    signature: "INDEXOF(%STRING%, '%CHARACTER%', %STARTINDEX%)",
-    draggable: 'INDEXOF()'
-  },
-  last_index_of: {
-    signature: "LAST_INDEX_OF(%STRING%, '%CHARACTER%', %STARTINDEX%)",
-    draggable: 'LAST_INDEX_OF()'
-  },
-  lower: { signature: 'LOWER(%STRING%)', draggable: 'LOWER()' },
-  regex_extract: {
-    signature: 'REGEX_EXTRACT(%STRING%, %REGEX%, %INDEX%)',
-    draggable: 'REGEX_EXTRACT()'
-  },
-  regex_extract_all: {
-    signature: 'REGEX_EXTRACT_ALL(%STRING%, %REGEX%)',
-    draggable: 'REGEX_EXTRACT_ALL()'
-  },
-  replace: { signature: "REPLACE(%STRING%, '%oldChar%', '%newChar%')", draggable: 'REPLACE()' },
-  strsplit: { signature: 'STRSPLIT(%STRING%, %REGEX%, %LIMIT%)', draggable: 'STRSPLIT()' },
-  substring: {
-    signature: 'SUBSTRING(%STRING%, %STARTINDEX%, %STOPINDEX%)',
-    draggable: 'SUBSTRING()'
-  },
-  trim: { signature: 'TRIM(%STRING%)', draggable: 'TRIM()' },
-  ucfirst: { signature: 'UCFIRST(%STRING%)', draggable: 'UCFIRST()' },
-  upper: { signature: 'UPPER(%STRING%)', draggable: 'UPPER()' }
-};
-
-const MACROS = {
-  import: { signature: "IMPORT '%PATH_TO_MACRO%';", draggable: "IMPORT '%PATH_TO_MACRO%';" }
-};
-
-const HBASE = {
-  load: {
-    signature:
-      "LOAD 'hbase://%TABLE%' USING org.apache.pig.backend.hadoop.hbase.HBaseStorage('%columnList%')",
-    draggable:
-      "LOAD 'hbase://%TABLE%' USING org.apache.pig.backend.hadoop.hbase.HBaseStorage('%columnList%')"
-  },
-  store: {
-    signature:
-      "STORE %VAR% INTO 'hbase://%TABLE%' USING org.apache.pig.backend.hadoop.hbase.HBaseStorage('%columnList%')",
-    draggable:
-      "STORE %VAR% INTO 'hbase://%TABLE%' USING org.apache.pig.backend.hadoop.hbase.HBaseStorage('%columnList%')"
-  }
-};
-
-const PYTHON_UDF = {
-  register: {
-    signature: "REGISTER 'python_udf.py' USING jython AS myfuncs;",
-    draggable: "REGISTER 'python_udf.py' USING jython AS myfuncs;"
-  }
-};
-
-export const UDF_CATEGORIES = [
-  { name: I18n('Eval'), functions: EVAL_FUNCTIONS },
-  { name: I18n('Relational Operators'), functions: RELATIONAL_OPERATORS },
-  { name: I18n('Input and Output'), functions: INPUT_OUTPUT },
-  { name: I18n('Debug'), functions: DEBUG },
-  { name: I18n('HCatalog'), functions: HCATALOG },
-  { name: I18n('Math'), functions: MATH_FUNCTIONS },
-  { name: I18n('Tuple, Bag and Map'), functions: TUPLE_BAG_MAP },
-  { name: I18n('String'), functions: STRING_FUNCTIONS },
-  { name: I18n('Macros'), functions: MACROS },
-  { name: I18n('HBase'), functions: HBASE },
-  { name: I18n('Python UDF'), functions: PYTHON_UDF }
-];

+ 566 - 0
desktop/core/src/desktop/js/sql/reference/pig/udfReference.ts

@@ -0,0 +1,566 @@
+// Licensed to Cloudera, Inc. under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  Cloudera, Inc. licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+import { UdfCategory, UdfCategoryFunctions } from 'sql/reference/types';
+import I18n from 'utils/i18n';
+
+const EVAL_FUNCTIONS: UdfCategoryFunctions = {
+  avg: {
+    name: 'avg',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'AVG(%VAR%)',
+    draggable: 'AVG()'
+  },
+  concat: {
+    name: 'concat',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'CONCAT(%VAR1%, %VAR2%)',
+    draggable: 'CONCAT()'
+  },
+  count: {
+    name: 'count',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'COUNT(%VAR%)',
+    draggable: 'COUNT()'
+  },
+  count_start: {
+    name: 'count_start',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'COUNT_START(%VAR%)',
+    draggable: 'COUNT_START()'
+  },
+  is_empty: {
+    name: 'is_empty',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'IsEmpty(%VAR%)',
+    draggable: 'IsEmpty()'
+  },
+  diff: {
+    name: 'diff',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'DIFF(%VAR1%, %VAR2%)',
+    draggable: 'DIFF()'
+  },
+  max: {
+    name: 'max',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'MAX(%VAR%)',
+    draggable: 'MAX()'
+  },
+  min: {
+    name: 'min',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'MIN(%VAR%)',
+    draggable: 'MIN()'
+  },
+  size: {
+    name: 'size',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'SIZE(%VAR%)',
+    draggable: 'SIZE()'
+  },
+  sum: {
+    name: 'sum',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'SUM(%VAR%)',
+    draggable: 'SUM()'
+  },
+  tokenize: {
+    name: 'tokenize',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'TOKENIZE(%VAR%, %DELIM%)',
+    draggable: 'TOKENIZE()'
+  }
+};
+
+const RELATIONAL_OPERATORS: UdfCategoryFunctions = {
+  cogroup: {
+    name: 'cogroup',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'COGROUP %VAR% BY %VAR%',
+    draggable: 'COGROUP %VAR% BY %VAR%'
+  },
+  cross: {
+    name: 'cross',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'CROSS %VAR1%, %VAR2%;',
+    draggable: 'CROSS %VAR1%, %VAR2%;'
+  },
+  distinct: {
+    name: 'distinct',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'DISTINCT %VAR%;',
+    draggable: 'DISTINCT %VAR%;'
+  },
+  filter: {
+    name: 'filter',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'FILTER %VAR% BY %COND%',
+    draggable: 'FILTER %VAR% BY %COND%'
+  },
+  flatten: {
+    name: 'flatten',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'FLATTEN(%VAR%)',
+    draggable: 'FLATTEN()'
+  },
+  foreach_generate: {
+    name: 'foreach_generate',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'FOREACH %DATA% GENERATE %NEW_DATA%;',
+    draggable: 'FOREACH %DATA% GENERATE %NEW_DATA%;'
+  },
+  foreach: {
+    name: 'foreach',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'FOREACH %DATA% {%NESTED_BLOCK%};',
+    draggable: 'FOREACH %DATA% {%NESTED_BLOCK%};'
+  },
+  group_by: {
+    name: 'group_by',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'GROUP %VAR% BY %VAR%',
+    draggable: 'GROUP %VAR% BY %VAR%'
+  },
+  group_all: {
+    name: 'group_all',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'GROUP %VAR% ALL',
+    draggable: 'GROUP %VAR% ALL'
+  },
+  join: {
+    name: 'join',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'JOIN %VAR% BY ',
+    draggable: 'JOIN %VAR% BY '
+  },
+  limit: {
+    name: 'limit',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'LIMIT %VAR% %N%',
+    draggable: 'LIMIT %VAR% %N%'
+  },
+  order: {
+    name: 'order',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'ORDER %VAR% BY %FIELD%',
+    draggable: 'ORDER %VAR% BY %FIELD%'
+  },
+  sample: {
+    name: 'sample',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'SAMPLE %VAR% %SIZE%',
+    draggable: 'SAMPLE %VAR% %SIZE%'
+  },
+  split: {
+    name: 'split',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'SPLIT %VAR1% INTO %VAR2% IF %EXPRESSIONS%',
+    draggable: 'SPLIT %VAR1% INTO %VAR2% IF %EXPRESSIONS%'
+  },
+  union: {
+    name: 'union',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'UNION %VAR1%, %VAR2%',
+    draggable: 'UNION %VAR1%, %VAR2%'
+  }
+};
+
+const INPUT_OUTPUT: UdfCategoryFunctions = {
+  load: {
+    name: 'load',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: "LOAD '%FILE%';",
+    draggable: "LOAD '%FILE%';"
+  },
+  dump: {
+    name: 'dump',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'DUMP %VAR%;',
+    draggable: 'DUMP %VAR%;'
+  },
+  store: {
+    name: 'store',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'STORE %VAR% INTO %PATH%;',
+    draggable: 'STORE %VAR% INTO %PATH%;'
+  }
+};
+
+const DEBUG: UdfCategoryFunctions = {
+  explain: {
+    name: 'explain',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'EXPLAIN %VAR%;',
+    draggable: 'EXPLAIN %VAR%;'
+  },
+  illustrate: {
+    name: 'illustrate',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'ILLUSTRATE %VAR%;',
+    draggable: 'ILLUSTRATE %VAR%;'
+  },
+  describe: {
+    name: 'describe',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'DESCRIBE %VAR%;',
+    draggable: 'DESCRIBE %VAR%;'
+  }
+};
+
+const HCATALOG: UdfCategoryFunctions = {
+  LOAD: {
+    name: 'LOAD',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: "LOAD '%TABLE%' USING org.apache.hcatalog.pig.HCatLoader();",
+    draggable: "LOAD '%TABLE%' USING org.apache.hcatalog.pig.HCatLoader();"
+  }
+};
+
+const MATH_FUNCTIONS: UdfCategoryFunctions = {
+  abs: {
+    name: 'abs',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'ABS(%VAR%)',
+    draggable: 'ABS()'
+  },
+  acos: {
+    name: 'acos',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'ACOS(%VAR%)',
+    draggable: 'ACOS()'
+  },
+  asin: {
+    name: 'asin',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'ASIN(%VAR%)',
+    draggable: 'ASIN()'
+  },
+  atan: {
+    name: 'atan',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'ATAN(%VAR%)',
+    draggable: 'ATAN()'
+  },
+  cbrt: {
+    name: 'cbrt',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'CBRT(%VAR%)',
+    draggable: 'CBRT()'
+  },
+  ceil: {
+    name: 'ceil',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'CEIL(%VAR%)',
+    draggable: 'CEIL()'
+  },
+  cos: {
+    name: 'cos',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'COS(%VAR%)',
+    draggable: 'COS()'
+  },
+  cosh: {
+    name: 'cosh',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'COSH(%VAR%)',
+    draggable: 'COSH()'
+  },
+  exp: {
+    name: 'exp',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'EXP(%VAR%)',
+    draggable: 'EXP()'
+  },
+  floor: {
+    name: 'floor',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'FLOOR(%VAR%)',
+    draggable: 'FLOOR()'
+  },
+  log: {
+    name: 'log',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'LOG(%VAR%)',
+    draggable: 'LOG()'
+  },
+  log10: {
+    name: 'log10',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'LOG10(%VAR%)',
+    draggable: 'LOG10()'
+  },
+  random: {
+    name: 'random',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'RANDOM(%VAR%)',
+    draggable: 'RANDOM()'
+  },
+  round: {
+    name: 'round',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'ROUND(%VAR%)',
+    draggable: 'ROUND()'
+  },
+  sin: {
+    name: 'sin',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'SIN(%VAR%)',
+    draggable: 'SIN()'
+  },
+  sinh: {
+    name: 'sinh',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'SINH(%VAR%)',
+    draggable: 'SINH()'
+  },
+  sqrt: {
+    name: 'sqrt',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'SQRT(%VAR%)',
+    draggable: 'SQRT()'
+  },
+  tan: {
+    name: 'tan',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'TAN(%VAR%)',
+    draggable: 'TAN()'
+  },
+  tanh: {
+    name: 'tanh',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'TANH(%VAR%)',
+    draggable: 'TANH()'
+  }
+};
+
+const TUPLE_BAG_MAP: UdfCategoryFunctions = {
+  totuple: {
+    name: 'totuple',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'TOTUPLE(%VAR%)',
+    draggable: 'TOTUPLE()'
+  },
+  tobag: {
+    name: 'tobag',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'TOBAG(%VAR%)',
+    draggable: 'TOBAG()'
+  },
+  tomap: {
+    name: 'tomap',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'TOMAP(%KEY%, %VALUE%)',
+    draggable: 'TOMAP()'
+  },
+  top: {
+    name: 'top',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'TOP(%topN%, %COLUMN%, %RELATION%)',
+    draggable: 'TOP()'
+  }
+};
+
+const STRING_FUNCTIONS: UdfCategoryFunctions = {
+  indexof: {
+    name: 'indexof',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: "INDEXOF(%STRING%, '%CHARACTER%', %STARTINDEX%)",
+    draggable: 'INDEXOF()'
+  },
+  last_index_of: {
+    name: 'last_index_of',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: "LAST_INDEX_OF(%STRING%, '%CHARACTER%', %STARTINDEX%)",
+    draggable: 'LAST_INDEX_OF()'
+  },
+  lower: {
+    name: 'lower',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'LOWER(%STRING%)',
+    draggable: 'LOWER()'
+  },
+  regex_extract: {
+    name: 'regex_extract',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'REGEX_EXTRACT(%STRING%, %REGEX%, %INDEX%)',
+    draggable: 'REGEX_EXTRACT()'
+  },
+  regex_extract_all: {
+    name: 'regex_extract_all',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'REGEX_EXTRACT_ALL(%STRING%, %REGEX%)',
+    draggable: 'REGEX_EXTRACT_ALL()'
+  },
+  replace: {
+    name: 'replace',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: "REPLACE(%STRING%, '%oldChar%', '%newChar%')",
+    draggable: 'REPLACE()'
+  },
+  strsplit: {
+    name: 'strsplit',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'STRSPLIT(%STRING%, %REGEX%, %LIMIT%)',
+    draggable: 'STRSPLIT()'
+  },
+  substring: {
+    name: 'substring',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'SUBSTRING(%STRING%, %STARTINDEX%, %STOPINDEX%)',
+    draggable: 'SUBSTRING()'
+  },
+  trim: {
+    name: 'trim',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'TRIM(%STRING%)',
+    draggable: 'TRIM()'
+  },
+  ucfirst: {
+    name: 'ucfirst',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'UCFIRST(%STRING%)',
+    draggable: 'UCFIRST()'
+  },
+  upper: {
+    name: 'upper',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: 'UPPER(%STRING%)',
+    draggable: 'UPPER()'
+  }
+};
+
+const MACROS: UdfCategoryFunctions = {
+  import: {
+    name: 'import',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: "IMPORT '%PATH_TO_MACRO%';",
+    draggable: "IMPORT '%PATH_TO_MACRO%';"
+  }
+};
+
+const HBASE: UdfCategoryFunctions = {
+  load: {
+    name: 'load',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature:
+      "LOAD 'hbase://%TABLE%' USING org.apache.pig.backend.hadoop.hbase.HBaseStorage('%columnList%')",
+    draggable:
+      "LOAD 'hbase://%TABLE%' USING org.apache.pig.backend.hadoop.hbase.HBaseStorage('%columnList%')"
+  },
+  store: {
+    name: 'store',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature:
+      "STORE %VAR% INTO 'hbase://%TABLE%' USING org.apache.pig.backend.hadoop.hbase.HBaseStorage('%columnList%')",
+    draggable:
+      "STORE %VAR% INTO 'hbase://%TABLE%' USING org.apache.pig.backend.hadoop.hbase.HBaseStorage('%columnList%')"
+  }
+};
+
+const PYTHON_UDF: UdfCategoryFunctions = {
+  register: {
+    name: 'register',
+    returnTypes: ['T'],
+    arguments: [[{ type: 'T', multiple: true }]],
+    signature: "REGISTER 'python_udf.py' USING jython AS myfuncs;",
+    draggable: "REGISTER 'python_udf.py' USING jython AS myfuncs;"
+  }
+};
+
+export const UDF_CATEGORIES: UdfCategory[] = [
+  { name: I18n('Eval'), functions: EVAL_FUNCTIONS },
+  { name: I18n('Relational Operators'), functions: RELATIONAL_OPERATORS },
+  { name: I18n('Input and Output'), functions: INPUT_OUTPUT },
+  { name: I18n('Debug'), functions: DEBUG },
+  { name: I18n('HCatalog'), functions: HCATALOG },
+  { name: I18n('Math'), functions: MATH_FUNCTIONS },
+  { name: I18n('Tuple, Bag and Map'), functions: TUPLE_BAG_MAP },
+  { name: I18n('String'), functions: STRING_FUNCTIONS },
+  { name: I18n('Macros'), functions: MACROS },
+  { name: I18n('HBase'), functions: HBASE },
+  { name: I18n('Python UDF'), functions: PYTHON_UDF }
+];

+ 9 - 3
desktop/core/src/desktop/js/sql/reference/sqlReferenceRepository.test.ts

@@ -14,7 +14,8 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-import { Argument, getArgumentDetailsForUdf } from './sqlReferenceRepository';
+import { UdfArgument } from 'sql/reference/types';
+import { getArgumentDetailsForUdf } from './sqlReferenceRepository';
 import * as apiUtils from 'sql/reference/apiUtils';
 
 describe('sqlReferenceRepository.js', () => {
@@ -64,7 +65,12 @@ describe('sqlReferenceRepository.js', () => {
           },
           concat: {
             returnTypes: ['STRING'],
-            arguments: [[{ type: 'STRING', multiple: true }, { type: 'BINARY', multiple: true }]],
+            arguments: [
+              [
+                { type: 'STRING', multiple: true },
+                { type: 'BINARY', multiple: true }
+              ]
+            ],
             signature: 'concat(STRING|BINARY a, STRING|BINARY b...)',
             draggable: 'concat()',
             description: ''
@@ -87,7 +93,7 @@ describe('sqlReferenceRepository.js', () => {
 
   jest.spyOn(apiUtils, 'fetchUdfs').mockImplementation(() => Promise.resolve([]));
 
-  const extractType = (details: Argument): string => details.type;
+  const extractType = (details: UdfArgument): string => details.type;
 
   it('should give the expected argument types at a specific position', async () => {
     expect((await getArgumentDetailsForUdf(hiveConn, 'cos', 1)).map(extractType)).toEqual([

+ 15 - 59
desktop/core/src/desktop/js/sql/reference/sqlReferenceRepository.ts

@@ -14,54 +14,14 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+import { SetOptions, UdfArgument, UdfCategory, UdfCategoryFunctions, UdfDetails } from 'sql/reference/types';
+import { Connector } from 'types';
 import { matchesType } from './typeUtils';
 import I18n from 'utils/i18n';
 import huePubSub from 'utils/huePubSub';
-import { clearUdfCache, getCachedApiUdfs, setCachedApiUdfs } from './apiCache';
+import { clearUdfCache, getCachedUdfCategories, setCachedUdfCategories } from './apiCache';
 import { fetchUdfs } from './apiUtils';
 
-export interface Connector {
-  id: string;
-  dialect: string;
-}
-
-export interface Argument {
-  type: string;
-  multiple?: boolean;
-  keywords?: string[];
-  optional?: boolean;
-}
-
-export interface UdfDetails {
-  returnTypes: string[];
-  name: string;
-  arguments: Argument[][];
-  signature: string;
-  draggable: string;
-  description?: string;
-}
-
-interface UdfCategoryFunctions {
-  [attr: string]: UdfDetails;
-}
-
-interface UdfCategory {
-  name: string;
-  functions: UdfCategoryFunctions;
-  isAnalytic?: boolean;
-  isAggregate?: boolean;
-}
-
-export interface SetOptions {
-  [attr: string]: SetDetails;
-}
-
-interface SetDetails {
-  default: string;
-  type: string;
-  details: string;
-}
-
 export const CLEAR_UDF_CACHE_EVENT = 'hue.clear.udf.cache';
 
 const SET_REFS: { [attr: string]: () => Promise<{ SET_OPTIONS?: SetOptions }> } = {
@@ -127,15 +87,11 @@ const mergeWithApiUdfs = async (
   connector: Connector,
   database?: string
 ) => {
-  let apiUdfs = await getCachedApiUdfs(connector, database);
-  if (!apiUdfs) {
-    apiUdfs = await fetchUdfs({
-      connector: connector,
-      database: database,
-      silenceErrors: true
-    });
-    await setCachedApiUdfs(connector, database, apiUdfs);
-  }
+  const apiUdfs = await fetchUdfs({
+    connector: connector,
+    database: database,
+    silenceErrors: true
+  });
 
   if (apiUdfs.length) {
     const additionalUdfs = findUdfsToAdd(apiUdfs, categories);
@@ -156,6 +112,10 @@ export const getUdfCategories = async (
   const promiseKey = getMergedUdfKey(connector, database);
   if (!mergedUdfPromises[promiseKey]) {
     mergedUdfPromises[promiseKey] = new Promise(async resolve => {
+      const cachedCategories = await getCachedUdfCategories(connector, database);
+      if (cachedCategories) {
+        return resolve(cachedCategories);
+      }
       let categories: UdfCategory[] = [];
       if (UDF_REFS[connector.dialect]) {
         const module = await UDF_REFS[connector.dialect]();
@@ -164,11 +124,7 @@ export const getUdfCategories = async (
         }
       }
       await mergeWithApiUdfs(categories, connector, database);
-      categories.forEach(category => {
-        Object.keys(category.functions).forEach(udfName => {
-          category.functions[udfName].name = udfName;
-        });
-      });
+      await setCachedUdfCategories(connector, database, categories);
       resolve(categories);
     });
   }
@@ -245,13 +201,13 @@ export const getArgumentDetailsForUdf = async (
   connector: Connector,
   functionName: string,
   argumentPosition: number
-): Promise<Argument[]> => {
+): Promise<UdfArgument[]> => {
   const foundFunctions = await findUdf(connector, functionName);
   if (!foundFunctions.length) {
     return [{ type: 'T' }];
   }
 
-  const possibleArguments: Argument[] = [];
+  const possibleArguments: UdfArgument[] = [];
   foundFunctions.forEach(foundFunction => {
     const args = foundFunction.arguments;
     if (argumentPosition > args.length) {
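
To summarize the reworked getUdfCategories flow after this change (serve the cache first, otherwise merge the dialect reference with API UDFs and cache the result), here is a simplified standalone sketch; loadDialectCategories and mergeWithApiUdfs are stand-ins for the real internals, not the actual Hue implementation:

```typescript
import { getCachedUdfCategories, setCachedUdfCategories } from 'sql/reference/apiCache';
import { UdfCategory } from 'sql/reference/types';
import { Connector } from 'types';

// Stand-in for the per-dialect udfReference modules loaded via UDF_REFS.
const loadDialectCategories = async (_connector: Connector): Promise<UdfCategory[]> => [];

// Stand-in for the merge step that pulls additional UDFs from the autocomplete API.
const mergeWithApiUdfs = async (
  _categories: UdfCategory[],
  _connector: Connector,
  _database?: string
): Promise<void> => {};

export const resolveUdfCategories = async (
  connector: Connector,
  database?: string
): Promise<UdfCategory[]> => {
  // 1. Serve cached categories when present (per connector and database).
  const cached = await getCachedUdfCategories(connector, database);
  if (cached) {
    return cached;
  }
  // 2. Otherwise start from the static dialect reference and merge in API UDFs.
  const categories = await loadDialectCategories(connector);
  await mergeWithApiUdfs(categories, connector, database);
  // 3. Cache the merged categories so individual describe calls can reuse them.
  await setCachedUdfCategories(connector, database, categories);
  return categories;
};
```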

+ 1 - 4
desktop/core/src/desktop/js/sql/reference/typeUtils.ts

@@ -14,14 +14,11 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+import { TypeConversion } from 'sql/reference/types';
 import { TYPE_CONVERSION as HIVE_TYPE_CONVERSION } from './hive/typeConversion';
 import { TYPE_CONVERSION as IMPALA_TYPE_CONVERSION } from './impala/typeConversion';
 import { TYPE_CONVERSION as GENERIC_TYPE_CONVERSION } from './generic/typeConversion';
 
-export interface TypeConversion {
-  [attr: string]: { [attr: string]: boolean };
-}
-
 const stripPrecision = (types: string[]): string[] => {
   const result: string[] = [];
   types.forEach(type => {

+ 57 - 0
desktop/core/src/desktop/js/sql/reference/types.ts

@@ -0,0 +1,57 @@
+// Licensed to Cloudera, Inc. under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  Cloudera, Inc. licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+export interface UdfArgument {
+  type: string;
+  multiple?: boolean;
+  keywords?: string[];
+  optional?: boolean;
+}
+
+export interface UdfDetails {
+  returnTypes: string[];
+  name: string;
+  arguments: UdfArgument[][];
+  altArguments?: UdfArgument[][];
+  signature: string;
+  draggable: string;
+  description?: string;
+}
+
+export interface UdfCategoryFunctions {
+  [attr: string]: UdfDetails;
+}
+
+export interface UdfCategory {
+  name: string;
+  functions: UdfCategoryFunctions;
+  isAnalytic?: boolean;
+  isAggregate?: boolean;
+}
+
+export interface SetOptions {
+  [attr: string]: SetDetails;
+}
+
+export interface SetDetails {
+  default: string;
+  type: string;
+  description: string;
+}
+
+export interface TypeConversion {
+  [attr: string]: { [attr: string]: boolean };
+}

+ 4 - 16
desktop/core/src/desktop/js/sql/reference/apiCache.js → desktop/core/src/desktop/js/types.ts

@@ -14,19 +14,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-import localForage from 'localforage';
-
-const GLOBAL_UDF_CACHE_KEY = 'HUE_GLOBAL_UDF_KEY';
-
-const getStore = connector =>
-  localForage.createInstance({
-    name: 'HueUdfCatalog_' + connector.id
-  });
-
-export const clearUdfCache = async connector => await getStore(connector).clear();
-
-export const getCachedApiUdfs = async (connector, database) =>
-  await getStore(connector).getItem(database ? database.name : GLOBAL_UDF_CACHE_KEY);
-
-export const setCachedApiUdfs = async (connector, database, apiUdfs) =>
-  await getStore(connector).setItem(database ? database.name : GLOBAL_UDF_CACHE_KEY, apiUdfs);
+export interface Connector {
+  id: string;
+  dialect: string;
+}

Some files were not shown because the diff is too large