@@ -14,25 +14,10 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-import {
-  SetOptions,
-  UdfArgument,
-  UdfCategory,
-  UdfCategoryFunctions,
-  UdfDetails
-} from 'sql/reference/types';
-import { Connector } from 'types/config';
-import { matchesType } from './typeUtils';
-import I18n from 'utils/i18n';
-import huePubSub from 'utils/huePubSub';
-import { clearUdfCache, getCachedUdfCategories, setCachedUdfCategories } from './apiCache';
-import { fetchDescribe, fetchUdfs } from './apiUtils';
-
-export const CLEAR_UDF_CACHE_EVENT = 'hue.clear.udf.cache';
-export const DESCRIBE_UDF_EVENT = 'hue.describe.udf';
-export const UDF_DESCRIBED_EVENT = 'hue.udf.described';
+import { SetOptions, SqlReferenceProvider, UdfCategory } from 'sql/reference/types';
 
 const GENERIC = 'generic';
+const EMPTY_KEYWORDS = new Set<string>();
 
 const KEYWORD_REFS: { [attr: string]: () => Promise<{ RESERVED_WORDS?: Set<string> }> } = {
   calcite: async () => import(/* webpackChunkName: "calcite-ref" */ './calcite/reservedKeywords'),
@@ -57,261 +42,32 @@ const UDF_REFS: { [attr: string]: () => Promise<{ UDF_CATEGORIES?: UdfCategory[]
   flink: async () => import(/* webpackChunkName: "flink-ref" */ './flink/udfReference')
 };
 
-const IGNORED_UDF_REGEX = /^[!=$%&*+-/<>^|~]+$/;
-
-const mergedUdfPromises: { [attr: string]: Promise<UdfCategory[]> } = {};
-
-const getMergedUdfKey = (connector: Connector, database?: string): string => {
-  let key = connector.id;
-  if (database) {
-    key += '_' + database;
-  }
-  return key;
-};
-
-export const hasUdfCategories = (connector: Connector): boolean =>
-  !!connector.dialect && typeof UDF_REFS[connector.dialect] !== 'undefined';
-
-const findUdfsToAdd = (
-  apiUdfs: UdfDetails[],
-  existingCategories: UdfCategory[]
-): UdfCategoryFunctions => {
-  const existingUdfNames = new Set();
-  existingCategories.forEach(category => {
-    Object.keys(category.functions).forEach(udfName => {
-      existingUdfNames.add(udfName.toUpperCase());
-    });
-  });
-
-  const result: UdfCategoryFunctions = {};
-
-  apiUdfs.forEach(apiUdf => {
-    if (
-      !result[apiUdf.name] &&
-      !existingUdfNames.has(apiUdf.name.toUpperCase()) &&
-      !IGNORED_UDF_REGEX.test(apiUdf.name)
-    ) {
-      result[apiUdf.name] = apiUdf;
-    }
-  });
-
-  return result;
-};
-
-const mergeWithApiUdfs = async (
-  categories: UdfCategory[],
-  connector: Connector,
-  database?: string
-) => {
-  const apiUdfs = await fetchUdfs(connector, database);
-
-  if (apiUdfs.length) {
-    const additionalUdfs = findUdfsToAdd(apiUdfs, categories);
-    if (Object.keys(additionalUdfs).length) {
-      const generalCategory = {
-        name: I18n('General'),
-        functions: additionalUdfs
-      };
-      categories.unshift(generalCategory);
-    }
-  }
-};
-
-export const getUdfCategories = async (
-  connector: Connector,
-  database?: string
-): Promise<UdfCategory[]> => {
-  const promiseKey = getMergedUdfKey(connector, database);
-  if (!mergedUdfPromises[promiseKey]) {
-    mergedUdfPromises[promiseKey] = new Promise(async resolve => {
-      const cachedCategories = await getCachedUdfCategories(connector, database);
-      if (cachedCategories) {
-        resolve(cachedCategories);
-      }
-      let categories: UdfCategory[] = [];
-      if (connector.dialect && UDF_REFS[connector.dialect]) {
-        const module = await UDF_REFS[connector.dialect]();
-        if (module.UDF_CATEGORIES) {
-          categories = module.UDF_CATEGORIES;
-          categories.forEach(category => {
-            Object.values(category.functions).forEach(udf => {
-              udf.described = true;
-            });
-          });
-        }
-      }
-      await mergeWithApiUdfs(categories, connector, database);
-      await setCachedUdfCategories(connector, database, categories);
-      resolve(categories);
-    });
-  }
-
-  return await mergedUdfPromises[promiseKey];
-};
-
-export const findUdf = async (
-  connector: Connector,
-  functionName: string
-): Promise<UdfDetails[]> => {
-  const categories = await getUdfCategories(connector);
-  const found: UdfDetails[] = [];
-  categories.forEach(category => {
-    if (category.functions[functionName]) {
-      found.push(category.functions[functionName]);
-    }
-  });
-  return found;
-};
-
-export const getReturnTypesForUdf = async (
-  connector: Connector,
-  functionName: string
-): Promise<string[]> => {
-  if (!functionName) {
-    return ['T'];
-  }
-  const udfs = await findUdf(connector, functionName);
-  if (udfs.length) {
-    let returnTypesPresent = false;
-    const returnTypes = new Set<string>();
-    udfs.forEach(udf => {
-      if (udf.returnTypes) {
-        returnTypesPresent = true;
-        udf.returnTypes.forEach(type => returnTypes.add(type));
-      }
-    });
-    if (returnTypesPresent) {
-      return [...returnTypes];
-    }
+class SqlReferenceRepository implements SqlReferenceProvider {
+  async getReservedKeywords(dialect: string): Promise<Set<string>> {
+    const refImport = KEYWORD_REFS[dialect] || KEYWORD_REFS[GENERIC];
+    const module = await refImport();
+    return module.RESERVED_WORDS || EMPTY_KEYWORDS;
   }
 
-  return ['T'];
-};
-
-export const getUdfsWithReturnTypes = async (
-  connector: Connector,
-  returnTypes: string[],
-  includeAggregate?: boolean,
-  includeAnalytic?: boolean
-): Promise<UdfDetails[]> => {
-  const categories = await getUdfCategories(connector);
-  const result: UdfDetails[] = [];
-  categories.forEach(category => {
-    if (
-      (!category.isAnalytic && !category.isAggregate) ||
-      (includeAggregate && category.isAggregate) ||
-      (includeAnalytic && category.isAnalytic)
-    ) {
-      Object.keys(category.functions).forEach(udfName => {
-        const udf = category.functions[udfName];
-        if (
-          !returnTypes ||
-          (connector.dialect && matchesType(connector.dialect, returnTypes, udf.returnTypes))
-        ) {
-          result.push(udf);
-        }
-      });
+  async getSetOptions(dialect: string): Promise<SetOptions> {
+    if (SET_REFS[dialect]) {
+      const module = await SET_REFS[dialect]();
+      return module.SET_OPTIONS || {};
     }
-  });
-  result.sort((a, b) => a.name.localeCompare(b.name));
-  return result;
-};
-
-export const getArgumentDetailsForUdf = async (
-  connector: Connector,
-  functionName: string,
-  argumentPosition: number
-): Promise<UdfArgument[]> => {
-  const foundFunctions = await findUdf(connector, functionName);
-  if (!foundFunctions.length) {
-    return [{ type: 'T' }];
+    return {};
   }
 
-  const possibleArguments: UdfArgument[] = [];
-  foundFunctions.forEach(foundFunction => {
-    const args = foundFunction.arguments;
-    if (argumentPosition > args.length) {
-      possibleArguments.push(...args[args.length - 1].filter(type => type.multiple));
-    } else {
-      possibleArguments.push(...args[argumentPosition - 1]);
-    }
-  });
-  return possibleArguments;
-};
-
-export const getSetOptions = async (connector: Connector): Promise<SetOptions> => {
-  if (connector.dialect && SET_REFS[connector.dialect]) {
-    const module = await SET_REFS[connector.dialect]();
-    if (module.SET_OPTIONS) {
-      return module.SET_OPTIONS;
-    }
+  async getUdfCategories(dialect: string): Promise<UdfCategory[]> {
+    const refImport = UDF_REFS[dialect] || UDF_REFS[GENERIC];
+    const module = await refImport();
+    return module.UDF_CATEGORIES || [];
   }
-  return {};
-};
 
-export const isReserved = async (connector: Connector, word: string): Promise<boolean> => {
-  const refImport = (connector.dialect && KEYWORD_REFS[connector.dialect]) || KEYWORD_REFS[GENERIC];
-  const module = await refImport();
-  if (module.RESERVED_WORDS) {
-    return module.RESERVED_WORDS.has(word.toUpperCase());
+  hasUdfCategories(dialect: string): boolean {
+    return !!UDF_REFS[dialect];
   }
+}
 
-  return false;
-};
-
-const findUdfInCategories = (
-  categories: UdfCategory[],
-  udfName: string
-): UdfDetails | undefined => {
-  let foundUdf = undefined;
-  categories.some(category =>
-    Object.values(category.functions).some(udf => {
-      if (udf.name === udfName) {
-        foundUdf = udf;
-        return true;
-      }
-    })
-  );
-  return foundUdf;
-};
+const sqlReferenceRepository = new SqlReferenceRepository();
 
-huePubSub.subscribe(
-  DESCRIBE_UDF_EVENT,
-  async (details: { connector: Connector; udfName: string; database?: string }): Promise<void> => {
-    const categories = await getUdfCategories(details.connector, details.database);
-    const foundUdf = findUdfInCategories(categories, details.udfName);
-    if (foundUdf && !foundUdf.described) {
-      const apiUdf = await fetchDescribe(details.connector, foundUdf, details.database);
-      if (apiUdf) {
-        if (apiUdf.description) {
-          foundUdf.description = apiUdf.description;
-        }
-        if (apiUdf.signature) {
-          foundUdf.signature = apiUdf.signature;
-        }
-        foundUdf.described = true;
-        await setCachedUdfCategories(details.connector, details.database, categories);
-        huePubSub.publish(UDF_DESCRIBED_EVENT, {
-          connector: details.connector,
-          database: details.database,
-          udf: foundUdf
-        });
-      }
-    }
-  }
-);
-
-huePubSub.subscribe(
-  CLEAR_UDF_CACHE_EVENT,
-  async (details: { connector: Connector; callback: () => void }) => {
-    await clearUdfCache(details.connector);
-    Object.keys(mergedUdfPromises).forEach(key => {
-      if (key === details.connector.id || key.indexOf(details.connector.id + '_') === 0) {
-        delete mergedUdfPromises[key];
-      }
-    });
-    if (details.callback) {
-      details.callback();
-    }
-  }
-);
+export default sqlReferenceRepository;
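
Below is a minimal usage sketch of the provider introduced by this diff. It is illustrative only: the import path `sql/reference/sqlReferenceRepository` and the `'flink'` dialect key are assumptions inferred from the chunk names visible above, not part of the change itself.

```typescript
// Hypothetical consumer of the default-exported singleton added in this diff.
// The import path is an assumption; point it at wherever this module lives.
import sqlReferenceRepository from 'sql/reference/sqlReferenceRepository';

const logDialectReference = async (dialect: string): Promise<void> => {
  // Unknown dialects fall back to the generic keyword set (or EMPTY_KEYWORDS).
  const keywords = await sqlReferenceRepository.getReservedKeywords(dialect);
  console.log(`${dialect}: ${keywords.size} reserved keywords`);

  // UDF references are only bundled for some dialects, so check first.
  if (sqlReferenceRepository.hasUdfCategories(dialect)) {
    const categories = await sqlReferenceRepository.getUdfCategories(dialect);
    console.log(`${dialect}: ${categories.length} UDF categories`);
  }

  // Dialects without a SET reference resolve to an empty object.
  const setOptions = await sqlReferenceRepository.getSetOptions(dialect);
  console.log(`${dialect}: ${Object.keys(setOptions).length} SET options`);
};

logDialectReference('flink').catch(console.error);
```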