
HUE-8818 [frontend] Add js tools folder to eslint and fix linting issues

Johan Ahlen, 6 years ago
parent
commit
777f0ff78a

+ 1 - 1
package.json

@@ -109,7 +109,7 @@
     "less": "./node_modules/.bin/grunt less",
     "less-dev": "./node_modules/.bin/grunt watch",
     "less-lint": "stylelint \"desktop/core/src/desktop/static/desktop/less/**/*.less\" --fix",
-    "lint": "eslint desktop/core/src/desktop/js",
+    "lint": "eslint desktop/core/src/desktop/js tools/sql-docs tools/jison",
     "lint-debug": "npm run lint -- --debug",
     "lint-fix": "npm run lint -- --fix",
     "test": "babel-node desktop/core/src/desktop/js/spec/run.js",

+ 191 - 115
tools/jison/generateParsers.js

@@ -14,10 +14,11 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-let fs = require('fs');
-let exec = require('child_process').exec;
+const fs = require('fs');
+const exec = require('child_process').exec;
 
-const LICENSE = '// Licensed to Cloudera, Inc. under one\n' +
+const LICENSE =
+  '// Licensed to Cloudera, Inc. under one\n' +
   '// or more contributor license agreements.  See the NOTICE file\n' +
   '// distributed with this work for additional information\n' +
   '// regarding copyright ownership.  Cloudera, Inc. licenses this file\n' +
@@ -33,7 +34,8 @@ const LICENSE = '// Licensed to Cloudera, Inc. under one\n' +
   '// See the License for the specific language governing permissions and\n' +
   '// limitations under the License.\n';
 
-const SQL_STATEMENTS_PARSER_JSDOC = '/**\n' +
+const SQL_STATEMENTS_PARSER_JSDOC =
+  '/**\n' +
   ' * @param {string} input\n' +
   ' *\n' +
   ' * @return {SqlStatementsParserResult}\n' +
@@ -46,141 +48,214 @@ const PARSERS = {
   globalSearchParser: {
     sources: ['globalSearchParser.jison'],
     target: 'globalSearchParser.jison',
-    afterParse: (contents) => new Promise(resolve => {
-      resolve(LICENSE +
-        contents.replace('var globalSearchParser = ', 'import SqlParseSupport from \'parse/sqlParseSupport\';\n\nvar globalSearchParser = ') +
-        '\nexport default globalSearchParser;\n');
-    })
+    afterParse: contents =>
+      new Promise(resolve => {
+        resolve(
+          LICENSE +
+            contents.replace(
+              'var globalSearchParser = ',
+              "import SqlParseSupport from 'parse/sqlParseSupport';\n\nvar globalSearchParser = "
+            ) +
+            '\nexport default globalSearchParser;\n'
+        );
+      })
   },
   solrFormulaParser: {
     sources: ['solrFormulaParser.jison'],
     target: 'solrFormulaParser.jison',
-    afterParse: (contents) => new Promise(resolve => {
-      resolve(LICENSE + contents + 'export default solrFormulaParser;\n');
-    })
+    afterParse: contents =>
+      new Promise(resolve => {
+        resolve(LICENSE + contents + 'export default solrFormulaParser;\n');
+      })
   },
   solrQueryParser: {
     sources: ['solrQueryParser.jison'],
     target: 'solrQueryParser.jison',
-    afterParse: (contents) => new Promise(resolve => {
-      resolve(LICENSE + contents + 'export default solrQueryParser;\n');
-    })
+    afterParse: contents =>
+      new Promise(resolve => {
+        resolve(LICENSE + contents + 'export default solrQueryParser;\n');
+      })
   },
   sqlAutocompleteParser: {
     sources: [
-      'autocomplete_header.jison', 'sql_main.jison', 'sql_valueExpression.jison', 'sql_error.jison', 'sql_alter.jison',
-      'sql_analyze.jison', 'sql_create.jison', 'sql_drop.jison', 'sql_grant.jison', 'sql_insert.jison', 'sql_load.jison',
-      'sql_set.jison', 'sql_show.jison', 'sql_update.jison', 'sql_use.jison', 'autocomplete_footer.jison'
+      'autocomplete_header.jison',
+      'sql_main.jison',
+      'sql_valueExpression.jison',
+      'sql_error.jison',
+      'sql_alter.jison',
+      'sql_analyze.jison',
+      'sql_create.jison',
+      'sql_drop.jison',
+      'sql_grant.jison',
+      'sql_insert.jison',
+      'sql_load.jison',
+      'sql_set.jison',
+      'sql_show.jison',
+      'sql_update.jison',
+      'sql_use.jison',
+      'autocomplete_footer.jison'
     ],
     target: 'sqlAutocompleteParser.jison',
     lexer: 'sql.jisonlex',
-    afterParse: (contents) => new Promise(resolve => {
-      resolve(LICENSE +
-        contents.replace('var sqlAutocompleteParser = ', 'import SqlParseSupport from \'parse/sqlParseSupport\';\n\nvar sqlAutocompleteParser = ') +
-        '\nexport default sqlAutocompleteParser;\n');
-    })
+    afterParse: contents =>
+      new Promise(resolve => {
+        resolve(
+          LICENSE +
+            contents.replace(
+              'var sqlAutocompleteParser = ',
+              "import SqlParseSupport from 'parse/sqlParseSupport';\n\nvar sqlAutocompleteParser = "
+            ) +
+            '\nexport default sqlAutocompleteParser;\n'
+        );
+      })
   },
   sqlStatementsParser: {
     sources: ['sqlStatementsParser.jison'],
     target: 'sqlStatementsParser.jison',
-    afterParse: (contents) => new Promise(resolve => {
-      resolve(LICENSE + contents.replace('parse: function parse', SQL_STATEMENTS_PARSER_JSDOC + 'parse: function parse') + 'export default sqlStatementsParser;\n');
-    })
+    afterParse: contents =>
+      new Promise(resolve => {
+        resolve(
+          LICENSE +
+            contents.replace(
+              'parse: function parse',
+              SQL_STATEMENTS_PARSER_JSDOC + 'parse: function parse'
+            ) +
+            'export default sqlStatementsParser;\n'
+        );
+      })
   },
   sqlSyntaxParser: {
     sources: [
-      'syntax_header.jison', 'sql_main.jison', 'sql_valueExpression.jison', 'sql_alter.jison', 'sql_analyze.jison',
-      'sql_create.jison', 'sql_drop.jison', 'sql_grant.jison', 'sql_insert.jison', 'sql_load.jison', 'sql_set.jison',
-      'sql_show.jison', 'sql_update.jison', 'sql_use.jison', 'syntax_footer.jison'
+      'syntax_header.jison',
+      'sql_main.jison',
+      'sql_valueExpression.jison',
+      'sql_alter.jison',
+      'sql_analyze.jison',
+      'sql_create.jison',
+      'sql_drop.jison',
+      'sql_grant.jison',
+      'sql_insert.jison',
+      'sql_load.jison',
+      'sql_set.jison',
+      'sql_show.jison',
+      'sql_update.jison',
+      'sql_use.jison',
+      'syntax_footer.jison'
     ],
     target: 'sqlSyntaxParser.jison',
     lexer: 'sql.jisonlex',
-    afterParse: (contents) => new Promise(resolve => {
-      resolve(LICENSE +
-        contents.replace('var sqlSyntaxParser = ', 'import SqlParseSupport from \'parse/sqlParseSupport\';\n\nvar sqlSyntaxParser = ')
-          .replace('loc: yyloc,', 'loc: lexer.yylloc, ruleId: stack.slice(stack.length - 2, stack.length).join(\'\'),') +
-        '\nexport default sqlSyntaxParser;\n');
-    })
-  },
+    afterParse: contents =>
+      new Promise(resolve => {
+        resolve(
+          LICENSE +
+            contents
+              .replace(
+                'var sqlSyntaxParser = ',
+                "import SqlParseSupport from 'parse/sqlParseSupport';\n\nvar sqlSyntaxParser = "
+              )
+              .replace(
+                'loc: yyloc,',
+                "loc: lexer.yylloc, ruleId: stack.slice(stack.length - 2, stack.length).join(''),"
+              ) +
+            '\nexport default sqlSyntaxParser;\n'
+        );
+      })
+  }
 };
 
-const readFile = (path) => new Promise((resolve, reject) => {
-  fs.readFile(path, (err, buf) => {
-    if (err) {
-      reject();
-    }
-    resolve(buf.toString());
-  })
-});
+const readFile = path =>
+  new Promise((resolve, reject) => {
+    fs.readFile(path, (err, buf) => {
+      if (err) {
+        reject();
+      }
+      resolve(buf.toString());
+    });
+  });
 
-const writeFile = (path, contents) => new Promise((resolve, reject) => {
-  fs.writeFile(path, contents, function(err, data) {
-    if (err) {
-      reject();
-    }
-    resolve();
+const writeFile = (path, contents) =>
+  new Promise((resolve, reject) => {
+    fs.writeFile(path, contents, (err, data) => {
+      if (err) {
+        reject();
+      }
+      resolve();
+    });
   });
-});
 
-const deleteFile = (path) => {
+const deleteFile = path => {
   fs.unlinkSync(path);
 };
 
-const execCmd = (cmd) => new Promise((resolve, reject) => {
-  exec(cmd, function(err, stdout, stderr) {
-    if (err) {
-      reject(stderr);
-    }
-    resolve();
+const execCmd = cmd =>
+  new Promise((resolve, reject) => {
+    exec(cmd, (err, stdout, stderr) => {
+      if (err) {
+        reject(stderr);
+      }
+      resolve();
+    });
   });
-});
 
-const generateParser = parserName => new Promise((resolve, reject) => {
-  let parserConfig = PARSERS[parserName];
-
-  let concatPromise = new Promise((resolve, reject) => {
-    if (parserConfig.sources.length > 1 && parserConfig.target) {
-      console.log('Concatenating files...');
-      let promises = parserConfig.sources.map(fileName => readFile(JISON_FOLDER + fileName));
-
-      Promise.all(promises).then(contents => {
-        writeFile(JISON_FOLDER + parserConfig.target, contents).then(() => {
-          resolve(JISON_FOLDER + parserConfig.target)
-        })
-      }).catch(reject);
-    } else if (parserConfig.sources.length === 1) {
-      resolve(JISON_FOLDER + parserConfig.sources[0]);
-    } else {
-      reject('No jison source specified');
-    }
-  });
+const generateParser = parserName =>
+  new Promise((resolve, reject) => {
+    const parserConfig = PARSERS[parserName];
 
+    const concatPromise = new Promise((resolve, reject) => {
+      if (parserConfig.sources.length > 1 && parserConfig.target) {
+        console.log('Concatenating files...');
+        const promises = parserConfig.sources.map(fileName => readFile(JISON_FOLDER + fileName));
 
-  concatPromise.then((targetPath) => {
-    let jisonCommand = 'jison ' + targetPath;
-    if (parserConfig.lexer) {
-      jisonCommand += ' ' + JISON_FOLDER + parserConfig.lexer
-    }
-    jisonCommand += ' -m js';
-    console.log('Generating parser...');
-    execCmd(jisonCommand).then(() => {
-      if (parserConfig.sources.length > 1) {
-        deleteFile(targetPath); // Remove concatenated file
+        Promise.all(promises)
+          .then(contents => {
+            writeFile(JISON_FOLDER + parserConfig.target, contents).then(() => {
+              resolve(JISON_FOLDER + parserConfig.target);
+            });
+          })
+          .catch(reject);
+      } else if (parserConfig.sources.length === 1) {
+        resolve(JISON_FOLDER + parserConfig.sources[0]);
+      } else {
+        reject('No jison source specified');
       }
-      console.log('Adjusting JS...');
-      let generatedJsFileName = parserConfig.target.replace('.jison', '.js');
-      readFile(generatedJsFileName).then(contents => {
-        parserConfig.afterParse(contents).then(finalContents => {
-          writeFile(TARGET_FOLDER + generatedJsFileName, finalContents).then(() => {
-            deleteFile(generatedJsFileName);
-            console.log('Done!\n');
-            resolve();
-          }).catch(reject);
-        }).catch(reject);
-      }).catch(reject);
-    }).catch(reject);
-  }).catch(reject);
-});
+    });
+
+    concatPromise
+      .then(targetPath => {
+        let jisonCommand = 'jison ' + targetPath;
+        if (parserConfig.lexer) {
+          jisonCommand += ' ' + JISON_FOLDER + parserConfig.lexer;
+        }
+        jisonCommand += ' -m js';
+        console.log('Generating parser...');
+        execCmd(jisonCommand)
+          .then(() => {
+            if (parserConfig.sources.length > 1) {
+              deleteFile(targetPath); // Remove concatenated file
+            }
+            console.log('Adjusting JS...');
+            const generatedJsFileName = parserConfig.target.replace('.jison', '.js');
+            readFile(generatedJsFileName)
+              .then(contents => {
+                parserConfig
+                  .afterParse(contents)
+                  .then(finalContents => {
+                    writeFile(TARGET_FOLDER + generatedJsFileName, finalContents)
+                      .then(() => {
+                        deleteFile(generatedJsFileName);
+                        console.log('Done!\n');
+                        resolve();
+                      })
+                      .catch(reject);
+                  })
+                  .catch(reject);
+              })
+              .catch(reject);
+          })
+          .catch(reject);
+      })
+      .catch(reject);
+  });
 
 let parsersToGenerate = [];
 const invalid = [];
@@ -206,31 +281,32 @@ if (all) {
 }
 
 if (invalid.length) {
-  console.log('No parser config found for: \'' + invalid.join('\', \'') + '\'');
-  console.log('\nPossible options are:\n  ' + ['all'].concat(Object.keys(PARSERS)).join('\n  ') + '\n');
+  console.log("No parser config found for: '" + invalid.join("', '") + "'");
+  console.log(
+    '\nPossible options are:\n  ' + ['all'].concat(Object.keys(PARSERS)).join('\n  ') + '\n'
+  );
   return;
 }
 
-parserCount = parsersToGenerate.length;
+const parserCount = parsersToGenerate.length;
 let idx = 0;
 
 const generateRecursive = () => {
   idx++;
   if (parsersToGenerate.length) {
-    let parserName = parsersToGenerate.pop();
+    const parserName = parsersToGenerate.pop();
     if (parserCount > 1) {
-      console.log('Generating \'' + parserName + '\' (' + idx + '/' + parserCount + ')...');
+      console.log("Generating '" + parserName + "' (" + idx + '/' + parserCount + ')...');
     } else {
-      console.log('Generating \'' + parserName + '\'...');
+      console.log("Generating '" + parserName + "'...");
     }
-    generateParser(parserName).then(generateRecursive).catch(error => {
-      console.log(error);
-      console.log('FAIL!');
-    })
+    generateParser(parserName)
+      .then(generateRecursive)
+      .catch(error => {
+        console.log(error);
+        console.log('FAIL!');
+      });
   }
 };
 
 generateRecursive();
-
-
-

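As an aside, the hand-rolled readFile, writeFile and execCmd wrappers above follow the usual callback-to-promise pattern (note they call resolve() even after reject() on error, which is harmless since a settled promise ignores later calls). With Node's util.promisify the same helpers could be written more compactly; a sketch, not part of this commit:

const util = require('util');
const fs = require('fs');
const childProcess = require('child_process');

// Promise-returning equivalents of the wrappers defined in generateParsers.js.
// Differences: errors reject with the Error object (not just stderr), and
// readFile resolves a string directly by passing the 'utf8' encoding.
const readFile = path => util.promisify(fs.readFile)(path, 'utf8');
const writeFile = util.promisify(fs.writeFile);
const execCmd = util.promisify(childProcess.exec);
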
+ 2 - 2
tools/sql-docs/DocFragment.js

@@ -19,7 +19,7 @@
  * to specific sections of a topic.
  */
 class DocFragment {
-  constructor (id, domElement) {
+  constructor(id, domElement) {
     this.id = id;
     this.domElement = domElement;
 
@@ -29,4 +29,4 @@ class DocFragment {
   }
 }
 
-module.exports = DocFragment;
+module.exports = DocFragment;

+ 6 - 4
tools/sql-docs/Topic.js

@@ -20,14 +20,13 @@ const libxml = require('libxmljs');
  * Class representing a doc topic
  */
 class Topic {
-
   /**
    * Create a topic
    *
    * @param {string} docRootPath - The start path
    * @param {string} ref - The relative path of the topic
    */
-  constructor (docRootPath, ref) {
+  constructor(docRootPath, ref) {
     this.docRootPath = docRootPath;
     this.ref = ref;
     this.children = [];
@@ -40,9 +39,12 @@ class Topic {
   toJson() {
     return JSON.stringify({
       body: this.domXml.toString(),
-      title: this.fragment.title.text().replace(/[\n\r]/g, ' ').trim()
+      title: this.fragment.title
+        .text()
+        .replace(/[\n\r]/g, ' ')
+        .trim()
     });
   }
 }
 
-module.exports = Topic;
+module.exports = Topic;

+ 71 - 30
tools/sql-docs/ditamapParser.js

@@ -43,60 +43,95 @@ const LOG_NAME = 'ditamapParser.js: ';
  *
  * @return {Promise<DitamapParseResult>} - A promise of the Topic tree and index
  */
-const parseDitamap = (ditamapFile, docRootPath) => new Promise((resolve, reject) => {
-  let parseResult = {
-    topics: [],
-    topicIndex: {},
-    keyDefs: {}
-  };
-  extractFromDitamapFile(ditamapFile, docRootPath, parseResult).then(() => {
-    resolve(parseResult);
-  }).catch(reject);
-});
+const parseDitamap = (ditamapFile, docRootPath) =>
+  new Promise((resolve, reject) => {
+    const parseResult = {
+      topics: [],
+      topicIndex: {},
+      keyDefs: {}
+    };
+    extractFromDitamapFile(ditamapFile, docRootPath, parseResult)
+      .then(() => {
+        resolve(parseResult);
+      })
+      .catch(reject);
+  });
 
-const extractFromDitamapFile = (ditamapFile, docRootPath, parseResult) => new Promise((resolve, reject) => {
-  extractorUtils.readFile(docRootPath + ditamapFile).then(contents => {
-    let mapNode = libxml.parseXmlString(contents).get('//map');
-    extractFromMapNode(mapNode, ditamapFile, docRootPath, parseResult).then(resolve).catch(reject);
-  }).catch(reject);
-});
+const extractFromDitamapFile = (ditamapFile, docRootPath, parseResult) =>
+  new Promise((resolve, reject) => {
+    extractorUtils
+      .readFile(docRootPath + ditamapFile)
+      .then(contents => {
+        const mapNode = libxml.parseXmlString(contents).get('//map');
+        extractFromMapNode(mapNode, ditamapFile, docRootPath, parseResult)
+          .then(resolve)
+          .catch(reject);
+      })
+      .catch(reject);
+  });
 
 const extractFromMapNode = (mapNode, ditamapFile, docRootPath, parseResult) => {
-  let promises = [];
+  const promises = [];
 
-  let handleMapNodeChildren = (childNodes, currentTopic) => {
+  const handleMapNodeChildren = (childNodes, currentTopic) => {
     childNodes.forEach(node => {
       switch (node.name()) {
         case 'topicref': {
           if (extractorUtils.hasAttributes(node, 'href')) {
-            if (~node.attr('href').value().indexOf('.ditamap')) {
-              promises.push(extractFromDitamapFile(node.attr('href').value(), docRootPath, parseResult));
+            if (
+              ~node
+                .attr('href')
+                .value()
+                .indexOf('.ditamap')
+            ) {
+              promises.push(
+                extractFromDitamapFile(node.attr('href').value(), docRootPath, parseResult)
+              );
               break;
             }
-            let topic = new Topic(docRootPath, node.attr('href').value());
+            const topic = new Topic(docRootPath, node.attr('href').value());
             if (currentTopic) {
               currentTopic.children.push(topic);
             } else {
               parseResult.topics.push(topic);
             }
-            parseResult.topicIndex[node.attr('href').value().replace(/#.*$/, '')] = topic;
+            parseResult.topicIndex[
+              node
+                .attr('href')
+                .value()
+                .replace(/#.*$/, '')
+            ] = topic;
             handleMapNodeChildren(node.childNodes(), topic);
           } else {
-            console.log('%s: Couldn\'t handle "topicref" node: %s in file %s%s', LOG_NAME,  node.toString(), docRootPath, ditamapFile);
+            console.log(
+              '%s: Couldn\'t handle "topicref" node: %s in file %s%s',
+              LOG_NAME,
+              node.toString(),
+              docRootPath,
+              ditamapFile
+            );
           }
           break;
         }
         case 'mapref': {
           if (extractorUtils.hasAttributes(node, 'href')) {
-            promises.push(extractFromDitamapFile(node.attr('href').value(), docRootPath, parseResult));
+            promises.push(
+              extractFromDitamapFile(node.attr('href').value(), docRootPath, parseResult)
+            );
           } else {
-            console.log('%s: Couldn\'t handle "mapref" node: \n%s in file %s%s', LOG_NAME,  node.toString(), docRootPath, ditamapFile);
+            console.log(
+              '%s: Couldn\'t handle "mapref" node: \n%s in file %s%s',
+              LOG_NAME,
+              node.toString(),
+              docRootPath,
+              ditamapFile
+            );
           }
           break;
         }
         case 'keydef':
           if (extractorUtils.hasAttributes(node, 'keys')) {
-            let valNode = node.get('topicmeta/keywords/keyword');
+            const valNode = node.get('topicmeta/keywords/keyword');
             if (valNode) {
               parseResult.keyDefs[node.attr('keys').value()] = { text: valNode.text() };
             } else if (node.attr('href')) {
@@ -106,7 +141,7 @@ const extractFromMapNode = (mapNode, ditamapFile, docRootPath, parseResult) => {
                 parseResult.keyDefs[node.attr('keys').value()] = {
                   href: node.attr('href').value(),
                   external: node.attr('scope') && node.attr('scope').value() === 'external'
-                }
+                };
               }
             }
           }
@@ -117,9 +152,15 @@ const extractFromMapNode = (mapNode, ditamapFile, docRootPath, parseResult) => {
         case 'topicmeta':
           break;
         default:
-          console.log('%s: Couldn\'t handle map node: \n%s in file %s%s', LOG_NAME,  node.toString(), docRootPath, ditamapFile);
+          console.log(
+            "%s: Couldn't handle map node: \n%s in file %s%s",
+            LOG_NAME,
+            node.toString(),
+            docRootPath,
+            ditamapFile
+          );
       }
-    })
+    });
   };
 
   handleMapNodeChildren(mapNode.childNodes());
@@ -129,4 +170,4 @@ const extractFromMapNode = (mapNode, ditamapFile, docRootPath, parseResult) => {
 
 module.exports = {
   parseDitamap: parseDitamap
-};
+};

+ 25 - 15
tools/sql-docs/docExtractor.js

@@ -28,29 +28,37 @@ const LOG_NAME = 'docExtractor.js';
 program
   .version('1.0')
   .option('-f, --folder [path]', 'the folder where the ditamap file(s) reside (required)')
-  .option('-d, --ditamap [path]', 'comma-separated ditamap file names, the first will define the topic ' +
-    'tree (at least one file is required). Note that there should be no whitespace around the \',\'')
+  .option(
+    '-d, --ditamap [path]',
+    'comma-separated ditamap file names, the first will define the topic ' +
+      "tree (at least one file is required). Note that there should be no whitespace around the ','"
+  )
   .option('-o, --output [path]', 'output folder where the json files will be written to (required)')
   .option('-c, --cssClassPrefix [prefix]', 'optional css class prefix')
-  .option('-m, --mako [path]', 'optional path to a .mako file where the index is written, ' +
-    'used for django if the output folder is a static resource')
+  .option(
+    '-m, --mako [path]',
+    'optional path to a .mako file where the index is written, ' +
+      'used for django if the output folder is a static resource'
+  )
   .parse(process.argv);
 
 extractorUtils.checkArguments(program);
 
-const ensureTrailingSlash = (path) => {
+const ensureTrailingSlash = path => {
   if (!path.endsWith('/')) {
     return path + '/';
   }
   return path;
 };
 
-let ditamapFiles = program.ditamap.split(',').map(file => file.trim());
+const ditamapFiles = program.ditamap.split(',').map(file => file.trim());
 
 console.log('%s: Parsing ditamap file(s)...', LOG_NAME);
-let ditamapParsePromises = ditamapFiles.map(ditamapFile => ditamapParser.parseDitamap(ditamapFile, ensureTrailingSlash(program.folder)));
+const ditamapParsePromises = ditamapFiles.map(ditamapFile =>
+  ditamapParser.parseDitamap(ditamapFile, ensureTrailingSlash(program.folder))
+);
 
-Promise.all(ditamapParsePromises).then((parseResults) => {
+Promise.all(ditamapParsePromises).then(parseResults => {
   let cssClassPrefix = program.cssClassPrefix || '';
   if (cssClassPrefix && !/-$/.test(cssClassPrefix)) {
     cssClassPrefix += '-';
@@ -62,12 +70,14 @@ Promise.all(ditamapParsePromises).then((parseResults) => {
     topicLinker.linkTopics(parseResults, cssClassPrefix);
 
     console.log('%s: Saving topic tree json files...', LOG_NAME);
-    jsonHandler.saveTopics(parseResults[0].topics, ensureTrailingSlash(program.output), program.mako).then((savedFiles) => {
-      console.log('%s: Done! Saved %d files.', LOG_NAME, savedFiles.length);
-    }).catch(err => {
-      console.log('%s: Failed saving files!', LOG_NAME);
-      console.log(err);
-    });
+    jsonHandler
+      .saveTopics(parseResults[0].topics, ensureTrailingSlash(program.output), program.mako)
+      .then(savedFiles => {
+        console.log('%s: Done! Saved %d files.', LOG_NAME, savedFiles.length);
+      })
+      .catch(err => {
+        console.log('%s: Failed saving files!', LOG_NAME);
+        console.log(err);
+      });
   });
 });
-

+ 127 - 78
tools/sql-docs/docXmlParser.js

@@ -21,8 +21,10 @@ const libxml = require('libxmljs');
 
 const LOG_NAME = 'docXmlParser.js';
 
-const isHidden = (docElement) => docElement.attr('audience') &&
-  (docElement.attr('audience').value() === 'hidden' || docElement.attr('audience').value() === 'PDF');
+const isHidden = docElement =>
+  docElement.attr('audience') &&
+  (docElement.attr('audience').value() === 'hidden' ||
+    docElement.attr('audience').value() === 'PDF');
 
 // Turn relative anchor or topic links into absolute
 const makeAbsoluteRef = (href, topic) => {
@@ -45,23 +47,35 @@ const makeAbsoluteRef = (href, topic) => {
 
 const parseTopic = (topic, cssClassPrefix, conrefCallback) => {
   return new Promise((resolve, reject) => {
-    extractorUtils.readFile(topic.docRootPath + (~topic.ref.indexOf('#') ? topic.ref.replace(/#.*$/, '') : topic.ref)).then(contents => {
-      let xmlDoc = libxml.parseXmlString(contents);
-      let docElement = xmlDoc.root();
-      if (~topic.ref.indexOf('#')) {
-        docElement = docElement.get('//*[@id=\'' + topic.ref.replace(/^.*#/, '') + '\']')
-      }
-      parseDocElement(docElement, topic.domXml, cssClassPrefix, topic, undefined, conrefCallback);
+    extractorUtils
+      .readFile(
+        topic.docRootPath + (~topic.ref.indexOf('#') ? topic.ref.replace(/#.*$/, '') : topic.ref)
+      )
+      .then(contents => {
+        const xmlDoc = libxml.parseXmlString(contents);
+        let docElement = xmlDoc.root();
+        if (~topic.ref.indexOf('#')) {
+          docElement = docElement.get("//*[@id='" + topic.ref.replace(/^.*#/, '') + "']");
+        }
+        parseDocElement(docElement, topic.domXml, cssClassPrefix, topic, undefined, conrefCallback);
 
-      resolve();
-    }).catch(reject);
-  })
+        resolve();
+      })
+      .catch(reject);
+  });
 };
 
-const parseDocElement = (docElement, domElement, cssClassPrefix, topic, activeFragment, conrefCallback) => {
+const parseDocElement = (
+  docElement,
+  domElement,
+  cssClassPrefix,
+  topic,
+  activeFragment,
+  conrefCallback
+) => {
   // return in the switch stops the recursion at this node
   if (extractorUtils.hasAttributes(docElement, 'conref')) {
-    let absoluteConRef = makeAbsoluteRef(docElement.attr('conref').value(), topic);
+    const absoluteConRef = makeAbsoluteRef(docElement.attr('conref').value(), topic);
     docElement.attr('conref', absoluteConRef);
     conrefCallback(topic, absoluteConRef.replace(/#.*$/, ''));
   }
@@ -80,11 +94,11 @@ const parseDocElement = (docElement, domElement, cssClassPrefix, topic, activeFr
     case 'colspec':
     case 'dlentry':
       if (extractorUtils.hasAttributes(docElement, 'id')) {
-        let id = docElement.attr('id') && docElement.attr('id').value();
+        const id = docElement.attr('id') && docElement.attr('id').value();
         // Move id attribute to first child element
-        for (let node of docElement.childNodes()) {
+        for (const node of docElement.childNodes()) {
           if (node.type() === 'element') {
-            node.attr({'id': id});
+            node.attr({ id: id });
             break;
           }
         }
@@ -124,7 +138,7 @@ const parseDocElement = (docElement, domElement, cssClassPrefix, topic, activeFr
       break;
     case 'sthead':
       domElement = domElement.node('tr');
-      domElement.attr({ 'class': cssClassPrefix + 'doc-sthead' });
+      domElement.attr({ class: cssClassPrefix + 'doc-sthead' });
       break;
     case 'stentry':
       domElement = domElement.node('td');
@@ -137,15 +151,28 @@ const parseDocElement = (docElement, domElement, cssClassPrefix, topic, activeFr
       domElement = domElement.node('tr');
       break;
     case 'entry':
-      if (docElement.parent().name().toLowerCase() === 'row') {
+      if (
+        docElement
+          .parent()
+          .name()
+          .toLowerCase() === 'row'
+      ) {
         domElement = domElement.node('td');
       } else {
-        console.log('%s: Got "entry" element without a parent "row": %s in ref %s', LOG_NAME, docElement.toString(), topic.ref);
+        console.log(
+          '%s: Got "entry" element without a parent "row": %s in ref %s',
+          LOG_NAME,
+          docElement.toString(),
+          topic.ref
+        );
         return;
       }
       break;
     case 'xref':
-      if (extractorUtils.hasAttributes(docElement, 'href') && (!docElement.attr('scope') || docElement.attr('scope').value() !== 'external')) {
+      if (
+        extractorUtils.hasAttributes(docElement, 'href') &&
+        (!docElement.attr('scope') || docElement.attr('scope').value() !== 'external')
+      ) {
         docElement.attr('href', makeAbsoluteRef(docElement.attr('href').value(), topic));
       }
     case 'image':
@@ -154,21 +181,29 @@ const parseDocElement = (docElement, domElement, cssClassPrefix, topic, activeFr
       // These elements are dealt with later, we don't deep clone as there might be child elements to parse
       domElement = domElement.node(docElement.name());
       docElement.attrs().forEach(attr => {
-        domElement.attr(attr.name(), attr.value())
+        domElement.attr(attr.name(), attr.value());
       });
       break;
     case 'object':
       if (extractorUtils.hasAttributes(docElement, ['data', 'outputclass'])) {
         domElement = domElement.node('iframe');
-        domElement.attr({ 'class': cssClassPrefix + 'doc-iframe', 'src': docElement.attr('data').value() });
+        domElement.attr({
+          class: cssClassPrefix + 'doc-iframe',
+          src: docElement.attr('data').value()
+        });
         if (extractorUtils.hasAttributes(docElement, 'width')) {
-          domElement.attr({ 'width': docElement.attr('width').value() });
+          domElement.attr({ width: docElement.attr('width').value() });
         }
         if (extractorUtils.hasAttributes(docElement, 'height')) {
-          domElement.attr({ 'height': docElement.attr('height').value() });
+          domElement.attr({ height: docElement.attr('height').value() });
         }
       } else {
-        console.log('%s: Got "object" element without data and outputclass: %s in ref %s', LOG_NAME, docElement.toString(), topic.ref);
+        console.log(
+          '%s: Got "object" element without data and outputclass: %s in ref %s',
+          LOG_NAME,
+          docElement.toString(),
+          topic.ref
+        );
         return;
       }
       break;
@@ -189,7 +224,7 @@ const parseDocElement = (docElement, domElement, cssClassPrefix, topic, activeFr
         return;
       }
       domElement = domElement.node('span');
-      domElement.attr({ 'class': cssClassPrefix + 'doc-' + docElement.name() });
+      domElement.attr({ class: cssClassPrefix + 'doc-' + docElement.name() });
       break;
     case 'codeblock':
     case 'conbodydiv':
@@ -206,16 +241,18 @@ const parseDocElement = (docElement, domElement, cssClassPrefix, topic, activeFr
         return;
       }
       domElement = domElement.node('div');
-      domElement.attr({ 'class': cssClassPrefix + 'doc-' + docElement.name() });
+      domElement.attr({ class: cssClassPrefix + 'doc-' + docElement.name() });
       if (docElement.name() === 'title' && activeFragment && !activeFragment.title) {
         activeFragment.title = domElement;
       }
       break;
     case 'text':
       if (docElement.text().trim()) {
-        let firstInDiv = domElement.name() === 'div' && domElement.childNodes().length === 0;
+        const firstInDiv = domElement.name() === 'div' && domElement.childNodes().length === 0;
         domElement = domElement.node('text');
-        domElement.replace(firstInDiv ? docElement.text().replace(/^[\n\r]*/, '') : docElement.text());
+        domElement.replace(
+          firstInDiv ? docElement.text().replace(/^[\n\r]*/, '') : docElement.text()
+        );
       }
       break;
     case 'abstract':
@@ -236,26 +273,28 @@ const parseDocElement = (docElement, domElement, cssClassPrefix, topic, activeFr
     case undefined:
       if (/^<\!\[cdata.*/i.test(docElement.toString())) {
         if (docElement.text().trim()) {
-          let firstInDiv = domElement.name() === 'div' && domElement.childNodes().length === 0;
+          const firstInDiv = domElement.name() === 'div' && domElement.childNodes().length === 0;
           domElement = domElement.node('text');
-          domElement.replace(firstInDiv ? docElement.text().replace(/^[\n\r]*/, '') : docElement.text());
+          domElement.replace(
+            firstInDiv ? docElement.text().replace(/^[\n\r]*/, '') : docElement.text()
+          );
         }
         break;
       }
     default:
-      console.log('%s: Can\'t handle node: %s in ref %s', LOG_NAME, docElement.name(), topic.ref);
+      console.log("%s: Can't handle node: %s in ref %s", LOG_NAME, docElement.name(), topic.ref);
       return;
   }
 
   if (isHidden(docElement)) {
-    domElement.attr({ 'style': 'display:none;' });
+    domElement.attr({ style: 'display:none;' });
   }
 
   if (extractorUtils.hasAttributes(docElement, 'id')) {
-    let fragmentId = docElement.attr('id') && docElement.attr('id').value();
-    let newFragment = new DocFragment(fragmentId, domElement);
+    const fragmentId = docElement.attr('id') && docElement.attr('id').value();
+    const newFragment = new DocFragment(fragmentId, domElement);
     if (!extractorUtils.hasAttributes(domElement, 'id') && domElement.type() === 'element') {
-      domElement.attr({'id': fragmentId});
+      domElement.attr({ id: fragmentId });
     }
     if (!topic.fragment) {
       topic.fragment = newFragment;
@@ -265,10 +304,17 @@ const parseDocElement = (docElement, domElement, cssClassPrefix, topic, activeFr
     activeFragment = newFragment;
   }
 
-  if (extractorUtils.hasAttributes(docElement, 'conref') && !extractorUtils.hasAttributes(domElement, 'conref')) {
+  if (
+    extractorUtils.hasAttributes(docElement, 'conref') &&
+    !extractorUtils.hasAttributes(domElement, 'conref')
+  ) {
     domElement.attr('conref', docElement.attr('conref').value());
   }
-  docElement.childNodes().forEach(childNode => parseDocElement(childNode, domElement, cssClassPrefix, topic, activeFragment, conrefCallback));
+  docElement
+    .childNodes()
+    .forEach(childNode =>
+      parseDocElement(childNode, domElement, cssClassPrefix, topic, activeFragment, conrefCallback)
+    );
 };
 
 /**
@@ -279,51 +325,54 @@ const parseDocElement = (docElement, domElement, cssClassPrefix, topic, activeFr
  * @param cssClassPrefix
  * @return {Promise}
  */
-const parseTopics = (parseResults, cssClassPrefix) => new Promise((resolve, reject) => {
-  let topicIndex = {};
-  let topicsToParse = [];
+const parseTopics = (parseResults, cssClassPrefix) =>
+  new Promise((resolve, reject) => {
+    const topicIndex = {};
+    const topicsToParse = [];
 
-  let populateTopicsFromTree = topics => {
-    topics.forEach(topic => {
-      topicsToParse.push(topic);
-      topicIndex[topic.ref] = true;
-      populateTopicsFromTree(topic.children);
-    })
-  };
+    const populateTopicsFromTree = topics => {
+      topics.forEach(topic => {
+        topicsToParse.push(topic);
+        topicIndex[topic.ref] = true;
+        populateTopicsFromTree(topic.children);
+      });
+    };
 
-  // Topics might be referenced from within .xml files thar are not part of the ditamap, we add them here to make
-  // sure they're parsed
-  let conrefCallback = (sourceTopic, ref) => {
-    if (!topicIndex[ref]) {
-      let topic = new Topic(sourceTopic.docRootPath, ref);
-      topicIndex[ref] = true;
-      topicsToParse.push(topic);
-      if (parseResults.length < 2) {
-        // We add additional topics to any ditamap parseresults except the first one, this prevents
-        // them from being part of the tree.
-        parseResults.push({
-          topics: [],
-          topicIndex: {},
-          keyDefs: {}
-        })
+    // Topics might be referenced from within .xml files thar are not part of the ditamap, we add them here to make
+    // sure they're parsed
+    const conrefCallback = (sourceTopic, ref) => {
+      if (!topicIndex[ref]) {
+        const topic = new Topic(sourceTopic.docRootPath, ref);
+        topicIndex[ref] = true;
+        topicsToParse.push(topic);
+        if (parseResults.length < 2) {
+          // We add additional topics to any ditamap parseresults except the first one, this prevents
+          // them from being part of the tree.
+          parseResults.push({
+            topics: [],
+            topicIndex: {},
+            keyDefs: {}
+          });
+        }
+        parseResults[parseResults.length - 1].topicIndex[ref] = topic;
       }
-      parseResults[parseResults.length - 1].topicIndex[ref] = topic;
-    }
-  };
+    };
 
-  parseResults.forEach(parseResult => populateTopicsFromTree(parseResult.topics));
+    parseResults.forEach(parseResult => populateTopicsFromTree(parseResult.topics));
 
-  let parseNextTopic = () => {
-    if (topicsToParse.length) {
-      parseTopic(topicsToParse.shift(), cssClassPrefix, conrefCallback).then(parseNextTopic).catch(reject);
-    } else {
-      resolve();
-    }
-  };
-  parseNextTopic();
-});
+    const parseNextTopic = () => {
+      if (topicsToParse.length) {
+        parseTopic(topicsToParse.shift(), cssClassPrefix, conrefCallback)
+          .then(parseNextTopic)
+          .catch(reject);
+      } else {
+        resolve();
+      }
+    };
+    parseNextTopic();
+  });
 
 module.exports = {
   parseTopics: parseTopics,
   isHidden: isHidden
-};
+};

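The parseNextTopic recursion above deliberately parses topics one at a time, because conrefCallback can append newly discovered topics to topicsToParse while parsing is in progress. The same "drain a queue that may grow while you consume it" pattern, as a standalone async/await sketch (names here are illustrative, not exports of this module):

// Sequentially process a queue whose handler may push more items onto it.
async function processQueue(queue, handler) {
  while (queue.length) {
    await handler(queue.shift());
  }
}

// Usage sketch, mirroring parseTopics:
// await processQueue(topicsToParse, topic => parseTopic(topic, cssClassPrefix, conrefCallback));
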
+ 28 - 21
tools/sql-docs/extractorUtils.js

@@ -15,7 +15,6 @@
 // limitations under the License.
 
 const fs = require('fs');
-const util = require('util');
 
 const LOG_NAME = 'extractorUtils.js';
 
@@ -25,17 +24,18 @@ const LOG_NAME = 'extractorUtils.js';
  * @param {string} path
  * @return {Promise} - A promise, fulfilled with the file contents or rejected
  */
-const readFile = path => new Promise((resolve, reject) => {
-  fs.readFile(path, 'utf8', (err, contents) => {
-    if (err) {
-      console.log('%s: Could not read file \'%s\'', LOG_NAME, path);
-      console.log(err);
-      reject(err)
-    } else {
-      resolve(contents);
-    }
-  })
-});
+const readFile = path =>
+  new Promise((resolve, reject) => {
+    fs.readFile(path, 'utf8', (err, contents) => {
+      if (err) {
+        console.log("%s: Could not read file '%s'", LOG_NAME, path);
+        console.log(err);
+        reject(err);
+      } else {
+        resolve(contents);
+      }
+    });
+  });
 
 /**
  * Returns the parent folder of a file path
@@ -78,7 +78,14 @@ const hasAttributes = (node, attributes) => {
   if (typeof attributes === 'string') {
     attributes = [attributes];
   }
-  return attributes.every(attribute => node.attr(attribute) && node.attr(attribute).value().trim())
+  return attributes.every(
+    attribute =>
+      node.attr(attribute) &&
+      node
+        .attr(attribute)
+        .value()
+        .trim()
+  );
 };
 
 /**
@@ -89,7 +96,7 @@ const hasAttributes = (node, attributes) => {
 const removeAllAttributes = node => {
   node.attrs().forEach(attr => {
     attr.remove();
-  })
+  });
 };
 
 /**
@@ -104,9 +111,9 @@ const findFragmentInTopic = (topic, anchorId) => {
   if (!anchorId) {
     return topic.fragment;
   }
-  let splitIds = anchorId.split('/');
+  const splitIds = anchorId.split('/');
 
-  let findDeep = (fragments, id) => {
+  const findDeep = (fragments, id) => {
     let foundFragment = undefined;
     fragments.some(fragment => {
       if (fragment.id === id) {
@@ -119,7 +126,7 @@ const findFragmentInTopic = (topic, anchorId) => {
     return foundFragment;
   };
 
-  let fragmentsToSearch = [ topic.fragment ];
+  let fragmentsToSearch = [topic.fragment];
 
   let result = undefined;
   while (splitIds.length) {
@@ -130,7 +137,7 @@ const findFragmentInTopic = (topic, anchorId) => {
     fragmentsToSearch = result.children;
   }
   if (!result) {
-    console.log('%s: Could not find id \'%s\' in ref \'%s\'', LOG_NAME, anchorId, topic.ref);
+    console.log("%s: Could not find id '%s' in ref '%s'", LOG_NAME, anchorId, topic.ref);
     return topic.fragment;
   }
   return result;
@@ -150,9 +157,9 @@ const findFragmentInTopic = (topic, anchorId) => {
  * @return {FragmentSearchResult}
  */
 const findFragment = (parseResults, ref, anchorId) => {
-  let result = { partOfTree: true, fragment: undefined };
+  const result = { partOfTree: true, fragment: undefined };
   parseResults.some(parseResult => {
-    let topic = parseResult.topicIndex[ref];
+    const topic = parseResult.topicIndex[ref];
     if (topic) {
       result.fragment = findFragmentInTopic(topic, anchorId);
     } else {
@@ -170,4 +177,4 @@ module.exports = {
   hasAttributes: hasAttributes,
   removeAllAttributes: removeAllAttributes,
   findFragment: findFragment
-};
+};

+ 60 - 46
tools/sql-docs/hiveExtractor.js

@@ -14,9 +14,6 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-const fs = require('fs');
-const mkdirp = require('mkdirp');
-const path = require('path');
 const program = require('commander');
 
 const EPub = require('epub');
@@ -52,7 +49,13 @@ const convertToPre = (element, fragments) => {
       }
       break;
     case 'code':
-      if (element.attr('class') && element.attr('class').value().indexOf('value') !== -1) {
+      if (
+        element.attr('class') &&
+        element
+          .attr('class')
+          .value()
+          .indexOf('value') !== -1
+      ) {
         fragments.push('<span class="hue-doc-varname">');
         element.childNodes().forEach(node => {
           convertToPre(node, fragments);
@@ -67,14 +70,20 @@ const convertToPre = (element, fragments) => {
   }
 };
 
-const adaptElement = (element) => {
-  if (element.attr('class') && element.attr('class').value().indexOf('syntaxhighlighter') !== -1) {
-    let fragments = ['<div class="hue-doc-codeblock">'];
+const adaptElement = element => {
+  if (
+    element.attr('class') &&
+    element
+      .attr('class')
+      .value()
+      .indexOf('syntaxhighlighter') !== -1
+  ) {
+    const fragments = ['<div class="hue-doc-codeblock">'];
     element.childNodes().forEach(childNode => {
       convertToPre(childNode, fragments);
     });
     fragments.push('</div>');
-    let replacement = fragments.join('');
+    const replacement = fragments.join('');
     element.replace(libxml.parseHtmlFragment(replacement).root());
   } else if (element.attr('class')) {
     element.attr('class').remove();
@@ -82,54 +91,59 @@ const adaptElement = (element) => {
   element.childNodes().forEach(adaptElement);
 };
 
-epub.on("end", function(){
-  let savePromises = [];
+epub.on('end', () => {
+  const rootTopics = [];
 
-  let rootTopics = [];
-  let topicStack = [];
+  const lastTopicPerLevel = {};
 
-  let lastTopicPerLevel = {};
-
-  let promises = [];
+  const promises = [];
 
   epub.flow.forEach(chapter => {
-    promises.push(new Promise((resolve, reject) => {
-      let topic = new Topic('/', chapter.id);
-      topic.fragment = {
-        title : {
-          text: () => chapter.title.replace(/LanguageManual\s(.+)/, '$1')
-        }
-      };
-
-      epub.getChapter(chapter.id, (error, text) => {
-        try {
-          let contents = libxml.parseHtmlFragment('<div>' + text + '</div>');
-          topic.domXml = contents.root();
-          adaptElement(topic.domXml);
-          resolve();
-        } catch (error) {
-          reject();
-        }
-      });
+    promises.push(
+      new Promise((resolve, reject) => {
+        const topic = new Topic('/', chapter.id);
+        topic.fragment = {
+          title: {
+            text: () => chapter.title.replace(/LanguageManual\s(.+)/, '$1')
+          }
+        };
+
+        epub.getChapter(chapter.id, (error, text) => {
+          try {
+            const contents = libxml.parseHtmlFragment('<div>' + text + '</div>');
+            topic.domXml = contents.root();
+            adaptElement(topic.domXml);
+            resolve();
+          } catch (error) {
+            reject();
+          }
+        });
 
-      if (lastTopicPerLevel[chapter.level - 1]) {
-        lastTopicPerLevel[chapter.level - 1].children.push(topic);
-      }
+        if (lastTopicPerLevel[chapter.level - 1]) {
+          lastTopicPerLevel[chapter.level - 1].children.push(topic);
+        }
 
-      if (chapter.level === 0) {
-        rootTopics.push(topic);
-      }
+        if (chapter.level === 0) {
+          rootTopics.push(topic);
+        }
 
-      lastTopicPerLevel[chapter.level] = topic;
-    }));
+        lastTopicPerLevel[chapter.level] = topic;
+      })
+    );
   });
 
   Promise.all(promises).then(() => {
-    jsonHandler.saveTopics(rootTopics, outputPath, mako, false).then(() => {
-      console.log('Done.');
-    }).catch(() => {
-      console.log('Fail.');
-    });
+    const rootTopic = rootTopics[0];
+    rootTopic.children.forEach(childTopic => {});
+
+    jsonHandler
+      .saveTopics(rootTopics, outputPath, mako, false)
+      .then(() => {
+        console.log('Done.');
+      })
+      .catch(() => {
+        console.log('Fail.');
+      });
   });
 });
 

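The lastTopicPerLevel bookkeeping above turns epub's flat, level-annotated chapter list into a topic tree: each chapter becomes a child of the most recent chapter one level up, and level-0 chapters become roots. A minimal standalone sketch of that idea (illustrative names, not part of the extractor):

// Build a tree from a flat list of items that carry a numeric nesting level.
const buildTree = items => {
  const roots = [];
  const lastAtLevel = {};
  items.forEach(item => {
    const node = { item, children: [] };
    if (lastAtLevel[item.level - 1]) {
      lastAtLevel[item.level - 1].children.push(node);
    }
    if (item.level === 0) {
      roots.push(node);
    }
    lastAtLevel[item.level] = node;
  });
  return roots;
};
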
+ 70 - 49
tools/sql-docs/jsonHandler.js

@@ -29,19 +29,22 @@ const LOG_NAME = 'jsonHandler.js';
  * @return {Promise}
  */
 const saveTopics = (topics, outputPath, makoPath, isImpala) => {
-  let index = {};
-  let topLevel = [];
-  let savePromises = [];
+  const index = {};
+  const topLevel = [];
+  const savePromises = [];
 
-  let saveTopicsInternal = (topics, parent) => {
+  const saveTopicsInternal = (topics, parent) => {
     topics.forEach(topic => {
-      let entry = {
-        title: topic.fragment.title.text().replace(/[\n\r]/g, '').trim(),
+      const entry = {
+        title: topic.fragment.title
+          .text()
+          .replace(/[\n\r]/g, '')
+          .trim(),
         ref: topic.ref,
         children: []
       };
       if (!parent) {
-        topLevel.push(entry)
+        topLevel.push(entry);
       } else {
         parent.children.push(entry);
       }
@@ -52,22 +55,24 @@ const saveTopics = (topics, outputPath, makoPath, isImpala) => {
       }
       index[topic.ref] = fileName;
 
-      let filePath = outputPath + fileName;
-      savePromises.push(new Promise((resolve, reject) => {
-        mkdirp(path.dirname(filePath), err => {
-          if (!err) {
-            fs.writeFile(filePath, topic.toJson(), err => {
-              if (err) {
-                reject(err);
-              } else {
-                resolve();
-              }
-            });
-          } else {
-            reject(err);
-          }
-        });
-      }));
+      const filePath = outputPath + fileName;
+      savePromises.push(
+        new Promise((resolve, reject) => {
+          mkdirp(path.dirname(filePath), err => {
+            if (!err) {
+              fs.writeFile(filePath, topic.toJson(), err => {
+                if (err) {
+                  reject(err);
+                } else {
+                  resolve();
+                }
+              });
+            } else {
+              reject(err);
+            }
+          });
+        })
+      );
       saveTopicsInternal(topic.children, entry);
     });
   };
@@ -76,14 +81,14 @@ const saveTopics = (topics, outputPath, makoPath, isImpala) => {
 
   const indexTypes = {
     impala: {
-      staticPrefix: '\':\'${ static(\'desktop/docs/impala/',
+      staticPrefix: "':'${ static('desktop/docs/impala/",
       docIndexRegex: /window\.IMPALA_DOC_INDEX.*\n/,
       docIndexPrefix: 'window.IMPALA_DOC_INDEX = {',
       topLevelRegex: /window\.IMPALA_DOC_TOP_LEVEL.*\n/,
       topLevelPrefix: 'window.IMPALA_DOC_TOP_LEVEL = ['
     },
     hive: {
-      staticPrefix: '\':\'${ static(\'desktop/docs/hive/',
+      staticPrefix: "':'${ static('desktop/docs/hive/",
       docIndexRegex: /window\.HIVE_DOC_INDEX.*\n/,
       docIndexPrefix: 'window.HIVE_DOC_INDEX = {',
       topLevelRegex: /window\.HIVE_DOC_TOP_LEVEL.*\n/,
@@ -92,34 +97,50 @@ const saveTopics = (topics, outputPath, makoPath, isImpala) => {
   };
 
   if (makoPath) {
-    let indexType = isImpala ? indexTypes.impala : indexTypes.hive;
-    savePromises.push(new Promise((resolve, reject) => {
-      fs.readFile(makoPath, 'utf-8', (err, contents) => {
-        if (err) {
-          reject(err);
-          return;
-        }
-        let indexStrings = [];
-        Object.keys(index).forEach(key => {
-          indexStrings.push('\'' + key + indexType.staticPrefix + index[key] + '\') }\'')
-        });
-        contents = contents.replace(indexType.docIndexRegex, indexType.docIndexPrefix + indexStrings.join(',') + '};\n');
-
-        let createTopicJs = (entry) => {
-          return '{title:\'' + entry.title +'\',ref:\'' + entry.ref + '\',children:[' + entry.children.map(createTopicJs).join(',') + ']}';
-        };
-
-        contents = contents.replace(indexType.topLevelRegex, indexType.topLevelPrefix + topLevel.map(createTopicJs).join(',') + '];\n');
-        fs.writeFile(makoPath.replace('.template', ''), contents, (err) => {
+    const indexType = isImpala ? indexTypes.impala : indexTypes.hive;
+    savePromises.push(
+      new Promise((resolve, reject) => {
+        fs.readFile(makoPath, 'utf-8', (err, contents) => {
           if (err) {
             reject(err);
             return;
           }
-          console.log('%s: %s written.', LOG_NAME, makoPath.replace('.template', ''));
-          resolve();
-        })
-      });
-    }));
+          const indexStrings = [];
+          Object.keys(index).forEach(key => {
+            indexStrings.push("'" + key + indexType.staticPrefix + index[key] + "') }'");
+          });
+          contents = contents.replace(
+            indexType.docIndexRegex,
+            indexType.docIndexPrefix + indexStrings.join(',') + '};\n'
+          );
+
+          const createTopicJs = entry => {
+            return (
+              "{title:'" +
+              entry.title +
+              "',ref:'" +
+              entry.ref +
+              "',children:[" +
+              entry.children.map(createTopicJs).join(',') +
+              ']}'
+            );
+          };
+
+          contents = contents.replace(
+            indexType.topLevelRegex,
+            indexType.topLevelPrefix + topLevel.map(createTopicJs).join(',') + '];\n'
+          );
+          fs.writeFile(makoPath.replace('.template', ''), contents, err => {
+            if (err) {
+              reject(err);
+              return;
+            }
+            console.log('%s: %s written.', LOG_NAME, makoPath.replace('.template', ''));
+            resolve();
+          });
+        });
+      })
+    );
   }
 
   return Promise.all(savePromises);

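The nested mkdirp + fs.writeFile calls in saveTopics ensure the output directory exists before each topic's JSON file is written. In more recent Node versions the same step can be expressed with fs.promises and a recursive mkdir; a sketch under that assumption, not part of the tool:

const fsp = require('fs').promises;
const path = require('path');

// Create the parent directory (if needed) and write one topic's JSON file.
const saveJson = async (filePath, json) => {
  await fsp.mkdir(path.dirname(filePath), { recursive: true });
  await fsp.writeFile(filePath, json);
};
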
+ 51 - 30
tools/sql-docs/topicLinker.js

@@ -29,7 +29,7 @@ const LOG_NAME = 'topicLinker.js';
 const replaceWithExternalLink = (node, href, cssClassPrefix) => {
   node.name('a');
   extractorUtils.removeAllAttributes(node);
-  node.attr({ 'class': cssClassPrefix + 'doc-external-link', 'href': href, 'target': '_blank' });
+  node.attr({ class: cssClassPrefix + 'doc-external-link', href: href, target: '_blank' });
 };
 
 /**
@@ -42,20 +42,24 @@ const replaceWithExternalLink = (node, href, cssClassPrefix) => {
  * @param {string} cssClassPrefix
  */
 const replaceWithInternalLink = (node, ref, anchorId, parseResults, cssClassPrefix) => {
-  let fullRef = ref + (anchorId ? '#' + anchorId : '');
+  const fullRef = ref + (anchorId ? '#' + anchorId : '');
 
-  let fragmentSearchResult = extractorUtils.findFragment(parseResults, ref, anchorId);
+  const fragmentSearchResult = extractorUtils.findFragment(parseResults, ref, anchorId);
   if (fragmentSearchResult.fragment && fragmentSearchResult.partOfTree) {
     // Here the topic is parsed and it's part of the main topic tree
     node.name('a');
     extractorUtils.removeAllAttributes(node);
-    node.attr({ 'class': cssClassPrefix + 'doc-internal-link', 'href': 'javascript:void(0);', 'data-doc-ref': ref });
+    node.attr({
+      class: cssClassPrefix + 'doc-internal-link',
+      href: 'javascript:void(0);',
+      'data-doc-ref': ref
+    });
     if (anchorId) {
       node.attr('data-doc-anchor-id', anchorId);
     }
   } else {
     // Here the topic is unknown or not part of the main topic tree so we make an external link instead
-    let href = EXTERNAL_DOC_URL_PREFIX + fullRef.replace('.xml', '.html');
+    const href = EXTERNAL_DOC_URL_PREFIX + fullRef.replace('.xml', '.html');
     replaceWithExternalLink(node, href, cssClassPrefix);
   }
 
@@ -83,7 +87,7 @@ const handleKeywordNode = (node, parseResults, cssClassPrefix) => {
     return;
   }
 
-  let keyRef = node.attr('keyref').value();
+  const keyRef = node.attr('keyref').value();
   node.attr('keyref').remove();
 
   let keyDef = undefined;
@@ -122,14 +126,19 @@ const handleKeywordNode = (node, parseResults, cssClassPrefix) => {
       }
     } else {
       if (keyDef.href.indexOf('scalability_file_handle_cache') !== -1) {
-
       }
-      replaceWithInternalLink(node, keyDef.href.replace(/#.*$/, ''), keyRef, parseResults, cssClassPrefix);
+      replaceWithInternalLink(
+        node,
+        keyDef.href.replace(/#.*$/, ''),
+        keyRef,
+        parseResults,
+        cssClassPrefix
+      );
     }
     return;
   }
 
-  throw new Error('Failed handling keyword node.')
+  throw new Error('Failed handling keyword node.');
 };
 
 /**
@@ -139,20 +148,20 @@ const handleKeywordNode = (node, parseResults, cssClassPrefix) => {
  * @param {DitamapParseResult[]} parseResults
  * @param {string} cssClassPrefix
  */
-let handkeXrefNode = (node, parseResults, cssClassPrefix) => {
+const handkeXrefNode = (node, parseResults, cssClassPrefix) => {
   if (extractorUtils.hasAttributes(node, 'href')) {
-    let href = node.attr('href').value();
+    const href = node.attr('href').value();
 
     if (node.attr('scope') && node.attr('scope').value() === 'external') {
       replaceWithExternalLink(node, href, cssClassPrefix);
       if (!node.text()) {
-        node.text(href)
+        node.text(href);
       }
       return;
     }
 
-    let ref = ~href.indexOf('#') ? href.replace(/#.*$/, '') : href;
-    let anchorId = ~href.indexOf('#') && href.replace(/^.*#/, '');
+    const ref = ~href.indexOf('#') ? href.replace(/#.*$/, '') : href;
+    const anchorId = ~href.indexOf('#') && href.replace(/^.*#/, '');
     replaceWithInternalLink(node, ref, anchorId, parseResults, cssClassPrefix);
   }
   if (extractorUtils.hasAttributes(node, 'keyref')) {
@@ -169,14 +178,14 @@ let handkeXrefNode = (node, parseResults, cssClassPrefix) => {
 const handleTocNode = (topic, node) => {
   if (topic.children.length) {
     node.name('div');
-    let header = node.node('div');
+    const header = node.node('div');
     header.text('Continue reading:');
-    let ul = node.node('ul');
+    const ul = node.node('ul');
     topic.children.forEach(childTopic => {
-      let li = ul.node('li');
-      let xrefNode = li.node('xref');
+      const li = ul.node('li');
+      const xrefNode = li.node('xref');
       xrefNode.attr('href', childTopic.ref);
-    })
+    });
   } else {
     node.remove();
   }
@@ -210,7 +219,11 @@ const linkNodesInDomXml = (node, parseResults, cssClassPrefix, foundCssClasses)
   if (extractorUtils.hasAttributes(node, 'class')) {
     foundCssClasses[node.attr('class').value()] = true;
   }
-  node.childNodes().forEach(childNode => linkNodesInDomXml(childNode, parseResults, cssClassPrefix, foundCssClasses));
+  node
+    .childNodes()
+    .forEach(childNode =>
+      linkNodesInDomXml(childNode, parseResults, cssClassPrefix, foundCssClasses)
+    );
   return foundCssClasses;
 };
 
@@ -226,9 +239,13 @@ const insertConrefsAndToc = (topic, node, parseResults) => {
     handleTocNode(topic, node);
   }
   if (extractorUtils.hasAttributes(node, 'conref')) {
-    let conref = node.attr('conref').value();
-    var splitRef = conref.split('#');
-    let fragmentSearchResult = extractorUtils.findFragment(parseResults, splitRef[0], splitRef[1]);
+    const conref = node.attr('conref').value();
+    const splitRef = conref.split('#');
+    const fragmentSearchResult = extractorUtils.findFragment(
+      parseResults,
+      splitRef[0],
+      splitRef[1]
+    );
     if (!fragmentSearchResult.fragment) {
       console.log(node.toString());
       console.log('%s: Could not find fragment for conref: %s', LOG_NAME, conref);
@@ -250,12 +267,14 @@ const insertConrefsAndToc = (topic, node, parseResults) => {
  * @param {String} cssClassPrefix
  * @param {Object} foundCssClasses
  */
-let linkNodesInTopic = (topic, parseResults, cssClassPrefix, foundCssClasses) => {
+const linkNodesInTopic = (topic, parseResults, cssClassPrefix, foundCssClasses) => {
   // First insert all the conrefs and topics
   insertConrefsAndToc(topic, topic.domXml, parseResults);
   // Then deal with xrefs, keywords etc.
   linkNodesInDomXml(topic.domXml, parseResults, cssClassPrefix, foundCssClasses);
-  topic.children.forEach(childTopic => linkNodesInTopic(childTopic, parseResults, cssClassPrefix, foundCssClasses));
+  topic.children.forEach(childTopic =>
+    linkNodesInTopic(childTopic, parseResults, cssClassPrefix, foundCssClasses)
+  );
 };
 
 /**
@@ -265,13 +284,15 @@ let linkNodesInTopic = (topic, parseResults, cssClassPrefix, foundCssClasses) =>
  * @param cssClassPrefix
  */
 const linkTopics = (parseResults, cssClassPrefix) => {
-  let foundCssClasses = {};
-  parseResults.forEach(parseResult => parseResult.topics.forEach(topic => {
-    linkNodesInTopic(topic, parseResults, cssClassPrefix, foundCssClasses);
-  }));
+  const foundCssClasses = {};
+  parseResults.forEach(parseResult =>
+    parseResult.topics.forEach(topic => {
+      linkNodesInTopic(topic, parseResults, cssClassPrefix, foundCssClasses);
+    })
+  );
   console.log('%s: Found CSS classes: %s', LOG_NAME, Object.keys(foundCssClasses).join(','));
 };
 
 module.exports = {
   linkTopics: linkTopics
-};
+};