
HUE-4003 [editor] Add parser support for HDFS path completion

Johan Ahlen 9 years ago
Parent
Commit
51a46c5

+ 172 - 0
desktop/core/src/desktop/static/desktop/js/autocomplete/sql.jison

@@ -16,6 +16,8 @@
 
 %lex
 %options case-insensitive
+%s hive impala
+%x hdfs
 %%
 
 [ \t\n]                             { /* skip whitespace */ }
@@ -26,9 +28,18 @@
 '|PARTIAL_CURSOR|'                  { parser.yy.cursorFound = true; return 'PARTIAL_CURSOR'; }
 
 'AND'                               { return 'AND'; }
+'BIGINT'                            { return 'BIGINT'; }
+'BOOLEAN'                           { return 'BOOLEAN'; }
 'BY'                                { return 'BY'; }
+'CHAR'                              { return 'CHAR'; }
+'CREATE'                            { return 'CREATE'; }
+'DECIMAL'                           { return 'DECIMAL'; }
+'DOUBLE'                            { return 'DOUBLE'; }
+'FLOAT'                             { return 'FLOAT'; }
 'FROM'                              { return 'FROM'; }
 'GROUP'                             { return 'GROUP'; }
+'INT'                               { return 'INT'; }
+'INTO'                              { return 'INTO'; }
 'IS'                                { return 'IS'; }
 'JOIN'                              { return 'JOIN'; }
 'NOT'                               { return 'NOT'; }
@@ -36,12 +47,39 @@
 'OR'                                { return 'OR'; }
 'ORDER'                             { return 'ORDER'; }
 'SELECT'                            { determineCase(yytext); return 'SELECT'; }
+'SMALLINT'                          { return 'SMALLINT'; }
+'STRING'                            { return 'STRING'; }
+'TABLE'                             { return 'TABLE'; }
+'TIMESTAMP'                         { return 'TIMESTAMP'; }
+'TINYINT'                           { return 'TINYINT'; }
 'USE'                               { determineCase(yytext); return 'USE'; }
+'VARCHAR'                           { return 'VARCHAR'; }
 'WHERE'                             { return 'WHERE'; }
 
+<hive>'BINARY'                      { return '<hive>BINARY'; }
+<hive>'DATA'                        { return '<hive>DATA'; }
+<hive>'DATE'                        { return '<hive>DATE'; }
+<hive>'EXTERNAL'                    { return '<hive>EXTERNAL'; }
+<hive>'INPATH'                      { this.begin('hdfs'); return '<hive>INPATH'; }
+<hive>'LOAD'                        { return '<hive>LOAD'; }
+<hive>'LOCATION'                    { this.begin('hdfs'); return '<hive>LOCATION'; }
+
+<impala>'DATA'                      { return '<impala>DATA'; }
+<impala>'EXTERNAL'                  { return '<impala>EXTERNAL'; }
+<impala>'INPATH'                    { this.begin('hdfs'); return '<impala>INPATH'; }
+<impala>'LOAD'                      { return '<impala>LOAD'; }
+<impala>'LOCATION'                  { this.begin('hdfs'); return '<impala>LOCATION'; }
+
 [0-9]+                              { return 'UNSIGNED_INTEGER'; }
 [A-Za-z][A-Za-z0-9_]*               { return 'REGULAR_IDENTIFIER'; }
 
+<hdfs>'|CURSOR|'                    { parser.yy.cursorFound = true; return 'CURSOR'; }
+<hdfs>'|PARTIAL_CURSOR|'            { parser.yy.cursorFound = true; return 'PARTIAL_CURSOR'; }
+<hdfs>\s+[']                        { return 'HDFS_START_QUOTE'; }
+<hdfs>[^'|]+                        { return 'HDFS_PATH'; }
+<hdfs>[']                           { this.popState(); return 'HDFS_END_QUOTE'; }
+<hdfs><<EOF>>                       { return 'EOF'; }
+
 [-+&~|^/%*(),.;!]                   { return yytext; }
 [=<>]                               { return yytext; }
 
@@ -95,6 +133,8 @@ SqlStatements
 
 SqlStatement
  : UseStatement
+ | DataManipulation
+ | TableDefinition
  | QueryExpression
  | 'REGULAR_IDENTIFIER' AnyCursor 'REGULAR_IDENTIFIER'
  | 'REGULAR_IDENTIFIER' AnyCursor
@@ -124,6 +164,104 @@ UseStatement
    }
  ;
 
+DataManipulation
+ : HiveOrImpalaLoad HiveOrImpalaData HiveOrImpalaInpath HdfsPath 'INTO' 'TABLE' 'REGULAR_IDENTIFIER'
+ | HiveOrImpalaLoad HiveOrImpalaData HiveOrImpalaInpath HdfsPath
+ ;
+
+HiveOrImpalaLoad
+ : '<hive>LOAD'
+ | '<impala>LOAD'
+ ;
+
+HiveOrImpalaData
+ : '<hive>DATA'
+ | '<impala>DATA'
+ ;
+
+HiveOrImpalaInpath
+ : '<hive>INPATH'
+ | '<impala>INPATH'
+ ;
+
+TableDefinition
+ : 'CREATE' TableScope 'TABLE' 'REGULAR_IDENTIFIER' TableElementList TableLocation
+ | 'CREATE' 'TABLE'
+ ;
+
+TableScope
+ : '<hive>EXTERNAL'
+ | '<impala>EXTERNAL'
+ ;
+
+TableElementList
+ : '(' TableElements ')'
+ ;
+
+TableElements
+ : TableElement
+ | TableElements ',' TableElement
+ ;
+
+TableElement
+ : ColumnDefinition
+ ;
+
+ColumnDefinition
+ : 'REGULAR_IDENTIFIER' PrimitiveType
+ ;
+
+TableLocation
+ : HiveOrImpalaLocation HdfsPath
+ ;
+
+HiveOrImpalaLocation
+ : '<hive>LOCATION'
+ | '<impala>LOCATION'
+ ;
+
+HdfsPath
+ : 'HDFS_START_QUOTE' 'HDFS_PATH' 'HDFS_END_QUOTE'
+ | 'HDFS_START_QUOTE' 'HDFS_PATH' 'PARTIAL_CURSOR' 'HDFS_PATH' 'HDFS_END_QUOTE'
+    {
+      suggestHdfs({ path: $2 });
+    }
+ | 'HDFS_START_QUOTE' 'HDFS_PATH' 'PARTIAL_CURSOR' 'HDFS_END_QUOTE'
+   {
+     suggestHdfs({ path: $2 });
+   }
+ | 'HDFS_START_QUOTE' 'HDFS_PATH' 'PARTIAL_CURSOR'
+    {
+      suggestHdfs({ path: $2 });
+    }
+ | 'HDFS_START_QUOTE' 'PARTIAL_CURSOR' 'HDFS_END_QUOTE'
+   {
+     suggestHdfs({ path: '/' });
+   }
+ | 'HDFS_START_QUOTE' 'PARTIAL_CURSOR'
+    {
+      suggestHdfs({ path: '/' });
+    }
+ ;
+
+// TODO: Support | DECIMAL(precision, scale)  -- (Note: Available in Hive 0.13.0 and later)
+PrimitiveType
+ : 'TINYINT'
+ | 'SMALLINT'
+ | 'INT'
+ | 'BIGINT'
+ | 'BOOLEAN'
+ | 'FLOAT'
+ | 'DOUBLE'
+ | 'STRING'
+ | 'DECIMAL'
+ | 'CHAR'
+ | 'VARCHAR'
+ | 'TIMESTAMP'
+ | '<hive>BINARY'
+ | '<hive>DATE'
+ ;
+
 QueryExpression
  : 'SELECT' SelectList TableExpression
    {
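
The HdfsPath alternatives in the hunk above cover paths that are complete, partially typed, or still empty at the cursor. A hypothetical illustration of what the semantic actions record, assuming the cursor position is injected into the statement as the |PARTIAL_CURSOR| marker (the example path is illustrative):

// Hypothetical: for the fragment
//   LOAD DATA INPATH '/user/hue/|PARTIAL_CURSOR|
// the lexer emits HDFS_START_QUOTE, HDFS_PATH ("/user/hue/") and PARTIAL_CURSOR,
// so the 'HDFS_START_QUOTE' 'HDFS_PATH' 'PARTIAL_CURSOR' alternative fires and records:
parser.yy.result.suggestHdfs = { path: '/user/hue/' };

// With nothing typed after the opening quote, the cursor-only alternatives fire
// instead and fall back to suggestions from the HDFS root:
parser.yy.result.suggestHdfs = { path: '/' };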
@@ -488,14 +626,32 @@ var suggestDatabases = function (details) {
   parser.yy.result.suggestDatabases = details || {};
 }
 
+var suggestHdfs = function (details) {
+  parser.yy.result.suggestHdfs = details || {};
+}
+
 var determineCase = function (text) {
   parser.yy.lowerCase = text.toLowerCase() === text;
 };
 
+var lexerModified = false;
+
 /**
  * Main parser function
  */
 parser.parseSql = function(beforeCursor, afterCursor, dialect) {
+
+  // Hack to set the initial state of the lexer without first having to hit a token;
+  // this has to be done as the first token found can be dependent on the dialect
+  if (!lexerModified && typeof dialect !== 'undefined') {
+    var originalSetInput = parser.lexer.setInput;
+    parser.lexer.setInput = function (input) {
+      var lexer = originalSetInput.bind(parser.lexer)(input);
+      lexer.begin(dialect);
+    };
+    lexerModified = true;
+  }
+
   var result;
   parser.yy.dialect = dialect;
   try {
@@ -509,6 +665,22 @@ parser.parseSql = function(beforeCursor, afterCursor, dialect) {
     result = parser.yy.result;
   }
 
+  if (typeof result.error !== 'undefined' && typeof result.error.expected !== 'undefined') {
+    // Remove any expected tokens from other dialects; jison doesn't remove tokens from other lexer states.
+    var actualExpected = [];
+    result.error.expected.forEach(function (expected) {
+      var match = expected.match(/\<([a-z]+)\>(.*)/);
+      if (match !== null) {
+        if (typeof parser.yy.dialect !== 'undefined' && parser.yy.dialect === match[1]) {
+          actualExpected.push(match[2]);
+        }
+      } else {
+        actualExpected.push(expected);
+      }
+    });
+    result.error.expected = actualExpected;
+  }
+
   return result;
 }
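
For reference, a hypothetical end-to-end call against the generated parser, mirroring one of the new spec cases in sqlSpec.js below (the expected result shape is taken from the spec; variable names are illustrative):

// Hypothetical usage of the generated parser.
var beforeCursor = 'CREATE EXTERNAL TABLE foo (id int) LOCATION \'';
var afterCursor = '\'';
var result = parser.parseSql(beforeCursor, afterCursor, 'hive');
// Per the spec, result is expected to contain:
//   { lowerCase: false, suggestHdfs: { path: '/' } }
// result.suggestHdfs.path can then be handed to an HDFS autocompleter to list entries.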
 

File diff suppressed because it is too large
+ 56 - 31
desktop/core/src/desktop/static/desktop/js/autocomplete/sql.js


+ 92 - 96
desktop/core/src/desktop/static/desktop/spec/autocomplete/sqlSpec.js

@@ -374,48 +374,108 @@ define([
       });
     });
 
-    describe('hive-specific stuff', function() {
-      describe('HDFS autocompletion', function () {
+    describe('HDFS autocompletion', function () {
+      it('should autocomplete hdfs paths in location references without initial /', function () {
+        assertAutoComplete({
+          beforeCursor: 'CREATE EXTERNAL TABLE foo (id int) LOCATION \'',
+          afterCursor: '\'',
+          dialect: 'hive',
+          expectedResult: {
+            lowerCase: false,
+            suggestHdfs : { path: '/' }
+          }
+        });
+      });
 
-        xit("should autocomplete hdfs paths in location references without initial /", function () {
-          assertAutoComplete({
-            beforeCursor: 'CREATE EXTERNAL TABLE foo (id int) LOCATION \'',
-            afterCursor: '\'',
-            dialect: 'hive',
-            expectedResult: {
-              suggestHdfsFiles : { location: '' }
-            }
-          });
+      it('should autocomplete hdfs paths in location references from root', function () {
+        assertAutoComplete({
+          beforeCursor: 'CREATE EXTERNAL TABLE foo (id int) LOCATION \'/',
+          afterCursor: '\'',
+          dialect: 'hive',
+          expectedResult: {
+            lowerCase: false,
+            suggestHdfs : { path: '/' }
+          }
         });
+      });
 
-        xit('should autocomplete hdfs paths in location references from root', function () {
-          assertAutoComplete({
-            serverResponses: {},
-            beforeCursor: 'CREATE EXTERNAL TABLE foo (id int) LOCATION \'/',
-            afterCursor: '\'',
-            expectedSuggestions: ['file_one', 'folder_one/']
-          });
+      it('should autocomplete hdfs paths and suggest trailing apostrophe if empty after cursor', function () {
+        assertAutoComplete({
+          beforeCursor: 'CREATE EXTERNAL TABLE foo (id int) LOCATION \'/',
+          afterCursor: '',
+          dialect: 'hive',
+          expectedResult: {
+            lowerCase: false,
+            suggestHdfs : { path: '/' }
+          }
         });
+      });
 
-        xit('should autocomplete hdfs paths and suggest trailing apostrophe if empty after cursor', function () {
-          assertAutoComplete({
-            serverResponses: {},
-            beforeCursor: 'CREATE EXTERNAL TABLE foo (id int) LOCATION \'/',
-            afterCursor: '',
-            expectedSuggestions: ['file_one\'', 'folder_one/']
-          });
+      it('should autocomplete hdfs paths in location references from inside a path', function () {
+        assertAutoComplete({
+          beforeCursor: 'CREATE EXTERNAL TABLE foo (id int) LOCATION \'/',
+          afterCursor: '/bar\'',
+          dialect: 'hive',
+          expectedResult: {
+            lowerCase: false,
+            suggestHdfs : { path: '/' }
+          }
         });
+      });
 
-        xit('should autocomplete hdfs paths in location references from inside a path', function () {
-          assertAutoComplete({
-            serverResponses: {},
-            beforeCursor: 'CREATE EXTERNAL TABLE foo (id int) LOCATION \'/',
-            afterCursor: '/bar\'',
-            expectedSuggestions: ['file_one', 'folder_one']
-          });
+      it('should autocomplete hdfs paths in inpath references without initial /', function () {
+        assertAutoComplete({
+          beforeCursor: 'LOAD DATA INPATH \'',
+          afterCursor: '\'',
+          dialect: 'impala',
+          expectedResult: {
+            lowerCase: false,
+            suggestHdfs: { path: '/'}
+          }
+        });
+      });
+
+      it('should autocomplete hdfs paths in inpath references from root', function () {
+        assertAutoComplete({
+          beforeCursor: 'LOAD DATA INPATH \'/',
+          afterCursor: '\'',
+          dialect: 'hive',
+          expectedResult: {
+            lowerCase: false,
+            suggestHdfs: { path: '/'}
+          }
         });
       });
 
+      it('should autocomplete hdfs paths in inpath references and suggest trailing apostrophe if empty after cursor', function () {
+        assertAutoComplete({
+          beforeCursor: 'LOAD DATA INPATH \'/',
+          afterCursor: '',
+          dialect: 'impala',
+          expectedResult: {
+            lowerCase: false,
+            suggestHdfs: { path: '/'}
+          }
+        });
+      });
+
+      it('should autocomplete hdfs paths in inpath references from inside a path', function () {
+        assertAutoComplete({
+          serverResponses: {},
+          beforeCursor: 'LOAD DATA INPATH \'/',
+          afterCursor: '/bar\' INTO TABLE foo',
+          dialect: 'impala',
+          expectedResult: {
+            lowerCase: false,
+            suggestHdfs: { path: '/'}
+          }
+        });
+      });
+    });
+
+    describe('hive-specific stuff', function() {
+
+
       xit('should suggest struct from map values', function() {
         assertAutoComplete({
           serverResponses: {
@@ -691,70 +751,6 @@ define([
     });
 
     describe('impala-specific stuff', function() {
-      beforeEach(function (done) {
-        changeType('impala', done);
-        ajaxHelper.responseForUrls = {};
-      });
-
-      describe('HDFS autocompletion', function () {
-        beforeEach(function() {
-          subject = new SqlAutocompleter({
-            hdfsAutocompleter: {
-              autocomplete: function(before, after, callback) {
-                callback([
-                  {
-                    meta: 'file',
-                    score: 1000,
-                    value: 'file_one'
-                  },
-                  {
-                    meta: 'dir',
-                    score: 999,
-                    value: 'folder_one'
-                  }
-                ])
-              }
-            },
-            snippet: snippet
-          });
-        });
-
-        xit('should autocomplete hdfs paths in location references without initial /', function () {
-          assertAutoComplete({
-            serverResponses: {},
-            beforeCursor: 'LOAD DATA INPATH \'',
-            afterCursor: '\'',
-            expectedSuggestions: ['/file_one', '/folder_one/']
-          });
-        });
-
-        xit('should autocomplete hdfs paths in location references from root', function () {
-          assertAutoComplete({
-            serverResponses: {},
-            beforeCursor: 'LOAD DATA INPATH \'/',
-            afterCursor: '\'',
-            expectedSuggestions: ['file_one', 'folder_one/']
-          });
-        });
-
-        xit('should autocomplete hdfs paths and suggest trailing apostrophe if empty after cursor', function () {
-          assertAutoComplete({
-            serverResponses: {},
-            beforeCursor: 'LOAD DATA INPATH \'/',
-            afterCursor: '',
-            expectedSuggestions: ['file_one\'', 'folder_one/']
-          });
-        });
-
-        xit('should autocomplete hdfs paths in location references from inside a path', function () {
-          assertAutoComplete({
-            serverResponses: {},
-            beforeCursor: 'LOAD DATA INPATH \'/',
-            afterCursor: '/bar\' INTO TABLE foo',
-            expectedSuggestions: ['file_one', 'folder_one']
-          });
-        });
-      });
 
       xit('should not suggest struct from map values with hive style syntax', function() {
         assertAutoComplete({

Some files were not shown because too many files changed in this diff