
[notebook] Update interfaces to use livy and livy-batch

Jenny Kim, 10 years ago
commit 1f6abeb983

+ 8 - 8
desktop/conf.dist/hue.ini

@@ -533,23 +533,23 @@
 
     [[[scala]]]
     name=Scala
-    interface=spark
+    interface=livy
 
-    [[[python]]]
+    [[[pyspark]]]
     name=PySpark
-    interface=spark
+    interface=livy
 
     [[[r]]]
     name=R
-    interface=spark
+    interface=livy
 
     [[[jar]]]
-    name=Spark submit Jar
-    interface=spark-submit
+    name=Spark Submit Jar
+    interface=livy-batch
 
     [[[py]]]
-    name=Spark submit Python
-    interface=spark-submit
+    name=Spark Submit Python
+    interface=livy-batch
 
     [[[text]]]
     name=Text
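
The two interfaces map onto Livy's two REST endpoints: livy drives an interactive session (the Scala/PySpark/R shells), while livy-batch submits a self-contained application, which is the spark-submit case. A minimal sketch of those calls, assuming a Livy server on localhost:8998; the payload fields are illustrative only:

    # Sketch of the Livy REST calls the two interfaces correspond to.
    # Assumes a Livy server at http://localhost:8998; payloads are examples.
    import requests

    LIVY = 'http://localhost:8998'

    # interface=livy: start an interactive session for the notebook shells.
    session = requests.post(LIVY + '/sessions', json={'kind': 'pyspark'}).json()
    print('session', session['id'], session['state'])

    # interface=livy-batch: submit a self-contained application, spark-submit style.
    batch = requests.post(LIVY + '/batches',
                          json={'file': 'hdfs:///user/hue/examples/app.jar',
                                'className': 'com.example.App'}).json()
    print('batch', batch['id'], batch['state'])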

+ 23 - 8
desktop/conf/pseudo-distributed.ini.tmpl

@@ -534,7 +534,7 @@
   # - name: is the nice name to print in the notebook
   # - interface: is the connector to use to talk to the server:
   #   * hiveserver2 is configurable in [beeswax] and [impala]
-  #   * livy and livy-batch are configurable in [spark]
+  #   * livy and livy-batch are configurable for spark shell and spark-submit, respectively
   #   * dbms-X in [libdbms] [[X]]
   #   * jdbc in [jdbc]
   #   * odbc in [odbc]
@@ -546,32 +546,47 @@
   name=Hive
   interface=hiveserver2
 
-  [impala]
+  [[[impala]]]
   name=Impala
   interface=hiveserver2
 
-  [pyspark]
+  [[[scala]]]
+  name=Scala
+  interface=livy
+
+  [[[pyspark]]]
   name=PySpark
   interface=livy
 
-  [spark-submit]
-  name=Spark Submit
+  [[[r]]]
+  name=R
+  interface=livy
+
+  [[[jar]]]
+  name=Spark Submit Jar
+  interface=livy-batch
+
+  [[[py]]]
+  name=Spark Submit Python
   interface=livy-batch
 
-  [phoenix]
+  [[[phoenix]]]
   name=Phoenix
   interface=jdbc
 
-  [mysql]
+  [[[mysql]]]
   name=MySql
   interface=dbms-mysql
   # interface=jdbc
   # interface=odbc
 
-  [pig]
+  [[[pig]]]
   name=Pig
   interface=pig
 
+  [[[text]]]
+  name=Text
+  interface=text
 
 [jdbc]
   [[mysql]]
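
The template change also restores the triple-bracket nesting: interpreter entries belong under [notebook] -> [[interpreters]], so a top-level [impala] or [pyspark] section would not be picked up as an interpreter at all. A short sketch of reading the nested sections with the configobj library, which understands this bracket-depth convention (the file name here is an assumption):

    # Sketch: list the configured interpreters and their interfaces.
    # Assumes the standard hue.ini layout with [notebook] -> [[interpreters]].
    from configobj import ConfigObj

    conf = ConfigObj('hue.ini')
    for name, section in conf['notebook']['interpreters'].items():
        print('%s -> %s' % (name, section['interface']))
    # e.g. pyspark -> livy, jar -> livy-batch, impala -> hiveserver2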

+ 4 - 2
desktop/libs/notebook/src/notebook/connectors/base.py

@@ -92,7 +92,9 @@ def get_api(user, snippet):
 
   if interface == 'hiveserver2':
     return HS2Api(user)
-  elif interface == 'spark-submit':
+  elif interface == 'livy':
+    return SparkApi(user)
+  elif interface == 'livy-batch':
     return SparkBatchApi(user)
   elif interface == 'text':
     return TextApi(user)
@@ -101,7 +103,7 @@ def get_api(user, snippet):
   elif interface == 'jdbc':
     return JDBCApi(user)
   else:
-    return SparkApi(user)
+    raise PopupException(_('Notebook connector interface not recognized: %s') % interface)
 
 
 def _get_snippet_session(notebook, snippet):
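
With the implicit fallback removed, a misspelled or unconfigured interface now fails loudly instead of silently landing on the Spark connector. For illustration, the same dispatch can be written as a lookup table; this is a self-contained sketch, not Hue's code, and the classes below are stand-ins for the ones named in the diff:

    # Stand-ins for the connector classes; the real ones live under notebook.connectors.
    class PopupException(Exception):
        pass

    class _Api(object):
        def __init__(self, user):
            self.user = user

    class HS2Api(_Api):         # interface=hiveserver2
        pass

    class SparkApi(_Api):       # interface=livy (interactive sessions)
        pass

    class SparkBatchApi(_Api):  # interface=livy-batch (batch submissions)
        pass

    API_BY_INTERFACE = {
        'hiveserver2': HS2Api,
        'livy': SparkApi,
        'livy-batch': SparkBatchApi,
    }

    def get_api(user, interface):
        # Simplified: takes the interface directly instead of deriving it from the snippet.
        try:
            return API_BY_INTERFACE[interface](user)
        except KeyError:
            raise PopupException('Notebook connector interface not recognized: %s' % interface)

    print(type(get_api('hue', 'livy')).__name__)        # SparkApi
    print(type(get_api('hue', 'livy-batch')).__name__)  # SparkBatchApi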