
[notebook] Make snippets configurable from hue.ini

Romain Rigaux 10 years ago
parent
commit
9b9f71b

+ 1 - 1
apps/about/src/about/templates/admin_wizard.mako

@@ -103,7 +103,7 @@ ${ header.menubar() }
           % if 'spark' in app_names:
               <li>
                 <a href="javascript:void(0)" class="installBtn" data-loading-text="${ _('Installing...') }"
-                   data-sample-url="${ url('spark:install_examples') }">
+                   data-sample-url="${ url('notebook:install_examples') }">
                   <i class="fa fa-download"></i> ${ apps['spark'].nice_name }
                 </a>
               </li>

+ 1 - 1
apps/zookeeper/src/zookeeper/conf.py

@@ -18,7 +18,7 @@
 from desktop.lib.conf import Config, UnspecifiedConfigSection, ConfigSection, coerce_string
 
 
-# Used only for ZooKeeper app proeprties, ZooKeeper specific properties should come from libzookeeper
+# Used only for ZooKeeper app properties, ZooKeeper specific properties should come from libzookeeper
 CLUSTERS = UnspecifiedConfigSection(
   "clusters",
   help="One entry for each Zookeeper cluster",

+ 49 - 0
desktop/conf.dist/hue.ini

@@ -515,6 +515,55 @@
    ## collection_interval=30000
 
 
+###########################################################################
+# Settings to configure the snippets available in the Notebook
+###########################################################################
+
+[notebook]
+
+  [[interpreters]]
+
+    [[[hive]]]
+    name=Hive
+    interface=hiveserver2
+
+    [[[impala]]]
+    name=Impala
+    interface=hiveserver2
+
+    [[[scala]]]
+    name=Scala
+    interface=spark
+
+    [[[python]]]
+    name=PySpark
+    interface=spark
+
+    [[[r]]]
+    name=R
+    interface=spark
+
+    [[[jar]]]
+    name=Spark submit Jar
+    interface=spark-submit
+
+    [[[py]]]
+    name=Spark submit Python
+    interface=spark-submit
+
+    [[[text]]]
+    name=Text
+    interface=text
+
+    ## [[[mysql]]]
+    ## name=MySql
+    ## interface=mysql
+
+    ## [[[phoenix]]]
+    ## name=Phoenix
+    ## interface=jdbc
+
+
 ###########################################################################
 # Settings to configure SAML
 ###########################################################################

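As a sanity check on the nesting above: Hue parses hue.ini with configobj, so each [[[section]]] becomes a plain nested dict. A minimal sketch, assuming the default file path (adjust to your checkout):

# Inspect the [notebook] -> [[interpreters]] nesting with configobj (the
# library Hue's desktop.lib.conf is built on). The path is an assumption.
from configobj import ConfigObj

cfg = ConfigObj('desktop/conf.dist/hue.ini')
for snippet_type, section in cfg['notebook']['interpreters'].items():
  # Each [[[section]]] maps to a dict holding 'name' and 'interface'.
  print('%s -> %s (%s)' % (snippet_type, section['name'], section['interface']))
# hive -> Hive (hiveserver2)
# impala -> Impala (hiveserver2)
# scala -> Scala (spark)
# ...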
+ 59 - 3
desktop/conf/pseudo-distributed.ini.tmpl

@@ -525,6 +525,65 @@
    ## collection_interval=30000
 
 
+###########################################################################
+# Settings to configure the snippets available in the Notebook
+###########################################################################
+
+[notebook]
+  # - section name: a unique id for the type of snippet
+  # - name: the nice name to print in the notebook
+  # - interface: the connector to use to talk to the server:
+  #   * hiveserver2 is configurable in [beeswax] and [impala]
+  #   * livy and livy-batch are configurable in [spark]
+  #   * dbms-X in [libdbms] [[X]]
+  #   * jdbc in [jdbc]
+  #   * odbc in [odbc]
+  #   * pig in [pig]
+  
+  [[interpreters]]
+  
+  [[[hive]]]
+  name=Hive
+  interface=hiveserver2
+
+  [[[impala]]]
+  name=Impala
+  interface=hiveserver2
+
+  [[[pyspark]]]
+  name=PySpark
+  interface=livy
+
+  [[[spark-submit]]]
+  name=Spark Submit
+  interface=livy-batch
+
+  [[[phoenix]]]
+  name=Phoenix
+  interface=jdbc
+
+  [[[mysql]]]
+  name=MySql
+  interface=dbms-mysql
+  # interface=jdbc
+  # interface=odbc
+
+  [[[pig]]]
+  name=Pig
+  interface=pig
+
+
+[jdbc]
+  [[mysql]]
+  driver={host, port, database}
+  auth={user, password}
+  # auth is optional: prompt the user, or use impersonation where available, e.g. https://issues.apache.org/jira/browse/HIVE-6486
+
+  [[phoenix]]
+  driver={host, port, database}
+  auth={user, password}
+
+
 ###########################################################################
 # Settings to configure SAML
 ###########################################################################
@@ -1127,9 +1186,6 @@
   # If livy should use proxy users when submitting a job.
   ## livy_impersonation_enabled=true
 
-  # List of available types of snippets
-  ## languages='[{"name": "Scala Shell", "type": "scala"},{"name": "PySpark Shell", "type": "python"},{"name": "R Shell", "type": "r"},{"name": "Jar", "type": "Jar"},{"name": "Python", "type": "py"},{"name": "Impala SQL", "type": "impala"},{"name": "Hive SQL", "type": "hive"},{"name": "Text", "type": "text"}]'
-
 
 ###########################################################################
 # Settings for the User Admin application

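Since the [spark] languages blob is removed below, an existing custom override has to move into [notebook]. A hypothetical one-off helper; the per-type interface mapping is an assumption based on the comment block above:

# Hypothetical migration helper: emit [[interpreters]] sections equivalent
# to an old [spark] 'languages' override. INTERFACES is an assumed mapping.
import json

OLD_LANGUAGES = '[{"name": "Hive SQL", "type": "hive"}, {"name": "Scala Shell", "type": "scala"}]'
INTERFACES = {'hive': 'hiveserver2', 'impala': 'hiveserver2', 'scala': 'spark'}

for lang in json.loads(OLD_LANGUAGES):
  print('    [[[%s]]]' % lang['type'])
  print('    name=%s' % lang['name'])
  print('    interface=%s' % INTERFACES.get(lang['type'], 'hiveserver2'))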
+ 21 - 21
desktop/libs/notebook/src/notebook/conf.py

@@ -15,29 +15,29 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import json
-
 from django.utils.translation import ugettext_lazy as _t
 
-from desktop.lib.conf import Config
-
-
-def coerce_json(j):
-  return json.loads(j)
+from desktop.lib.conf import Config, UnspecifiedConfigSection, ConfigSection
 
 
-LANGUAGES = Config(
-  key="languages",
-  help=_t("List of available types of snippets."),
-  type=coerce_json,
-  default="""[
-      {"name": "Scala", "type": "spark"},
-      {"name": "PySpark", "type": "pyspark"},
-      {"name": "R", "type": "r"},
-      {"name": "Impala", "type": "impala"},
-      {"name": "Hive", "type": "hive"},
-      {"name": "Jar", "type": "jar"},
-      {"name": "Python", "type": "py"},
-      {"name": "Text", "type": "text"}
-  ]"""
+INTERPRETERS = UnspecifiedConfigSection(
+  "interpreters",
+  help="One entry for each type of snippet",
+  each=ConfigSection(
+    help=_t("Information about a single Zookeeper cluster"),
+    members=dict(
+      NAME=Config(
+          "name",
+          help=_t("Nice name"),
+          default="SQL",
+          type=str,
+      ),
+      INTERFACE=Config(
+          "interface",
+          help="The backend connection to use to communicate with the server",
+          default="hiveserver2",
+          type=str,
+      ),
+    )
+  )
 )

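A minimal usage sketch of the new binding (names come from the diff above; the printed values assume the hue.ini defaults shipped in this commit):

from notebook.conf import INTERPRETERS

interpreters = INTERPRETERS.get()   # dict-like, keyed by the [[[section]]] id
hive = interpreters['hive']
print(hive.NAME.get())              # 'Hive'
print(hive.INTERFACE.get())         # 'hiveserver2'
# Members omitted in hue.ini fall back to the defaults declared above:
# name -> 'SQL', interface -> 'hiveserver2'.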
+ 3 - 2
desktop/libs/notebook/src/notebook/views.py

@@ -26,11 +26,11 @@ from desktop.lib.json_utils import JSONEncoderForHTML
 from desktop.models import Document2, Document
 from spark.conf import LIVY_SERVER_SESSION_KIND
 
-from notebook.conf import LANGUAGES
 from notebook.decorators import check_document_access_permission, check_document_modify_permission
 from notebook.connectors.base import Notebook, get_api
 from notebook.management.commands.notebook_setup import Command
 from notebook.connectors.spark_shell import SparkApi
+from notebook.conf import INTERPRETERS
 
 
 LOG = logging.getLogger(__name__)
@@ -54,8 +54,9 @@ def notebook(request):
   return render('notebook.mako', request, {
       'notebooks_json': json.dumps([notebook.get_data()]),
       'options_json': json.dumps({
-          'languages': LANGUAGES.get(),
+          'languages': [{"name": INTERPRETERS.get()[i].NAME.get(), "type": i} for i in INTERPRETERS.get()],
           'snippet_placeholders' : {
+              'sql': _('Example: 1 + 1, or press CTRL + space'),
               'spark': _('Example: 1 + 1, or press CTRL + space'),
               'pyspark': _('Example: 1 + 1, or press CTRL + space'),
               'impala': _('Example: SELECT * FROM tablename, or press CTRL + space'),
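With the shipped defaults, the list comprehension above yields a languages payload along these lines (it iterates a dict, so ordering is not guaranteed):

# Roughly what options_json carries for 'languages' with the hue.ini
# defaults added in this commit:
[
  {"name": "Hive", "type": "hive"},
  {"name": "Impala", "type": "impala"},
  {"name": "Scala", "type": "scala"},
  {"name": "PySpark", "type": "python"},
  {"name": "R", "type": "r"},
  {"name": "Spark submit Jar", "type": "jar"},
  {"name": "Spark submit Python", "type": "py"},
  {"name": "Text", "type": "text"},
]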