
HUE-8259 [core] Integrate task server in the configuration

For now, use the logs directory for result storage, as it is automatically created in dev mode.
Romain Rigaux · 6 years ago · commit 634ddbb957

+ 2 - 2
README.md

@@ -4,7 +4,7 @@
 Query. Explore. Repeat.
 -----------------------
 
-Hue is an open source Analytic Workbench for browsing, querying and visualizing data with focus on SQL and Search: [gethue.com](http://gethue.com)
+Hue is an open source SQL Workbench for browsing, querying and visualizing data in cloud/on-prem Data Warehouses: [gethue.com](http://gethue.com)
 
 It features:
 
@@ -21,7 +21,7 @@ It features:
 
 Who is using Hue
 ----------------
-Thousands of companies and organizations use Hue to open-up and query their data in order to make smarter decisions. Just at Cloudera, Hue is heavily used by hundreds of customers executing millions of queries daily. Hue directly ships in Cloudera, Amazon, MapR, BigTop and is compatible with the other distributions.
+Thousands of companies and organizations use Hue to open up their data and provide self-service querying in order to make smarter decisions. Just at Cloudera, Hue is heavily used by hundreds of customers executing millions of queries daily. Hue directly ships in Cloudera, Amazon, MapR, BigTop and is compatible with the other distributions.
 
 
 Getting Started

+ 0 - 27
celery_rabbitmq_install.txt

@@ -1,27 +0,0 @@
-Hue Celery/RabbitMQ Install
-
-Installation tested on Ubuntu 16.04
-
-echo "deb https://packages.erlang-solutions.com/ubuntu $(lsb_release -sc) contrib" >> /etc/apt/sources.list.d/erlang.list
-wget https://packages.erlang-solutions.com/ubuntu/erlang_solutions.asc
-sudo apt-key add erlang_solutions.asc
-
-echo "deb https://dl.bintray.com/rabbitmq/debian $(lsb_release -sc) main" >> /etc/apt/sources.list.d/rabbitmq.list
-wget -O- https://dl.bintray.com/rabbitmq/Keys/rabbitmq-release-signing-key.asc | sudo apt-key add -
-wget -O- https://www.rabbitmq.com/rabbitmq-release-signing-key.asc | sudo apt-key add -
-
-
-apt update -y
-apt-get install -y erlang
-apt-get install -y rabbitmq-server
-
-systemctl enable rabbitmq-server
-systemctl start rabbitmq-server
-
-rabbitmq-plugins enable rabbitmq_management
-rabbitmqctl cluster_status
-rabbitmqctl add_user hueuser cloudera
-rabbitmqctl add_vhost huevhost
-rabbitmqctl set_user_tags hueuser administrator
-rabbitmqctl set_permissions -p huevhost hueuser ".*" ".*" ".*"
-rabbitmqctl delete_user guest

+ 17 - 0
desktop/conf.dist/hue.ini

@@ -779,6 +779,23 @@
    ## collection_interval=30000
 
 
+  # Configuration options for the Task Server
+  # ------------------------------------------------------------------------
+  [[task_server]]
+
+   # If enabled, resource intensive or blocking operations can be delegated to an already running task server.
+   ## enabled=False
+
+   # How the task server and tasks communicate.
+   ## broker_url=amqp://guest:guest@localhost//
+
+   # Local file system path used to store task results when using the file result backend.
+   ## result_backend=file://$HUE_ROOT/logs
+
+   # Default options provided to the task server at startup.
+   ## celeryd_opts='--time-limit=300'
+
+
 ###########################################################################
 # Settings to configure the snippets available in the Notebook
 ###########################################################################
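
For reference, the result_backend default above is derived from the Hue run root in desktop/core/src/desktop/conf.py (further down in this commit); a minimal sketch of the value it produces, with an illustrative path:

```
from desktop.lib.paths import get_run_root

# e.g. 'file:///home/user/hue/logs' in a dev checkout (path is illustrative)
default_result_backend = 'file://%s' % get_run_root('logs')
```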

+ 17 - 0
desktop/conf/pseudo-distributed.ini.tmpl

@@ -781,6 +781,23 @@
    ## collection_interval=30000
 
 
+  # Configuration options for the Task Server
+  # ------------------------------------------------------------------------
+  [[task_server]]
+
+   # If enabled, resource intensive or blocking operations can be delegated to an already running task server.
+   ## enabled=False
+
+   # How the task server and tasks communicate.
+   ## broker_url=amqp://guest:guest@localhost//
+
+   # Local file system path used to store task results when using the file result backend.
+   ## result_backend=file://$HUE_ROOT/logs
+
+   # Default options provided to the task server at startup.
+   ## celeryd_opts='--time-limit=300'
+
+
 ###########################################################################
 # Settings to configure the snippets available in the Notebook
 ###########################################################################

+ 23 - 0
desktop/core/src/desktop/celery.py

@@ -0,0 +1,23 @@
+from __future__ import absolute_import, unicode_literals
+import os
+from celery import Celery
+
+# set the default Django settings module for the 'celery' program.
+os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'desktop.settings')
+
+app = Celery('desktop')
+
+# Using a string here means the worker doesn't have to serialize
+# the configuration object to child processes.
+# - namespace='CELERY' means all celery-related configuration keys
+#   should have a `CELERY_` prefix.
+app.config_from_object('django.conf:settings', namespace='CELERY')
+
+# Load task modules from all registered Django app configs.
+app.autodiscover_tasks()
+
+
+@app.task(bind=True)
+def debug_task(self):
+    print('Request: {0!r}'.format(self.request))
+    return 'Hello'
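
app.autodiscover_tasks() above looks for a tasks.py module in each installed Django app; a minimal sketch of such a module (the module path and task are hypothetical, not part of this commit):

```
# myapp/tasks.py -- hypothetical example of a task picked up by autodiscover_tasks()
from celery import shared_task

@shared_task
def add(x, y):
    # Runs on the task server; the return value goes to the configured result backend.
    return x + y
```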

+ 34 - 1
desktop/core/src/desktop/conf.py

@@ -40,7 +40,7 @@ from desktop.lib.conf import Config, ConfigSection, UnspecifiedConfigSection,\
                              validate_path, list_of_compiled_res, coerce_str_lowercase, \
                              coerce_password_from_script, coerce_string
 from desktop.lib.i18n import force_unicode
-from desktop.lib.paths import get_desktop_root
+from desktop.lib.paths import get_desktop_root, get_run_root
 
 LOG = logging.getLogger(__name__)
 
@@ -1610,6 +1610,39 @@ IS_K8S_ONLY = Config(
 )
 
 
+def task_server_default_result_directory():
+  """Local directory to store task results."""
+  return 'file://%s' % get_run_root('logs')
+
+
+TASK_SERVER = ConfigSection(
+  key="task_server",
+  help=_("Task Server configuration."),
+  members=dict(
+    ENABLED = Config(
+      key='enabled',
+      default=False,
+      type=coerce_bool,
+      help=_('If enabled, resource intensive or blocking operations can be delegated to an already running task server.')
+    ),
+    BROKER_URL = Config(
+      key='broker_url',
+      default='amqp://guest:guest@localhost//',
+      help=_('How the task server and tasks communicate.')
+    ),
+    RESULT_BACKEND = Config(
+      key='result_backend',
+      dynamic_default=task_server_default_result_directory,
+      help=_('Local file system path used to store task results when using the file result backend.')
+    ),
+    RESULT_CELERYD_OPTS = Config(
+      key='celeryd_opts',
+      default='--time-limit=300',
+      help=_('Default options provided to the task server at startup.')
+    )
+))
+
+
 def get_clusters(user):
   clusters = []
   cluster_config = CLUSTERS.get()
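
These Config entries expose the [desktop] [[task_server]] keys from hue.ini to Python; a sketch of how they are read (mirrors the settings.py change below):

```
from desktop import conf

if conf.TASK_SERVER.ENABLED.get():
  broker = conf.TASK_SERVER.BROKER_URL.get()       # 'amqp://guest:guest@localhost//' unless overridden
  backend = conf.TASK_SERVER.RESULT_BACKEND.get()  # file://<run root>/logs via dynamic_default when unset
```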

+ 23 - 17
desktop/core/src/desktop/settings.py

@@ -203,7 +203,7 @@ INSTALLED_APPS = [
     # App that keeps track of failed logins.
     'axes',
     'webpack_loader',
-    'django_celery_results',
+    #'django_celery_results',
 ]
 
 WEBPACK_LOADER = {
@@ -217,22 +217,6 @@ LOCALE_PATHS = [
   get_desktop_root('core/src/desktop/locale')
 ]
 
-# Celery
-CELERY_BROKER_URL='pyamqp://hueuser:cloudera@localhost:5672/huevhost/'
-#CELERY_BIN=
-#CELERY_RESULT_BACKEND='django-db'
-CELERY_APP="desktop"
-CELERYD_OPTS="--time-limit=300 --concurrency=8"
-
-# %n will be replaced with the first part of the nodename.
-CELERYD_LOG_FILE="/var/log/celery/%n%I.log"
-CELERYD_PID_FILE="/var/run/celery/%n.pid"
-CELERY_CREATE_DIRS=1
-CELERYD_USER="root"
-CELERYD_GROUP="root"
-
-CELERY_RESULT_BACKEND = 'django-db'
-
 # Keep default values up to date
 GTEMPLATE_CONTEXT_PROCESSORS = (
   'django.contrib.auth.context_processors.auth',
@@ -656,3 +640,25 @@ if DEBUG and desktop.conf.ENABLE_DJANGO_DEBUG_TOOL.get():
           }
       }
   })
+
+
+################################################################
+# Celery settings
+################################################################
+
+if desktop.conf.TASK_SERVER.ENABLED.get():
+  CELERY_BROKER_URL = desktop.conf.TASK_SERVER.BROKER_URL.get()
+  
+  CELERY_ACCEPT_CONTENT = ['json']
+  CELERY_RESULT_BACKEND = desktop.conf.TASK_SERVER.RESULT_BACKEND.get()
+  CELERY_TASK_SERIALIZER = 'json'
+  
+  CELERYD_OPTS = desktop.conf.TASK_SERVER.RESULT_CELERYD_OPTS.get()
+
+# %n will be replaced with the first part of the nodename.
+# CELERYD_LOG_FILE="/var/log/celery/%n%I.log"
+# CELERYD_PID_FILE="/var/run/celery/%n.pid"
+# CELERY_CREATE_DIRS = 1
+# CELERYD_USER = desktop.conf.SERVER_USER.get()
+# CELERYD_GROUP = desktop.conf.SERVER_GROUP.get()
+
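
The CELERY_* variables above are only defined when the [[task_server]] section is enabled, and desktop/celery.py picks them up via its namespace='CELERY' loader; a quick sanity check from a Hue shell (a sketch using the names from this commit):

```
from django.conf import settings

# Both exist only when [desktop] [[task_server]] enabled=true in hue.ini
settings.CELERY_BROKER_URL
settings.CELERY_RESULT_BACKEND
```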

+ 0 - 41
desktop/libs/notebook/src/notebook/tasks.py

@@ -1,41 +0,0 @@
-from __future__ import absolute_import, unicode_literals
-import os
-import django
-# set the default Django settings module for the 'celery' program.
-os.environ.setdefault("DJANGO_SETTINGS_MODULE", "desktop.settings")
-django.setup()
-from django.conf import settings
-
-from celery import Celery
-app = Celery("desktop")
-app.config_from_object('django.conf:settings', namespace='CELERY')
-
-import json
-import logging
-
-from desktop import conf
-from desktop.conf import ENABLE_DOWNLOAD, USE_NEW_EDITOR
-
-from celery.utils.log import get_task_logger
-logger = get_task_logger(__name__)
-from django.http import HttpRequest
-from django.contrib.auth.models import User
-
-from notebook.connectors.base import get_api, _get_snippet_name
-from django.contrib.auth import authenticate
-
-@app.task()
-def download(postdict, notebook, snippet, file_format):
-    request = HttpRequest()
-    request.POST = postdict
-    user = authenticate(username="admin",password="admin")
-    request.user = user
-    response = get_api(request, snippet).download(notebook, snippet, file_format)
-    f=open("/tmp/foo","w")
-    for data in response.streaming_content:
-      f.write(data)
-    f.close()
-    return 0
-
-if __name__ == '__main__':
-    task = download.s(notebook, snippet, file_format).delay()

+ 1 - 23
desktop/libs/notebook/src/notebook/views.py

@@ -40,7 +40,6 @@ from notebook.decorators import check_editor_access_permission, check_document_a
 from notebook.management.commands.notebook_setup import Command
 from notebook.models import make_notebook
 
-import notebook.tasks as ntasks
 
 LOG = logging.getLogger(__name__)
 
@@ -314,7 +313,7 @@ def copy(request):
 
 
 @check_document_access_permission()
-def download_new(request):
+def download(request):
   if not ENABLE_DOWNLOAD.get():
     return serve_403_error(request)
 
@@ -333,27 +332,6 @@ def download_new(request):
 
   return response
 
-@check_document_access_permission()
-def download(request):
-  if not ENABLE_DOWNLOAD.get():
-    return serve_403_error(request)
-
-  notebook = json.loads(request.POST.get('notebook', '{}'))
-  snippet = json.loads(request.POST.get('snippet', '{}'))
-  file_format = request.POST.get('format', 'csv')
-
-  ntasks.download.delay(request.POST, notebook, snippet, file_format)
-  #response = get_api(request, snippet).download(notebook, snippet, file_format, user_agent=request.META.get('HTTP_USER_AGENT'))
-  response = {}
-
-  if response:
-    request.audit = {
-      'operation': 'DOWNLOAD',
-      'operationText': 'User %s downloaded results from %s as %s' % (request.user.username, _get_snippet_name(notebook), file_format),
-      'allowed': True
-    }
-
-  return response
 
 def install_examples(request):
   response = {'status': -1, 'message': ''}

+ 35 - 1
docs/admin-manual/manual.md

@@ -217,7 +217,6 @@ components. Your Hue installation is now running.
 
 # Configuration
 
-
 ## Reference Architecture
 * 3 Hues and 1 Load Balancer
 * Databases: MySQL InnoDB, PostgreSQL, Oracle
@@ -237,6 +236,41 @@ Hue is often run with:
 * [Apache mod Python](http://gethue.com/how-to-run-hue-with-the-apache-server/)
 * [NGINX](Using NGINX to speed up Hue)
 
+### Task Server
+
+Make sure you have RabbitMQ installed and running.
+
+```
+sudo apt-get install rabbitmq-server -y
+```
+
+In hue.ini, tell the API server that the Task Server is available:
+
+```
+[desktop]
+[[task_server]]
+enabled=true
+```
+
+Starting the Task Server:
+
+```
+./build/env/bin/celery worker -l info -A desktop
+```
+
+Running a test task:
+
+```
+./build/env/bin/hue shell
+
+from desktop.celery import debug_task
+
+debug_task.delay()
+debug_task.delay().get() # Works if the result backend is set up and the Task Server is enabled in hue.ini
+
+```
+
+
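
To confirm the worker started above is reachable over the broker, Celery's standard ping can be used from the same Hue shell (a sketch; the output shape is illustrative):

```
from desktop.celery import app

# Returns e.g. [{'celery@hostname': {'ok': 'pong'}}] while a worker is running
app.control.ping(timeout=1)
```
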
 ### Proxy
 
 A Web proxy lets you centralize all the access to a certain URL and prettify the address (e.g. ec2-54-247-321-151.compute-1.amazonaws.com --> demo.gethue.com).

+ 0 - 1
webpack-stats.json

@@ -1 +0,0 @@
-{"status":"done","chunks":{"hue":[{"name":"hue-bundle-a524402fd1cd012cb181.js","path":"/Users/jahlen/dev/hue/desktop/core/src/desktop/static/desktop/js/bundles/hue-bundle-a524402fd1cd012cb181.js"}],"hue~notebook":[{"name":"hue~notebook-bundle-a524402fd1cd012cb181.js","path":"/Users/jahlen/dev/hue/desktop/core/src/desktop/static/desktop/js/bundles/hue~notebook-bundle-a524402fd1cd012cb181.js"}],"login":[{"name":"login-bundle-a524402fd1cd012cb181.js","path":"/Users/jahlen/dev/hue/desktop/core/src/desktop/static/desktop/js/bundles/login-bundle-a524402fd1cd012cb181.js"}],"notebook":[{"name":"notebook-bundle-a524402fd1cd012cb181.js","path":"/Users/jahlen/dev/hue/desktop/core/src/desktop/static/desktop/js/bundles/notebook-bundle-a524402fd1cd012cb181.js"}],"sqlSyntaxWebWorker":[{"name":"sqlSyntaxWebWorker-bundle-a524402fd1cd012cb181.js","path":"/Users/jahlen/dev/hue/desktop/core/src/desktop/static/desktop/js/bundles/sqlSyntaxWebWorker-bundle-a524402fd1cd012cb181.js"}],"vendors~hue":[{"name":"vendors~hue-bundle-a524402fd1cd012cb181.js","path":"/Users/jahlen/dev/hue/desktop/core/src/desktop/static/desktop/js/bundles/vendors~hue-bundle-a524402fd1cd012cb181.js"}],"vendors~hue~notebook":[{"name":"vendors~hue~notebook-bundle-a524402fd1cd012cb181.js","path":"/Users/jahlen/dev/hue/desktop/core/src/desktop/static/desktop/js/bundles/vendors~hue~notebook-bundle-a524402fd1cd012cb181.js"}],"vendors~notebook":[{"name":"vendors~notebook-bundle-a524402fd1cd012cb181.js","path":"/Users/jahlen/dev/hue/desktop/core/src/desktop/static/desktop/js/bundles/vendors~notebook-bundle-a524402fd1cd012cb181.js"}]}}