
HUE-8737 [docker] Run with gunicorn for Python 3

Romain, 5 years ago
Commit 355d343b9c

+ 1 - 3
desktop/core/requirements.txt

@@ -1,3 +1,4 @@
+# Minimal version of requirements_py3.txt
 asn1crypto==0.24.0
 avro-python3==1.8.2
 Babel==2.5.1
@@ -10,7 +11,6 @@ channels-redis==2.3.2
 configobj==5.0.6
 cryptography==2.8
 Django==1.11.28 # Django 2 then 3?
-# django-auth-ldap==1.3.0
 django-axes==2.2.0
 django-celery-beat==1.4.0
 django_celery_results==1.0.4
@@ -35,7 +35,6 @@ lockfile==0.12.2
 lxml==4.5.0
 Mako==1.0.7
 Markdown==3.1
-#MarkupSafe==0.9.3
 nose==1.3.7
 openpyxl==2.6.2
 pyformance==0.3.2
@@ -43,7 +42,6 @@ pysaml2>=4.5.0
 python-crontab==2.3.6
 python-dateutil==2.4.2
 python-daemon==2.2.4
-# python-ldap==3.1.0
 python-oauth2==1.1.0
 pytidylib==0.3.2
 pytz==2015.2

+ 1 - 1
desktop/core/src/desktop/lib/wsgiserver.py

@@ -654,7 +654,7 @@ class HTTPRequest(object):
         # exc_info tuple."
         if self.sent_headers:
             try:
-                raise exc_info[1]
+                raise exc_info[1].with_traceback(exc_info[2])
             finally:
                 exc_info = None
 
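Side note: the new line is the Python 3 re-raise idiom, where an exception instance is re-raised with an explicit traceback via with_traceback() (the old three-expression raise was Python 2 only). A minimal, self-contained sketch of the pattern; the names below are illustrative, not taken from wsgiserver.py:

    import sys

    def reraise():
        try:
            1 / 0
        except ZeroDivisionError:
            exc_info = sys.exc_info()  # (type, value, traceback)
        # Re-raise later, keeping the original traceback attached.
        # Python 2 spelled this: raise exc_info[0], exc_info[1], exc_info[2]
        raise exc_info[1].with_traceback(exc_info[2])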

+ 1 - 1
tools/docker/hue/Dockerfile.py3

@@ -67,7 +67,7 @@ RUN ./build/env/bin/pip install \
   thrift-sasl==0.2.1
 
 
-COPY tools/docker/hue/conf desktop/conf
+COPY tools/docker/hue/conf3 desktop/conf
 COPY tools/docker/hue/startup.sh .
 
 USER hue

+ 381 - 0
tools/docker/hue/conf3/hue-overrides.ini

@@ -0,0 +1,381 @@
+# Lightweight Hue configuration file
+# ==================================
+
+[desktop]
+
+  # Set this to a random string, the longer the better.
+  secret_key=kasdlfjknasdfl3hbaksk3bwkasdfkasdfba23asdf
+
+  # Webserver listens on this address and port
+  http_host=0.0.0.0
+  http_port=8888
+
+  # Time zone name
+  time_zone=America/Los_Angeles
+
+  # Enable or disable debug mode.
+  django_debug_mode=false
+
+  # Enable or disable backtraces for server errors
+  http_500_debug_mode=false
+
+  app_blacklist=search,hbase,security
+
+  # Use gunicorn
+  use_cherrypy_server=false
+
+  # Gunicorn worker class: gevent, eventlet, gthread or sync.
+  gunicorn_work_class=sync
+
+  # Configuration options for specifying the Desktop Database. For more info,
+  # see http://docs.djangoproject.com/en/1.11/ref/settings/#database-engine
+  # ------------------------------------------------------------------------
+  [[database]]
+    # Database engine is typically one of:
+    # postgresql_psycopg2, mysql, sqlite3 or oracle.
+    #
+    # Note that for sqlite3, 'name' below is the path to the database file. For other backends, it is the database name
+    # Note for Oracle, options={"threaded":true} must be set in order to avoid crashes.
+    # Note for Oracle, you can use the Oracle Service Name by setting "host=" and "port=" and then "name=<host>:<port>/<service_name>".
+    # Note for MariaDB use the 'mysql' engine.
+
+    # engine=postgresql_psycopg2
+    # host=hue-postgres
+    # port=5432
+    # user=hue
+    # password=hue
+    # name=hue
+
+    # engine=mysql
+    # host=database
+    # port=3306
+    # user=root
+    # password=secret
+    # name=hue
+
+###########################################################################
+# Settings to configure the snippets available in the Notebook
+###########################################################################
+
+# [notebook]
+
+  # One entry for each type of snippet.
+  # [[interpreters]]
+    # Define the name and how to connect and execute the language.
+    # https://docs.gethue.com/administrator/configuration/editor/
+
+    # Example for Docker compose
+    # [[[mysql]]]
+    #   name = MySQL
+    #   interface=sqlalchemy
+    #   ## https://docs.sqlalchemy.org/en/latest/dialects/mysql.html
+    #   options='{"url": "mysql://root:secret@database:3306/hue"}'
+    #   ## options='{"url": "mysql://${USER}:${PASSWORD}@localhost:3306/hue"}'
+
+    # [[[hive]]]
+    #   name=Hive
+    #   interface=hiveserver2
+
+    # [[[impala]]]
+    #   name=Impala
+    #   interface=hiveserver2
+
+    # [[[sql]]]
+    #   name=SparkSql
+    #   interface=livy
+
+    # [[[spark]]]
+    #   name=Scala
+    #   interface=livy
+
+    # [[[pyspark]]]
+    #   name=PySpark
+    #   interface=livy
+
+    # [[[r]]]
+    #   name=R
+    #   interface=livy
+
+    # [[[jar]]]
+    #   name=Spark Submit Jar
+    #   interface=livy-batch
+
+    # [[[py]]]
+    #   name=Spark Submit Python
+    #   interface=livy-batch
+
+    # [[[text]]]
+    #   name=Text
+    #   interface=text
+
+    # [[[markdown]]]
+    #   name=Markdown
+    #   interface=text
+
+    # [[[sqlite]]]
+    #   name = SQLite
+    #   interface=rdbms
+
+    # [[[postgresql]]]
+    #   name = PostgreSQL
+    #   interface=rdbms
+
+    # [[[oracle]]]
+    #   name = Oracle
+    #   interface=rdbms
+
+    # [[[solr]]]
+    #   name = Solr SQL
+    #   interface=solr
+    #   ## Name of the collection handler
+    #   # options='{"collection": "default"}'
+
+    # [[[pig]]]
+    #   name=Pig
+    #   interface=oozie
+
+    # [[[java]]]
+    #   name=Java
+    #   interface=oozie
+
+    # [[[spark2]]]
+    #   name=Spark
+    #   interface=oozie
+
+    # [[[mapreduce]]]
+    #   name=MapReduce
+    #   interface=oozie
+
+    # [[[sqoop1]]]
+    #   name=Sqoop1
+    #   interface=oozie
+
+    # [[[distcp]]]
+    #   name=Distcp
+    #   interface=oozie
+
+    # [[[shell]]]
+    #   name=Shell
+    #   interface=oozie
+
+    # [[[presto]]]
+    #   name=Presto SQL
+    #   interface=presto
+    #   ## Specific options for connecting to the Presto server.
+    #   ## The JDBC driver presto-jdbc.jar needs to be in the CLASSPATH environment variable.
+    #   ## If 'user' and 'password' are omitted, they will be prompted in the UI.
+    #   options='{"url": "jdbc:presto://localhost:8080/catalog/schema", "driver": "io.prestosql.jdbc.PrestoDriver", "user": "root", "password": "root"}'
+
+    # [[[clickhouse]]]
+    #   name=ClickHouse
+    #   interface=jdbc
+    #   ## Specific options for connecting to the ClickHouse server.
+    #   ## The JDBC driver clickhouse-jdbc.jar and its related jars need to be in the CLASSPATH environment variable.
+    #   options='{"url": "jdbc:clickhouse://localhost:8123", "driver": "ru.yandex.clickhouse.ClickHouseDriver", "user": "readonly", "password": ""}'
+
+
+[dashboard]
+
+  # Activate the SQL Dashboard (beta).
+  has_sql_enabled=true
+
+
+[hadoop]
+
+  # Configuration for HDFS NameNode
+  # ------------------------------------------------------------------------
+  [[hdfs_clusters]]
+    # HA support by using HttpFs
+
+    # [[[default]]]
+      # Enter the filesystem uri
+      # fs_defaultfs=hdfs://localhost:8020
+
+      # Use WebHdfs/HttpFs as the communication mechanism.
+      # Domain should be the NameNode or HttpFs host.
+      # Default port is 14000 for HttpFs.
+      ## webhdfs_url=http://localhost:50070/webhdfs/v1
+
+  # Configuration for YARN (MR2)
+  # ------------------------------------------------------------------------
+  [[yarn_clusters]]
+
+    # [[[default]]]
+      # Enter the host on which you are running the ResourceManager
+      ## resourcemanager_host=localhost
+
+      # The port the ResourceManager IPC listens on
+      ## resourcemanager_port=8032
+
+      # URL of the ResourceManager API
+      ## resourcemanager_api_url=http://localhost:8088
+
+      # URL of the ProxyServer API
+      ## proxy_api_url=http://localhost:8088
+
+      # URL of the HistoryServer API
+      ## history_server_api_url=http://localhost:19888
+
+      # URL of the Spark History Server
+      ## spark_history_server_url=http://localhost:18088
+
+
+###########################################################################
+# Settings to configure Beeswax with Hive
+###########################################################################
+
+[beeswax]
+
+  # Host where HiveServer2 is running.
+  # If Kerberos security is enabled, use fully-qualified domain name (FQDN).
+  ## hive_server_host=localhost
+
+  # Port the HiveServer2 Thrift server listens on.
+  ## hive_server_port=10000
+
+
+###########################################################################
+# Settings to configure Impala
+###########################################################################
+
+[impala]
+  # Host of the Impala Server (one of the Impalad)
+  ## server_host=localhost
+
+  # Port of the Impala Server
+  ## server_port=21050
+
+
+###########################################################################
+# Settings to configure the Spark application.
+###########################################################################
+
+[spark]
+  # The Livy Server URL.
+  ## livy_server_url=http://localhost:8998
+
+  # Configure Livy to start in local 'process' mode, or with 'yarn' workers.
+  ## livy_server_session_kind=yarn
+
+  # Whether Livy requires client to perform Kerberos authentication.
+  ## security_enabled=false
+
+  # Host of the Sql Server
+  ## sql_server_host=localhost
+
+  # Port of the Sql Server
+  ## sql_server_port=10000
+
+  # Choose whether Hue should validate certificates received from the server.
+  ## ssl_cert_ca_verify=true
+
+
+###########################################################################
+# Settings to configure HBase Browser
+###########################################################################
+
+[hbase]
+  # Comma-separated list of HBase Thrift servers for clusters in the format of '(name|host:port)'.
+  ## hbase_clusters=(Cluster|localhost:9090)
+
+
+###########################################################################
+# Settings to configure Solr Search
+###########################################################################
+
+[search]
+
+  # URL of the Solr Server
+  ## solr_url=http://localhost:8983/solr/
+
+
+###########################################################################
+# Settings to configure liboozie
+###########################################################################
+
+[liboozie]
+  # The URL where the Oozie service runs. This is required for
+  # users to submit jobs. An empty value disables the config check.
+  ## oozie_url=http://localhost:11000/oozie
+
+
+###########################################################################
+# Settings for the AWS lib
+###########################################################################
+
+[aws]
+  [[aws_accounts]]
+    # Default AWS account
+    ## [[[default]]]
+      # AWS credentials
+      ## access_key_id=
+      ## secret_access_key=
+      ## security_token=
+
+      # Execute this script to produce the AWS access key ID.
+      ## access_key_id_script=/path/access_key_id.sh
+
+      # Execute this script to produce the AWS secret access key.
+      ## secret_access_key_script=/path/secret_access_key.sh
+
+      # Allow the use of either environment variables or
+      # the EC2 InstanceProfile to retrieve AWS credentials.
+      ## allow_environment_credentials=yes
+
+      # AWS region to use. If no region is specified, Hue will attempt to connect to the standard s3.amazonaws.com endpoint.
+      ## region=us-east-1
+
+      # Endpoint overrides
+      ## host=
+
+      # Proxy address and port
+      ## proxy_address=
+      ## proxy_port=8080
+      ## proxy_user=
+      ## proxy_pass=
+
+      # Secure connections are the default, but this can be explicitly overridden:
+      ## is_secure=true
+
+
+###########################################################################
+# Settings for the Azure lib
+###########################################################################
+[azure]
+  [[azure_accounts]]
+    # [[[default]]]
+    # client_id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxx
+    # client_secret=xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+    # tenant_id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxx
+
+    # [[abfs_clusters]]
+    # [[[default]]]
+    # fs_defaultfs=abfs://account@account.dfs.core.windows.net
+    # webhdfs_url=https://account.dfs.core.windows.net/
+
+
+###########################################################################
+# Settings to configure Metadata
+###########################################################################
+
+[metadata]
+
+  [[navigator]]
+    # Navigator API URL (without version suffix).
+    ## api_url=http://localhost:7187/api
+
+    # Which authentication to use: CM or external via LDAP or SAML.
+    ## navmetadataserver_auth_type=CMDB
+
+    # Username of the CM user used for authentication.
+    ## navmetadataserver_cmdb_user=hue
+    # CM password of the user used for authentication.
+    ## navmetadataserver_cmdb_password=
+    # Execute this script to produce the CM password. This will be used when the plain password is not set.
+    # navmetadataserver_cmdb_password_script=
+
+  # [[atlas]]
+  #  interface=atlas
+  #  api_url=http://localhost:21000/api/atlas/v2
+  #  server_user=admin
+  #  server_password=admin
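
With use_cherrypy_server=false and gunicorn_work_class set above, Hue is served by gunicorn instead of the bundled CherryPy server. As a rough sketch of what that wiring amounts to, the snippet below uses gunicorn's documented custom-application API; it is not Hue's actual startup code, and the desktop.wsgi module path is an assumption:

    from gunicorn.app.base import BaseApplication

    class HueApplication(BaseApplication):
        """Minimal gunicorn wrapper around a WSGI callable (sketch only)."""

        def __init__(self, app, options=None):
            self.options = options or {}
            self.application = app
            super().__init__()

        def load_config(self):
            # Push settings such as bind address and worker class into gunicorn.
            for key, value in self.options.items():
                self.cfg.set(key, value)

        def load(self):
            return self.application

    if __name__ == '__main__':
        from desktop.wsgi import application  # assumed WSGI entry point
        HueApplication(application, {
            'bind': '0.0.0.0:8888',   # matches http_host/http_port above
            'worker_class': 'sync',   # matches gunicorn_work_class above
        }).run()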

+ 44 - 0
tools/docker/hue/conf3/log.conf

@@ -0,0 +1,44 @@
+# Just log to stdout for Docker
+[logger_root]
+handlers=stdout
+
+[logger_access]
+handlers=stdout
+qualname=access
+
+[logger_django_auth_ldap]
+handlers=stdout
+qualname=django_auth_ldap
+
+[logger_kazoo_client]
+handlers=stdout
+qualname=kazoo.client
+
+[logger_djangosaml2]
+handlers=stdout
+qualname=djangosaml2
+
+[logger_django_db]
+handlers=stdout
+qualname=django.db.backends
+
+# Handlers
+[handler_stdout]
+level=INFO
+class=StreamHandler
+formatter=default
+args=(sys.stdout,)
+
+[formatter_default]
+class=desktop.log.formatter.Formatter
+format=[%(asctime)s] %(module)-12s %(levelname)-8s %(message)s
+datefmt=%d/%b/%Y %H:%M:%S %z
+
+[loggers]
+keys=root,access,django_auth_ldap,kazoo_client,djangosaml2,django_db
+
+[handlers]
+keys=stdout
+
+[formatters]
+keys=default
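
This file is in the stdlib logging.config.fileConfig format: the [loggers], [handlers] and [formatters] sections list the keys, and each key gets its own [logger_*]/[handler_*]/[formatter_*] section. The args=(sys.stdout,) value works because fileConfig evaluates it in the logging module's namespace, where sys is available. A minimal sketch of loading such a file (the path is illustrative, and Hue's desktop package must be importable for the custom formatter class):

    import logging
    import logging.config

    # Parse the ini-style config and install the stdout handler everywhere.
    logging.config.fileConfig('tools/docker/hue/conf3/log.conf',
                              disable_existing_loggers=False)
    logging.getLogger('access').info('request logged to stdout')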