@@ -26,6 +26,17 @@ import sys
 import tempfile
 import zipfile
+from celery.app.control import Control
+from desktop.conf import TASK_SERVER
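+# Import the Celery app only when the task server is enabled in the Hue config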
+if hasattr(TASK_SERVER, 'get') and TASK_SERVER.ENABLED.get():
+  from desktop.celery import app as celery_app
+import psutil
+import datetime
+import redis
+import re
+from django.http import HttpResponse, JsonResponse
+
+
 from collections import defaultdict
 from datetime import datetime
@@ -47,7 +58,7 @@ from beeswax.models import Namespace
 from desktop import appmanager
 from desktop.auth.backend import is_admin
 from desktop.conf import ENABLE_CONNECTORS, ENABLE_GIST_PREVIEW, CUSTOM, get_clusters, IS_K8S_ONLY, ENABLE_SHARING
-from desktop.conf import ENABLE_NEW_STORAGE_BROWSER, ENABLE_CHUNKED_FILE_UPLOADER
+from desktop.conf import ENABLE_NEW_STORAGE_BROWSER, ENABLE_CHUNKED_FILE_UPLOADER, TASK_SERVER
 from desktop.lib.conf import BoundContainer, GLOBAL_CONFIG, is_anonymous
 from desktop.lib.django_util import JsonResponse, login_notrequired, render
 from desktop.lib.exceptions_renderable import PopupException
@@ -57,6 +68,9 @@ from desktop.lib.paths import get_desktop_root
 from desktop.models import Document2, Document, Directory, FilesystemException, uuid_default, \
   UserPreferences, get_user_preferences, set_user_preferences, get_cluster_config, __paginate, _get_gist_document
 from desktop.views import get_banner_message, serve_403_error
+from desktop.log import DEFAULT_LOG_DIR
+from filebrowser.tasks import check_disk_usage_and_clean_task
+from filebrowser.tasks import document_cleanup_task
 from hadoop.cluster import is_yarn
@@ -101,6 +115,7 @@ def get_config(request):
   config['hue_config']['is_yarn_enabled'] = is_yarn()
   config['hue_config']['enable_new_storage_browser'] = ENABLE_NEW_STORAGE_BROWSER.get()
   config['hue_config']['enable_chunked_file_uploader'] = ENABLE_CHUNKED_FILE_UPLOADER.get()
+  config['hue_config']['enable_task_server'] = TASK_SERVER.ENABLED.get()
   config['clusters'] = list(get_clusters(request.user).values())
   config['documents'] = {
     'types': list(Document2.objects.documents(user=request.user).order_by().values_list('type', flat=True).distinct())
@@ -688,6 +703,137 @@ def share_document(request):
     'document': doc.to_dict()
   })
 
+@api_error_handler
+@require_POST
+def handle_submit(request):
+  # Extract the task name and params from the request
+  try:
+    data = json.loads(request.body)
+    task_name = data.get('taskName')
+    task_params = data.get('taskParams')
+  except json.JSONDecodeError as e:
+    return JsonResponse({'error': str(e)}, status=400)
+
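+  # Dispatch the matching Celery task based on the submitted task name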
+  if task_name == 'document cleanup':
+    keep_days = task_params.get('keep-days')
+    task_kwargs = {
+      'keep_days': keep_days,
+      'user_id': request.user.id,
+      'username': request.user.username,
+    }
+    # Enqueue the document cleanup task with keyword arguments
+    task = document_cleanup_task.apply_async(kwargs=task_kwargs)
+
+    # Return a response indicating the task has been scheduled
+    return JsonResponse({
+      'taskName': task_name,
+      'taskParams': task_params,
+      'status': 'Scheduled',
+      'task_id': task.id,  # The task ID generated by Celery
+    })
+
+  elif task_name == 'tmp clean up':
+    cleanup_threshold = task_params.get('threshold for clean up')
+    disk_check_interval = task_params.get('disk check interval')
+    task_kwargs = {
+      'username': request.user.username,
+      'cleanup_threshold': cleanup_threshold,
+      'disk_check_interval': disk_check_interval
+    }
+    task = check_disk_usage_and_clean_task.apply_async(kwargs=task_kwargs)
+
+    return JsonResponse({
+      'taskName': task_name,
+      'status': 'Scheduled',
+      'task_id': task.id,  # The task ID generated by Celery
+    })
+
+  return JsonResponse({
+    'status': 0
+  })
+
+@api_error_handler
+def get_taskserver_tasks(request):
+  """Retrieve the tasks from Redis."""
+  redis_client = redis.Redis(host='localhost', port=6379, db=0)
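+  # Assumes the task server result backend is the default local Redis (localhost:6379, db 0)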
+  tasks = []
+
+  # Use scan_iter to efficiently iterate over keys matching 'celery-task-meta-*'
+  for key in redis_client.scan_iter('celery-task-meta-*'):
+    task = json.loads(redis_client.get(key))
+    tasks.append(task)
+
+  # Use scan_iter to efficiently iterate over keys matching 'task:*'
+  for key in redis_client.scan_iter('task:*'):
+    task = json.loads(redis_client.get(key))
+    tasks.append(task)
+
+  return JsonResponse(tasks, safe=False)
+
+@api_error_handler
+def check_upload_status(request, task_id):
+  redis_client = redis.Redis(host='localhost', port=6379, db=0)
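+  # Celery's Redis result backend stores task state under 'celery-task-meta-<task_id>'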
+  task_key = f'celery-task-meta-{task_id}'
+  task_data = redis_client.get(task_key)
+
+  if task_data is None:
+    return JsonResponse({'error': 'Task not found'}, status=404)
+
+  task = json.loads(task_data)
+  is_finalized = task.get('status') == 'SUCCESS'
+  is_running = task.get('status') == 'RUNNING'
+  is_failure = task.get('status') == 'FAILURE'
+  is_revoked = task.get('status') == 'REVOKED'
+
+  return JsonResponse({'isFinalized': is_finalized, 'isRunning': is_running, 'isFailure': is_failure, 'isRevoked': is_revoked})
+
+@api_error_handler
+def kill_task(request, task_id):
+  # Check the current status of the task
+  status_response = check_upload_status(request, task_id)
+  status_data = json.loads(status_response.content)
+
+  if status_data.get('isFinalized') or status_data.get('isRevoked') or status_data.get('isFailure'):
+    return JsonResponse({'status': 'info', 'message': f'Task {task_id} has already been completed or revoked.'})
+
+  try:
+    control = Control(app=celery_app)
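+    # terminate=True also stops the task if a worker has already started executing it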
+    control.revoke(task_id, terminate=True)
+    return JsonResponse({'status': 'success', 'message': f'Task {task_id} has been terminated.'})
+  except Exception as e:
+    return JsonResponse({'status': 'error', 'message': f'Failed to terminate task {task_id}: {str(e)}'})
+
+
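+# Report the free bytes on the filesystem backing /tmp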
+def get_available_space(request):
+  free_space = psutil.disk_usage('/tmp').free
+  return JsonResponse({'free_space': free_space})
+
+def get_task_logs(request, task_id):
+  log_dir = os.getenv("DESKTOP_LOG_DIR", DEFAULT_LOG_DIR)
+  log_file = "%s/rungunicornserver.log" % log_dir
+  task_log = []
+  escaped_task_id = re.escape(task_id)
+
+  # Match the log lines marking when the task was received and when it succeeded
+  task_end_pattern = re.compile(rf"\[{escaped_task_id}\].*succeeded")
+  task_start_pattern = re.compile(rf"\[{escaped_task_id}\].*received")
+  try:
+    with open(log_file, 'r') as file:
+      recording = False
+      for line in file:
+        if task_start_pattern.search(line):
+          recording = True  # Start recording log lines
+        if recording:
+          task_log.append(line)
+        if task_end_pattern.search(line) and recording:
+          break  # Stop recording after finding the end of the task
+
+  except FileNotFoundError:
+    return HttpResponse(f'Log file not found at {log_file}', status=404)
+  except Exception as e:
+    return HttpResponse(str(e), status=500)
+
+  return HttpResponse(''.join(task_log), content_type='text/plain')
 
 @api_error_handler
 @require_POST