@@ -14,181 +14,171 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

-import { useEffect, useState } from 'react';
+import { useState } from 'react';
 import { getLastKnownConfig } from '../../../config/hueConfig';
 import useSaveData from '../useSaveData/useSaveData';
 import useQueueProcessor from '../useQueueProcessor/useQueueProcessor';
 import {
   DEFAULT_CHUNK_SIZE,
-  DEFAULT_CONCURRENT_MAX_CONNECTIONS,
-  FileUploadStatus
+  DEFAULT_CONCURRENT_MAX_CONNECTIONS
 } from '../../constants/storageBrowser';
 import useLoadData from '../useLoadData/useLoadData';
 import { TaskServerResponse, TaskStatus } from '../../../reactComponents/TaskBrowser/TaskBrowser';
 import {
-  createChunks,
   getChunksCompletePayload,
+  getItemProgress,
+  getItemsTotalProgress,
   getChunkItemPayload,
-  getChunkSinglePayload,
+  createChunks,
   getStatusHashMap,
-  getTotalChunk,
-  UploadChunkItem,
-  UploadItem
-} from './util';
-import { get } from '../../../api/utils';
-import { UPLOAD_AVAILABLE_SPACE_URL } from '../../../apps/storageBrowser/api';
-
-interface UseUploadQueueResponse {
-  addFiles: (item: UploadItem[]) => void;
-  removeFile: (item: UploadItem) => void;
+  addChunkToInProcess,
+  isSpaceAvailableInServer,
+  isAllChunksOfFileUploaded
+} from './utils';
+import {
+  RegularFile,
+  ChunkedFile,
+  FileVariables,
+  FileStatus,
+  ChunkedFilesInProgress
+} from './types';
+
+interface UseChunkUploadResponse {
+  addFiles: (files: RegularFile[]) => void;
+  cancelFile: (fileUuid: RegularFile['uuid']) => void;
   isLoading: boolean;
 }

 interface ChunkUploadOptions {
   concurrentProcess?: number;
-  onStatusUpdate: (item: UploadItem, newStatus: FileUploadStatus) => void;
+  updateFileVariables: (itemId: ChunkedFile['uuid'], variables: FileVariables) => void;
   onComplete: () => void;
 }

 const useChunkUpload = ({
   concurrentProcess = DEFAULT_CONCURRENT_MAX_CONNECTIONS,
-  onStatusUpdate,
+  updateFileVariables,
   onComplete
-}: ChunkUploadOptions): UseUploadQueueResponse => {
+}: ChunkUploadOptions): UseChunkUploadResponse => {
   const config = getLastKnownConfig();
   const chunkSize = config?.storage_browser?.file_upload_chunk_size ?? DEFAULT_CHUNK_SIZE;
-  const [processingItem, setProcessingItem] = useState<UploadItem>();
-  const [pendingUploadItems, setPendingUploadItems] = useState<UploadItem[]>([]);
-  const [awaitingStatusItems, setAwaitingStatusItems] = useState<UploadItem[]>([]);
-
-  const onError = () => {
-    if (processingItem) {
-      onStatusUpdate(processingItem, FileUploadStatus.Failed);
-      setProcessingItem(undefined);
-    }
-  };
-
-  const onSuccess = (item: UploadItem) => () => {
-    setAwaitingStatusItems(prev => [...prev, item]);
-    setProcessingItem(undefined);
-  };
+  const [filesWaitingFinalStatus, setFilesWaitingFinalStatus] = useState<ChunkedFile['uuid'][]>([]);
+  const [filesInProgress, setFilesInProgress] = useState<ChunkedFilesInProgress>({});

   const { save } = useSaveData(undefined, {
     postOptions: {
       qsEncodeData: false,
       headers: { 'Content-Type': 'multipart/form-data' }
-    },
-    onError
+    }
   });

-  const updateItemStatus = (serverResponse: TaskServerResponse[]) => {
-    const statusMap = getStatusHashMap(serverResponse);
-
-    const remainingItems = awaitingStatusItems.filter(item => {
-      const status = statusMap[item.uuid];
-      if (status === TaskStatus.Success || status === TaskStatus.Failure) {
-        const ItemStatus =
-          status === TaskStatus.Success ? FileUploadStatus.Uploaded : FileUploadStatus.Failed;
-        onStatusUpdate(item, ItemStatus);
-        return false;
+  const processTaskServerResponse = (response: TaskServerResponse[]) => {
+    const statusMap = getStatusHashMap(response);
+    setFilesWaitingFinalStatus(prev => {
+      const remainingFiles = prev.filter(uuid => {
+        const fileStatus = statusMap[uuid];
+        if (fileStatus === TaskStatus.Success || fileStatus === TaskStatus.Failure) {
+          const mappedStatus =
+            fileStatus === TaskStatus.Success ? FileStatus.Uploaded : FileStatus.Failed;
+          updateFileVariables(uuid, { status: mappedStatus });
+          return false; // drop the file once its final status has been received
+        }
+        return true;
+      });
+      if (remainingFiles.length === 0) {
+        onComplete();
       }
-      return true;
+      return remainingFiles;
     });
-    if (remainingItems.length === 0) {
-      onComplete();
-    }
-    setAwaitingStatusItems(remainingItems);
   };

-  const { data: tasksStatus } = useLoadData<TaskServerResponse[]>(
-    '/desktop/api2/taskserver/get_taskserver_tasks/',
-    {
-      pollInterval: awaitingStatusItems.length ? 5000 : undefined,
-      skip: !awaitingStatusItems.length,
-      transformKeys: 'none'
-    }
-  );
+  useLoadData<TaskServerResponse[]>('/desktop/api2/taskserver/get_taskserver_tasks/', {
+    pollInterval: 5000,
+    skip: filesWaitingFinalStatus.length === 0,
+    onSuccess: processTaskServerResponse,
+    transformKeys: 'none'
+  });

-  useEffect(() => {
-    if (tasksStatus) {
-      updateItemStatus(tasksStatus);
-    }
-  }, [tasksStatus]);
-
-  const onChunksUploadComplete = async () => {
-    if (processingItem) {
-      const { url, payload } = getChunksCompletePayload(processingItem, chunkSize);
-      return save(payload, {
-        url,
-        onSuccess: onSuccess(processingItem)
-      });
-    }
+  const handleAllChunksUploaded = (chunk: ChunkedFile) => {
+    const { url, payload } = getChunksCompletePayload(chunk);
+    return save(payload, {
+      url,
+      onSuccess: () => setFilesWaitingFinalStatus(prev => [...prev, chunk.uuid]),
+      onError: error => updateFileVariables(chunk.uuid, { status: FileStatus.Failed, error })
+    });
   };

-  const uploadChunk = async (chunkItem: UploadChunkItem) => {
-    const { url, payload } = getChunkItemPayload(chunkItem, chunkSize);
-    return save(payload, { url });
+  const onChunkUploadSuccess = (chunk: ChunkedFile) => () => {
+    setFilesInProgress(prev => {
+      const isAllChunksUploaded = isAllChunksOfFileUploaded(prev, chunk);
+      if (isAllChunksUploaded) {
+        handleAllChunksUploaded(chunk); // notify the server that every chunk of this file has arrived
+        delete prev[chunk.uuid]; // stop tracking the fully uploaded file
+      }
+
+      return prev;
+    });
   };

-  const { enqueue } = useQueueProcessor<UploadChunkItem>(uploadChunk, {
-    concurrentProcess,
-    onSuccess: onChunksUploadComplete
-  });
+  const onUploadProgress = (chunk: ChunkedFile) => (chunkProgress: ProgressEvent) => {
+    setFilesInProgress(prev => {
+      const allChunks = prev[chunk.uuid] || [];
+      const currentChunk = allChunks.find(c => c.chunkNumber === chunk.chunkNumber);
+      if (!currentChunk) {
+        return prev;
+      }
+      currentChunk.progress = getItemProgress(chunkProgress);

-  const uploadItemInChunks = (item: UploadItem) => {
-    const chunks = createChunks(item, chunkSize);
-    return enqueue(chunks);
+      const totalProgress = getItemsTotalProgress(chunk, allChunks);
+      updateFileVariables(chunk.uuid, { progress: totalProgress });
+      return { ...prev, [chunk.uuid]: allChunks };
+    });
   };

-  const uploadItemInSingleChunk = async (item: UploadItem) => {
-    const { url, payload } = getChunkSinglePayload(item, chunkSize);
+  const uploadChunkToServer = async (chunk: ChunkedFile) => {
+    const { url, payload } = getChunkItemPayload(chunk);
     return save(payload, {
       url,
-      onSuccess: onSuccess(item)
+      onSuccess: onChunkUploadSuccess(chunk),
+      onError: error => updateFileVariables(chunk.uuid, { status: FileStatus.Failed, error }),
+      postOptions: { onUploadProgress: onUploadProgress(chunk) }
     });
   };

-  const checkAvailableSpace = async (fileSize: number) => {
-    const { upload_available_space: availableSpace } = await get<{
-      upload_available_space: number;
-    }>(UPLOAD_AVAILABLE_SPACE_URL);
-    return availableSpace >= fileSize;
-  };
-
-  const uploadItem = async (item: UploadItem) => {
-    const isSpaceAvailable = await checkAvailableSpace(item.file.size);
-    if (!isSpaceAvailable) {
-      onStatusUpdate(item, FileUploadStatus.Failed);
-      return Promise.resolve();
+  const processChunkedFile = async (chunk: ChunkedFile): Promise<void> => {
+    const isFirstChunk = !filesInProgress[chunk.uuid];
+    if (isFirstChunk) {
+      updateFileVariables(chunk.uuid, { status: FileStatus.Uploading });
+      const isUploadPossible = await isSpaceAvailableInServer(chunk.totalSize);
+      if (!isUploadPossible) {
+        const error = new Error('Upload server ran out of space. Try again later.');
+        cancelFile(chunk.uuid);
+        return updateFileVariables(chunk.uuid, { status: FileStatus.Failed, error });
+      }
     }
+    setFilesInProgress(prev => addChunkToInProcess(prev, chunk)); // track the chunk for progress and completion checks

-    onStatusUpdate(item, FileUploadStatus.Uploading);
-    const chunks = getTotalChunk(item.file.size, chunkSize);
-    if (chunks === 1) {
-      return uploadItemInSingleChunk(item);
-    }
-    return uploadItemInChunks(item);
+    return uploadChunkToServer(chunk);
   };

-  const addFiles = (newItems: UploadItem[]) => {
-    setPendingUploadItems(prev => [...prev, ...newItems]);
-  };
+  const { enqueue, dequeue } = useQueueProcessor<ChunkedFile>(processChunkedFile, {
+    concurrentProcess
+  });

-  const removeFile = (item: UploadItem) => {
-    setPendingUploadItems(prev => prev.filter(i => i.uuid !== item.uuid));
+  const addFiles = (newFiles: RegularFile[]) => {
+    newFiles.forEach(file => {
+      const chunks = createChunks(file, chunkSize);
+      enqueue(chunks);
+    });
   };

-  useEffect(() => {
-    // Ensures one file is broken down in chunks and uploaded to the server
-    if (!processingItem && pendingUploadItems.length) {
-      const item = pendingUploadItems[0];
-      setProcessingItem(item);
-      setPendingUploadItems(prev => prev.slice(1));
-      uploadItem(item);
-    }
-  }, [pendingUploadItems, processingItem]);
+  const cancelFile = (fileUuid: ChunkedFile['uuid']) => dequeue(fileUuid, 'uuid'); // drop all queued chunks carrying this file uuid

-  return { addFiles, removeFile, isLoading: !!processingItem || !!pendingUploadItems.length };
+  return {
+    addFiles,
+    cancelFile,
+    isLoading: !!(filesWaitingFinalStatus.length || Object.keys(filesInProgress).length)
+  };
 };

 export default useChunkUpload;