Skip to content

Commit

Permalink
fix(bigFile): max upload size
Browse files Browse the repository at this point in the history
  • Loading branch information
ido-pluto committed Jul 28, 2024
1 parent 124519c commit 0abe5da
Show file tree
Hide file tree
Showing 2 changed files with 38 additions and 26 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -4,13 +4,15 @@ type ProgressCallback = (progress: number, total: number) => void;

/**
 * Tuning options for chunked ("big file") uploads.
 */
export type BigFileUploadOptions = {
    // Maximum retry attempts for a failed chunk upload.
    retryChunks: number;
    // Milliseconds to wait between chunk retries; optional — callers may omit it,
    // in which case the retry helper falls back to its own default.
    retryDelay?: number;
    // Size of each uploaded chunk, in bytes.
    chunkSize: number;
    // How many chunks of one file may be in flight concurrently.
    parallelChunks: number;
    // How many files may upload concurrently.
    parallelUploads: number;
};

const UPLOAD_BIG_FILE_OPTIONS: BigFileUploadOptions = {
retryChunks: 5,
retryDelay: 1000,
chunkSize: 1024 * 1024 * 5,
parallelChunks: 3,
parallelUploads: 3,
Expand Down Expand Up @@ -81,38 +83,32 @@ async function uploadBigFile(fileId: string, file: File, progressCallback: Progr
total: totalChunks,
};

const uploadPromiseWithRetry = (async function uploadPromise(retry = options.retryChunks) {
const upload = uploadChunkWithXHR(chunk, info, (loaded) => {
const uploadPromiseWithRetry = retry(async () => {
const upload = await uploadChunkWithXHR(chunk, info, (loaded) => {
activeLoads.set(i, loaded);
const loadedSize = Array.from(activeLoads.values()).reduce((a, b) => a + b, 0);
progressCallback(finishedSize + loadedSize, totalSize);
});

try {
const response: any = await upload;
if (response?.missingChunks && activeChunks.size < options.parallelChunks) {
const promises: Promise<any>[] = [];
for (const chunk of response.missingChunks) {
const {promise} = await uploadChunk(chunk - 1);
promises.push(promise);
}
await Promise.all(promises);
const response: any = await upload;
if (response?.missingChunks && activeChunks.size < options.parallelChunks) {
const promises: Promise<any>[] = [];
for (const chunk of response.missingChunks) {
const { promise } = await uploadChunk(chunk - 1);
promises.push(promise);
}
await Promise.all(promises);
}

if (!response?.ok) {
throw new Error(response.error);
}
} catch (error) {
if (retry === 0) {
throw error;
}
return await uploadPromise(retry - 1);
if (!response?.ok) {
throw new Error(response.error);
}
})().then(() => {
activeLoads.delete(i);
activeChunks.delete(uploadPromiseWithRetry);
finishedSize += chunk.size;
});
}, { retries: options.retryChunks, delay: options.retryDelay })
.then(() => {
activeLoads.delete(i);
activeChunks.delete(uploadPromiseWithRetry);
finishedSize += chunk.size;
});

activeChunks.add(uploadPromiseWithRetry);
return { promise: uploadPromiseWithRetry };
Expand Down Expand Up @@ -222,4 +218,20 @@ export function finishFormSubmission(form: HTMLFormElement, onClick?: string) {
}

form.submit();
}

/**
 * Runs `fn`, retrying on failure with a fixed delay between attempts.
 *
 * Fixes over the previous version:
 * - `retries` and `delay` are now optional with internal defaults, matching the
 *   visible call site that passes the optional `retryDelay` (previously a strict
 *   type error, and `setTimeout(res, undefined)` degraded to a 0 ms wait).
 * - `retries` below 1 is clamped so `fn` always runs at least once (previously
 *   `retries: 0` resolved silently without ever calling `fn`).
 * - Generalized to return the successful attempt's value (`Promise<T>`);
 *   backward compatible with `Promise<void>` callers.
 *
 * @param fn - The async operation to attempt.
 * @param options.retries - Maximum total attempts (default 5, minimum 1).
 * @param options.delay - Milliseconds to wait between attempts (default 1000).
 * @returns The value resolved by the first successful attempt.
 * @throws The error from the final attempt when every attempt fails.
 */
async function retry<T>(fn: () => Promise<T>, options: { retries?: number; delay?: number; } = {}): Promise<T> {
    const retries = Math.max(1, options.retries ?? 5);
    const delay = options.delay ?? 1000;
    let lastError: unknown;
    for (let attempt = 1; attempt <= retries; attempt++) {
        try {
            return await fn();
        } catch (error) {
            lastError = error;
            if (attempt === retries) {
                throw error;
            }
            // Back off before the next attempt.
            await new Promise(res => setTimeout(res, delay));
        }
    }
    // Unreachable: the loop either returns or throws on the final attempt.
    throw lastError;
}
Original file line number Diff line number Diff line change
Expand Up @@ -89,12 +89,12 @@ export async function loadUploadFiles(astro: AstroGlobal, options: Partial<LoadU
return await sendError("File size exceeded");
}

const totalDirectorySizeWithNewUpload = (await totalDirectorySize(tempDirectory)) + Math.max(uploadSize, uploadFile.size);
const totalDirectorySizeWithNewUpload = (await totalDirectorySize(tempDirectory)) + (part === 1 ? uploadSize : uploadFile.size);
if (totalDirectorySizeWithNewUpload > maxDirectorySize) {
return await sendError("Directory size exceeded");
}

const newTotalSize = (await totalDirectorySize(uploadDir)) + uploadSize;
const newTotalSize = (await totalDirectorySize(uploadDir)) + uploadFileMayBe.size;
if (newTotalSize > maxUploadSize) {
return await sendError("Upload size exceeded");
}
Expand Down

0 comments on commit 0abe5da

Please sign in to comment.