Mirror of https://github.com/streetwriters/notesnook.git (synced 2025-12-16)

web: optimize attachment uploading

Committed by: Abdullah Atta
Commit: e4a755c69f
Parent: 053f70cdb4
apps/web/package-lock.json (generated, 9853 lines changed)
File diff suppressed because it is too large.
@@ -15,6 +15,7 @@
     "@dnd-kit/sortable": "^8.0.0",
     "@emotion/react": "11.11.1",
     "@hazae41/foras": "^2.1.4",
+    "@henrygd/queue": "^1.0.6",
     "@mdi/js": "^7.2.96",
     "@mdi/react": "^1.6.1",
     "@notesnook-importer/core": "^2.0.0",
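The new `@henrygd/queue` dependency provides the concurrency-limited async queue used further down to upload up to four multipart parts in parallel. A minimal sketch of the API as it appears in this commit (the timeout task is a placeholder, not the real upload work):

```ts
import { newQueue } from "@henrygd/queue";

// Create a queue that runs at most 4 promises concurrently.
const queue = newQueue(4);

for (let i = 0; i < 10; i++) {
  // add() schedules an async task on the queue.
  queue.add(async () => {
    // placeholder work; the real code uploads one part here
    await new Promise((r) => setTimeout(r, 100));
    console.log(`task ${i} done`);
  });
}

// done() resolves once every queued task has settled.
await queue.done();
```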
@@ -51,6 +51,12 @@ export class IndexedDBFileStore implements IFileStorage {
   readChunk(chunkName: string): Promise<Uint8Array | undefined> {
     return this.storage.get(chunkName);
   }
+  async listChunks(chunkPrefix: string): Promise<string[]> {
+    const keys = await this.storage.keys();
+    return keys.filter((k) =>
+      (k as string).startsWith(chunkPrefix)
+    ) as string[];
+  }
 }

 export class CacheStorageFileStore implements IFileStorage {
@@ -108,6 +114,14 @@ export class CacheStorageFileStore implements IFileStorage {
     return response ? new Uint8Array(await response.arrayBuffer()) : undefined;
   }

+  async listChunks(chunkPrefix: string): Promise<string[]> {
+    const cache = await this.getCache();
+    const keys = await cache.keys();
+    return keys
+      .filter((k) => k.url.startsWith(`/${chunkPrefix}`))
+      .map((r) => r.url.slice(1));
+  }
+
   private toURL(chunkName: string) {
     return `/${chunkName}`;
   }
@@ -158,4 +172,8 @@ export class OriginPrivateFileSystem implements IFileStorage {
     await this.create();
     return this.worker.readChunk(this.name, chunkName);
   }
+  async listChunks(chunkPrefix: string): Promise<string[]> {
+    await this.create();
+    return (await this.worker.listChunks(this.name, chunkPrefix)) || [];
+  }
 }
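All three storage backends now expose the same `listChunks(chunkPrefix)` method, so callers can enumerate a file's chunks regardless of whether they live in IndexedDB, the Cache API, or OPFS. A hedged sketch of the contract against an in-memory map; the `IFileStorage` shape is reduced here to the methods visible in this diff, and the chunk naming convention is an assumption:

```ts
interface IFileStorage {
  readChunk(chunkName: string): Promise<Uint8Array | undefined>;
  listChunks(chunkPrefix: string): Promise<string[]>;
  // write/delete methods omitted; see the real interface in the codebase
}

class InMemoryFileStore implements IFileStorage {
  private chunks = new Map<string, Uint8Array>();

  async readChunk(chunkName: string) {
    return this.chunks.get(chunkName);
  }

  // Chunk names are assumed to share the file hash as a common prefix
  // (e.g. "<filehash>-chunk-<n>"), so a prefix scan returns every chunk
  // belonging to one file.
  async listChunks(chunkPrefix: string) {
    return [...this.chunks.keys()].filter((k) => k.startsWith(chunkPrefix));
  }
}
```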
@@ -19,7 +19,7 @@ along with this program. If not, see <http://www.gnu.org/licenses/>.

 import "web-streams-polyfill/dist/ponyfill";
 import { xxhash64, createXXHash64 } from "hash-wasm";
-import axios, { AxiosProgressEvent } from "axios";
+import axios from "axios";
 import { AppEventManager, AppEvents } from "../common/app-events";
 import { StreamableFS } from "@notesnook/streamable-fs";
 import { NNCrypto } from "./nncrypto";
@@ -50,12 +50,13 @@ import {
   RequestOptions
 } from "@notesnook/core/dist/interfaces";
 import { logger } from "../utils/logger";
+import { newQueue } from "@henrygd/queue";

 const ABYTES = 17;
 const CHUNK_SIZE = 512 * 1024;
 const ENCRYPTED_CHUNK_SIZE = CHUNK_SIZE + ABYTES;
 const UPLOAD_PART_REQUIRED_CHUNKS = Math.ceil(
-  (5 * 1024 * 1024) / ENCRYPTED_CHUNK_SIZE
+  (10 * 1024 * 1024) / ENCRYPTED_CHUNK_SIZE
 );
 const MINIMUM_MULTIPART_FILE_SIZE = 25 * 1024 * 1024;
 const streamablefs = new StreamableFS(
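The only constant that changes is the multipart part size: each encrypted chunk is 512 KiB plus a 17-byte authentication tag, and a part now targets 10 MiB instead of 5 MiB. Working through the arithmetic from the constants above:

```ts
const ABYTES = 17;                                // per-chunk auth tag
const ENCRYPTED_CHUNK_SIZE = 512 * 1024 + ABYTES; // 524305 bytes

// before: ceil(5 MiB / 524305)  = 10 chunks, ~5 MiB per part
Math.ceil((5 * 1024 * 1024) / ENCRYPTED_CHUNK_SIZE);  // 10

// after:  ceil(10 MiB / 524305) = 20 chunks, ~10 MiB per part
Math.ceil((10 * 1024 * 1024) / ENCRYPTED_CHUNK_SIZE); // 20
```

Doubling the part size halves the number of presigned PUT requests per file, which pairs with the 4-way upload queue introduced below.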
@@ -241,7 +242,7 @@ async function uploadFile(
   if (uploadedFileSize > 0) return true;

   const fileHandle = await streamablefs.readFile(filename);
-  if (!fileHandle || !(await exists(filename)))
+  if (!fileHandle || !(await exists(fileHandle)))
     throw new Error(
       `File is corrupt or missing data. Please upload the file again. (File hash: ${filename})`
     );
@@ -260,9 +261,6 @@ async function uploadFile(
     await checkUpload(filename);

     await fileHandle.addAdditionalData("uploaded", true);
-    if (isAttachmentDeletable(fileHandle.file.type)) {
-      await streamablefs.deleteFile(filename);
-    }
   }

   return uploaded;
@@ -340,7 +338,7 @@ async function multiPartUploadFile(
     {}) as UploadAdditionalData;

   const TOTAL_PARTS = Math.ceil(
-    fileHandle.file.chunks / UPLOAD_PART_REQUIRED_CHUNKS
+    fileHandle.chunks.length / UPLOAD_PART_REQUIRED_CHUNKS
   );
   const { uploadedChunks = [] } = additionalData;
   let { uploadedBytes = 0, uploadId = "" } = additionalData;
@@ -368,11 +366,11 @@ async function multiPartUploadFile(

   await fileHandle.addAdditionalData("uploadId", uploadId);

-  const onUploadProgress = (ev: AxiosProgressEvent) => {
+  const onUploadProgress = () => {
     reportProgress(
       {
         total: fileHandle.file.size + ABYTES * TOTAL_PARTS,
-        loaded: uploadedBytes + ev.loaded
+        loaded: uploadedBytes
       },
       {
         type: "upload",
@@ -381,13 +379,20 @@ async function multiPartUploadFile(
     );
   };

-  onUploadProgress({ bytes: 0, loaded: 0 });
+  onUploadProgress();
+  const queue = newQueue(4);
   for (let i = uploadedChunks.length; i < TOTAL_PARTS; ++i) {
-    const blob = await fileHandle.readChunks(
-      i * UPLOAD_PART_REQUIRED_CHUNKS,
+    const from = i * UPLOAD_PART_REQUIRED_CHUNKS;
+    const length = Math.min(
+      fileHandle.chunks.length - from,
       UPLOAD_PART_REQUIRED_CHUNKS
     );
     const url = parts[i];
+    queue.add(async () => {
+      const blob = await fileHandle.readChunks(
+        i * UPLOAD_PART_REQUIRED_CHUNKS,
+        length
+      );
       const response = await axios
         .request({
           url,
@@ -395,7 +400,10 @@ async function multiPartUploadFile(
           headers: { "Content-Type": "" },
           signal,
           data: blob,
-          onUploadProgress
+          onUploadProgress: (ev) => {
+            uploadedBytes += ev.bytes;
+            onUploadProgress();
+          }
         })
         .catch((e) => {
           throw new WrappedError(`Failed to upload part at offset ${i}`, e);
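With four parts in flight, a single request's progress event can no longer be reported directly; instead each axios event's `bytes` delta (bytes transferred since that request's previous event) is folded into the shared `uploadedBytes` counter, and one aggregate report is emitted. A standalone sketch of the pattern; the `putPart` helper, PUT method, and total size are assumptions for illustration:

```ts
import axios from "axios";

let uploadedBytes = 0;
const total = 100 * 1024 * 1024; // assumed total payload size

function report() {
  console.log(`upload: ${uploadedBytes}/${total}`);
}

// ev.bytes is the delta since this request's last progress event, so
// summing deltas across concurrent requests yields overall progress.
async function putPart(url: string, data: Blob) {
  await axios.request({
    url,
    method: "PUT",
    data,
    onUploadProgress: (ev) => {
      uploadedBytes += ev.bytes;
      report();
    }
  });
}
```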
@@ -405,17 +413,15 @@ async function multiPartUploadFile(
       throw new Error(
         `Failed to upload part at offset ${i}: invalid etag. ETag: ${response.headers.etag}`
       );

-    uploadedBytes += blob.size;
       uploadedChunks.push({
         PartNumber: i + 1,
         ETag: JSON.parse(response.headers.etag)
       });
       await fileHandle.addAdditionalData("uploadedChunks", uploadedChunks);
-    await fileHandle.addAdditionalData("uploadedBytes", uploadedBytes);
-
-    onUploadProgress({ bytes: 0, loaded: 0 });
+    });
   }
+  await queue.done();

   await axios
     .post(
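`uploadedChunks` is still persisted through `addAdditionalData` after every finished part, so an interrupted upload resumes at `uploadedChunks.length` instead of part 0. A simplified serial sketch of that resume bookkeeping (the real code runs parts through the 4-way queue; `persist` and `uploadPart` are hypothetical stand-ins for `fileHandle.addAdditionalData` and the axios PUT):

```ts
type PartETag = { PartNumber: number; ETag: string };

async function uploadParts(
  parts: string[],
  uploadedChunks: PartETag[],
  uploadPart: (url: string, partNumber: number) => Promise<string>,
  persist: (chunks: PartETag[]) => Promise<void>
) {
  // Parts before this index were completed and recorded by a
  // previous session, so skip straight past them.
  for (let i = uploadedChunks.length; i < parts.length; ++i) {
    const etag = await uploadPart(parts[i], i + 1);
    uploadedChunks.push({ PartNumber: i + 1, ETag: etag });
    await persist(uploadedChunks); // crash-safe checkpoint
  }
}
```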
@@ -423,7 +429,7 @@ async function multiPartUploadFile(
       {
         Key: filename,
         UploadId: uploadId,
-        PartETags: uploadedChunks
+        PartETags: uploadedChunks.sort((a, b) => a.PartNumber - b.PartNumber)
       },
       {
         headers,
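The sort matters because parts now complete concurrently: `uploadedChunks` can end up out of order, while S3's CompleteMultipartUpload requires the part list in ascending PartNumber order (otherwise it fails with InvalidPartOrder). A tiny sketch:

```ts
type PartETag = { PartNumber: number; ETag: string };

// Concurrent uploads may push results out of order...
const uploadedChunks: PartETag[] = [
  { PartNumber: 3, ETag: '"c"' },
  { PartNumber: 1, ETag: '"a"' },
  { PartNumber: 2, ETag: '"b"' }
];

// ...so sort ascending before completing the multipart upload.
uploadedChunks.sort((a, b) => a.PartNumber - b.PartNumber);
// => PartNumber order: 1, 2, 3
```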
@@ -473,12 +479,8 @@ async function downloadFile(
   const { url, headers, chunkSize, signal } = requestOptions;
   const handle = await streamablefs.readFile(filename);

-  if (
-    handle &&
-    handle.file.size === (await handle.size()) - handle.file.chunks * ABYTES
-  )
-    return true;
-  else if (handle) await handle.delete();
+  if (handle && (await exists(handle))) return true;
+  if (handle) await handle.delete();

   const attachment = await db.attachments.attachment(filename);
   if (!attachment) throw new Error("Attachment doesn't exist.");
@@ -488,6 +490,21 @@ async function downloadFile(
     { type: "download", hash: filename }
   );

+  const size = await getUploadedFileSize(filename);
+  if (size <= 0) {
+    const error = `File length is 0. Please upload this file again from the attachment manager. (File hash: ${filename})`;
+    await db.attachments.markAsFailed(attachment.id, error);
+    throw new Error(error);
+  }
+
+  const totalChunks = Math.ceil(size / chunkSize);
+  const decryptedLength = size - totalChunks * ABYTES;
+  if (attachment && attachment.size !== decryptedLength) {
+    const error = `File length mismatch. Expected ${attachment.size} but got ${decryptedLength} bytes. Please upload this file again from the attachment manager. (File hash: ${filename})`;
+    await db.attachments.markAsFailed(attachment.id, error);
+    throw new Error(error);
+  }
+
   const signedUrl = (
     await axios.get(url, {
       headers,
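Instead of fetching the object first and validating its Content-Length afterwards, the download path now asks for the uploaded size up front via `getUploadedFileSize` and derives the expected decrypted length before any bytes move: each of the `ceil(size / chunkSize)` encrypted chunks carries a 17-byte tag. A worked check, assuming `chunkSize` here is the encrypted chunk size from earlier and a 1 MiB plaintext attachment:

```ts
const ABYTES = 17;
const chunkSize = 512 * 1024 + ABYTES;  // encrypted chunk size: 524305

const size = 2 * chunkSize;             // uploaded size: 2 encrypted chunks

const totalChunks = Math.ceil(size / chunkSize);     // 2
const decryptedLength = size - totalChunks * ABYTES; // 1048576 = 1 MiB

// decryptedLength must equal attachment.size recorded at upload time;
// a mismatch marks the attachment as failed before downloading anything.
```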
@@ -497,55 +514,22 @@ async function downloadFile(

   logger.debug("Got attachment signed url", { filename });

-  const response = await fetch(signedUrl, {
-    signal
-  });
-
-  logger.debug("Got attachment", { filename });
-
-  const contentType = response.headers.get("content-type");
-  if (contentType === "application/xml") {
-    const error = parseS3Error(await response.text());
-    if (error.Code !== "Unknown") {
-      throw new Error(`[${error.Code}] ${error.Message}`);
-    }
-  }
-  const contentLength = parseInt(
-    response.headers.get("content-length") || "0"
-  );
-  if (contentLength === 0 || isNaN(contentLength)) {
-    const error = `File length is 0. Please upload this file again from the attachment manager. (File hash: ${filename})`;
-    await db.attachments.markAsFailed(attachment.id, error);
-    throw new Error(error);
-  }
-
-  if (!response.body) {
-    const error = `The download response does not contain a body. Please upload this file again from the attachment manager. (File hash: ${filename})`;
-    await db.attachments.markAsFailed(attachment.id, error);
-    throw new Error(error);
-  }
-
-  const totalChunks = Math.ceil(contentLength / chunkSize);
-  const decryptedLength = contentLength - totalChunks * ABYTES;
-  if (attachment && attachment.size !== decryptedLength) {
-    const error = `File length mismatch. Please upload this file again from the attachment manager. (File hash: ${filename})`;
-    await db.attachments.markAsFailed(attachment.id, error);
-    throw new Error(error);
-  }
-
   const fileHandle = await streamablefs.createFile(
     filename,
     decryptedLength,
-    attachment?.mimeType || "application/octet-stream"
+    attachment.mimeType || "application/octet-stream"
   );

+  const response = await fetch(signedUrl, {
+    signal
+  });
   await response.body
-    .pipeThrough(
+    ?.pipeThrough(
       new ProgressStream((totalRead, done) => {
         reportProgress(
           {
-            total: contentLength,
-            loaded: done ? contentLength : totalRead
+            total: size,
+            loaded: done ? size : totalRead
           },
           { type: "download", hash: filename }
         );
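The body is now piped only after the local file handle exists, and `?.pipeThrough` guards against a null body instead of throwing up front. `ProgressStream` is Notesnook's own wrapper; a hedged, assumption-laden stand-in showing the general shape of a byte-counting TransformStream:

```ts
// Minimal byte-counting TransformStream in the spirit of ProgressStream
// (the real class lives in the Notesnook codebase; this is illustrative).
class CountingStream extends TransformStream<Uint8Array, Uint8Array> {
  constructor(onProgress: (totalRead: number, done: boolean) => void) {
    let totalRead = 0;
    super({
      transform(chunk, controller) {
        totalRead += chunk.byteLength;
        onProgress(totalRead, false);
        controller.enqueue(chunk); // pass bytes through unchanged
      },
      flush() {
        onProgress(totalRead, true); // signal completion
      }
    });
  }
}

// usage: response.body?.pipeThrough(new CountingStream(report))
```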
@@ -569,11 +553,14 @@ async function downloadFile(
   }
 }

-async function exists(filename: string) {
-  const handle = await streamablefs.readFile(filename);
+async function exists(filename: string | FileHandle) {
+  const handle =
+    typeof filename === "string"
+      ? await streamablefs.readFile(filename)
+      : filename;
   return (
     !!handle &&
     handle.file.size === (await handle.size()) - handle.chunks.length * ABYTES
-    handle.file.size === (await handle.size()) - handle.file.chunks * ABYTES
   );
 }
@@ -610,14 +597,11 @@ export async function streamingDecryptFile(

 export async function saveFile(filename: string, fileMetadata: FileMetadata) {
   logger.debug("Saving file", { filename });
-  const { name, type, isUploaded } = fileMetadata;
+  const { name, type } = fileMetadata;

   const decrypted = await decryptFile(filename, fileMetadata);
   logger.debug("Decrypting file", { filename, result: !!decrypted });
   if (decrypted) saveAs(decrypted, getFileNameWithExtension(name, type));
-
-  if (isUploaded && isAttachmentDeletable(type))
-    await streamablefs.deleteFile(filename);
 }

 async function deleteFile(
@@ -684,10 +668,6 @@ export const FileStorage: IFileStorage = {
   hashBase64
 };

-function isAttachmentDeletable(type: string) {
-  return !type.startsWith("image/") && !type.startsWith("application/pdf");
-}
-
 function isSuccessStatusCode(statusCode: number) {
   return statusCode >= 200 && statusCode <= 299;
 }
@@ -93,6 +93,14 @@ class OriginPrivateFileStore implements IFileStorage {
     }
   }

+  async listChunks(chunkPrefix: string): Promise<string[]> {
+    const chunks: string[] = [];
+    for await (const entry of this.directory.keys()) {
+      if (entry.startsWith(chunkPrefix)) chunks.push(entry);
+    }
+    return chunks;
+  }
+
   private async safeOp<T>(chunkName: string, createPromise: () => Promise<T>) {
     const lock = this.locks.get(chunkName);
     if (lock) await lock;
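Inside the worker, chunks are plain entries in an OPFS directory, so listing is a prefix filter over `FileSystemDirectoryHandle.keys()`. A standalone sketch against the real OPFS API; the shared-prefix naming is an assumption carried over from the other backends:

```ts
// List every OPFS entry whose name starts with the given prefix
// (chunk names are assumed to share the file hash as a prefix).
async function listChunks(
  directory: FileSystemDirectoryHandle,
  chunkPrefix: string
): Promise<string[]> {
  const chunks: string[] = [];
  for await (const name of directory.keys()) {
    if (name.startsWith(chunkPrefix)) chunks.push(name);
  }
  return chunks;
}

// const root = await navigator.storage.getDirectory();
// const names = await listChunks(root, fileHash);
```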
@@ -139,6 +147,9 @@ const workerModule = {
   async readChunk(directoryName: string, chunkName: string) {
     const chunk = await fileStores.get(directoryName)?.readChunk(chunkName);
     return chunk ? transfer(chunk, [chunk.buffer]) : undefined;
   },
+  async listChunks(directoryName: string, chunkPrefix: string) {
+    return (await fileStores.get(directoryName)?.listChunks(chunkPrefix)) || [];
+  }
 };
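The `transfer(chunk, [chunk.buffer])` in `readChunk` looks like Comlink's transfer helper, which moves the underlying ArrayBuffer across the worker boundary instead of structured-cloning it; `listChunks` returns plain strings, so no transfer is needed. A hedged sketch, assuming Comlink is indeed the RPC layer here:

```ts
import { expose, transfer } from "comlink";

const workerModule = {
  async readChunk(name: string): Promise<Uint8Array | undefined> {
    const chunk = new Uint8Array(512 * 1024); // stand-in for a real read
    // Transferring the buffer is zero-copy: ownership moves to the
    // caller's thread and the worker-side view is detached.
    return transfer(chunk, [chunk.buffer]);
  },
  async listChunks(prefix: string): Promise<string[]> {
    return []; // strings are cheap to clone; no transfer needed
  }
};

expose(workerModule);
```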