web: add support for storing attachments in opfs or cache storage

Author: Abdullah Atta
Date: 2023-09-20 12:16:56 +05:00
Committed by: Abdullah Atta
Parent: 275f9730f1
Commit: 0149d00d64
9 changed files with 4744 additions and 33647 deletions

apps/web/package-lock.json (generated): 38001 lines changed
File diff suppressed because it is too large.


@@ -84,6 +84,7 @@
"@types/react-dom": "17.0.2",
"@types/react-modal": "3.13.1",
"@types/tinycolor2": "^1.4.3",
"@types/wicg-file-system-access": "^2020.9.6",
"@vitejs/plugin-react-swc": "^3.3.2",
"autoprefixer": "^10.4.14",
"buffer": "^6.0.3",


@@ -24,6 +24,7 @@ import Config from "./utils/config";
import { initalizeLogger, logger } from "./utils/logger";
import { AuthProps } from "./views/auth";
import { initializeFeatureChecks } from "./utils/feature-check";
type Route<TProps = null> = {
component: () => Promise<{
@@ -142,6 +143,8 @@ function isSessionExpired(path: Routes): RouteWithPath<AuthProps> | null {
export async function init() {
await initalizeLogger();
await initializeFeatureChecks();
const { path, route } = getRoute();
return { ...route, path };
}


@@ -0,0 +1,161 @@
/*
This file is part of the Notesnook project (https://notesnook.com/)
Copyright (C) 2023 Streetwriters (Private) Limited
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import { IFileStorage } from "@notesnook/streamable-fs/dist/src/interfaces";
import { File } from "@notesnook/streamable-fs/dist/src/types";
import { IndexedDBKVStore } from "./key-value";
import OriginPrivateFileStoreWorker from "./opfs.worker?worker";
import { OriginPrivateFileStoreWorkerType } from "./opfs.worker";
import { transfer, wrap } from "comlink";
export class IndexedDBFileStore implements IFileStorage {
storage: IndexedDBKVStore;
constructor(name: string) {
this.storage = new IndexedDBKVStore(name, name);
}
clear(): Promise<void> {
return this.storage.clear();
}
setMetadata(filename: string, metadata: File): Promise<void> {
return this.storage.set(filename, metadata);
}
getMetadata(filename: string): Promise<File | undefined> {
return this.storage.get(filename);
}
deleteMetadata(filename: string): Promise<void> {
return this.storage.delete(filename);
}
writeChunk(chunkName: string, data: Uint8Array): Promise<void> {
return this.storage.set(chunkName, data);
}
deleteChunk(chunkName: string): Promise<void> {
return this.storage.delete(chunkName);
}
readChunk(chunkName: string): Promise<Uint8Array | undefined> {
return this.storage.get(chunkName);
}
}
export class CacheStorageFileStore implements IFileStorage {
storage: IndexedDBKVStore;
constructor(private readonly name: string) {
this.storage = new IndexedDBKVStore(name, name);
console.log("USING CACHE FILE STORE!");
}
private getCache() {
return window.caches.open(this.name);
}
async clear(): Promise<void> {
const cache = await this.getCache();
for (const req of await cache.keys()) {
await cache.delete(req);
}
return this.storage.clear();
}
setMetadata(filename: string, metadata: File): Promise<void> {
return this.storage.set(filename, metadata);
}
getMetadata(filename: string): Promise<File | undefined> {
return this.storage.get(filename);
}
deleteMetadata(filename: string): Promise<void> {
return this.storage.delete(filename);
}
async writeChunk(chunkName: string, data: Uint8Array): Promise<void> {
const cache = await this.getCache();
await cache.put(
this.toURL(chunkName),
new Response(data, {
headers: new Headers({
"Content-Length": data.length.toString(),
"Content-Type": "application/encrypted-octet-stream"
})
})
);
}
async deleteChunk(chunkName: string): Promise<void> {
const cache = await this.getCache();
await cache.delete(this.toURL(chunkName));
}
async readChunk(chunkName: string): Promise<Uint8Array | undefined> {
const cache = await this.getCache();
const response = await cache.match(this.toURL(chunkName));
return response ? new Uint8Array(await response.arrayBuffer()) : undefined;
}
private toURL(chunkName: string) {
return `/${chunkName}`;
}
}
export class OriginPrivateFileSystem implements IFileStorage {
private readonly worker = wrap<OriginPrivateFileStoreWorkerType>(
new OriginPrivateFileStoreWorker()
);
private created = false;
constructor(private readonly name: string) {
console.log("using origin private file store");
}
private async create() {
if (this.created) return;
await this.worker.create(this.name, this.name);
this.created = true;
}
async clear(): Promise<void> {
await this.create();
await this.worker.clear(this.name);
}
async setMetadata(filename: string, metadata: File): Promise<void> {
await this.create();
await this.worker.setMetadata(this.name, filename, metadata);
}
async getMetadata(filename: string): Promise<File | undefined> {
await this.create();
return this.worker.getMetadata(this.name, filename);
}
async deleteMetadata(filename: string): Promise<void> {
await this.create();
return this.worker.deleteMetadata(this.name, filename);
}
async writeChunk(chunkName: string, data: Uint8Array): Promise<void> {
await this.create();
return this.worker.writeChunk(
this.name,
chunkName,
transfer(data.buffer, [data.buffer])
);
}
async deleteChunk(chunkName: string): Promise<void> {
await this.create();
return this.worker.deleteChunk(this.name, chunkName);
}
async readChunk(chunkName: string): Promise<Uint8Array | undefined> {
await this.create();
return this.worker.readChunk(this.name, chunkName);
}
}
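
All three stores implement the same IFileStorage interface, so they are interchangeable behind StreamableFS (the selection logic lives in fs.ts further down). A rough usage sketch follows; it is illustrative only, not part of this commit, and the chunk name is made up:

async function chunkRoundTrip() {
  const store = new OriginPrivateFileSystem("streamable-fs");
  // Each method awaits create() first, which lazily sets up the backing
  // OPFS directory inside the worker on first use.
  await store.writeChunk("chunk-0", new Uint8Array([1, 2, 3]));
  const chunk = await store.readChunk("chunk-0"); // Uint8Array | undefined
  await store.deleteChunk("chunk-0");
  return chunk;
}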


@@ -37,6 +37,12 @@ import { Cipher, DataFormat, SerializedKey } from "@notesnook/crypto";
import { IDataType } from "hash-wasm/dist/lib/util";
import { IndexedDBKVStore } from "./key-value";
import FileHandle from "@notesnook/streamable-fs/dist/src/filehandle";
import {
CacheStorageFileStore,
IndexedDBFileStore,
OriginPrivateFileSystem
} from "./file-store";
import { isFeatureSupported } from "../utils/feature-check";
const ABYTES = 17;
const CHUNK_SIZE = 512 * 1024;
@@ -45,7 +51,13 @@ const UPLOAD_PART_REQUIRED_CHUNKS = Math.ceil(
(5 * 1024 * 1024) / ENCRYPTED_CHUNK_SIZE
);
const MINIMUM_MULTIPART_FILE_SIZE = 25 * 1024 * 1024;
- const streamablefs = new StreamableFS("streamable-fs");
+ const streamablefs = new StreamableFS(
+ isFeatureSupported("opfs")
+ ? new OriginPrivateFileSystem("streamable-fs")
+ : isFeatureSupported("cache")
+ ? new CacheStorageFileStore("streamable-fs")
+ : new IndexedDBFileStore("streamable-fs")
+ );
async function writeEncryptedFile(
file: File,
@@ -69,7 +81,12 @@ async function writeEncryptedFile(
const { iv, stream } = await NNCrypto.createEncryptionStream(key);
await file
.stream()
- .pipeThrough(new ChunkedStream(CHUNK_SIZE))
+ .pipeThrough(
+ new ChunkedStream(
+ CHUNK_SIZE,
+ isFeatureSupported("opfs") ? "copy" : "nocopy"
+ )
+ )
.pipeThrough(new IntoChunks(file.size))
.pipeThrough(stream)
.pipeThrough(
@@ -500,11 +517,17 @@ async function downloadFile(filename: string, requestOptions: RequestOptions) {
);
})
)
- .pipeThrough(new ChunkedStream(chunkSize + ABYTES))
+ .pipeThrough(
+ new ChunkedStream(
+ chunkSize + ABYTES,
+ isFeatureSupported("opfs") ? "copy" : "nocopy"
+ )
+ )
.pipeTo(fileHandle.writeable);
return true;
} catch (e) {
console.error(e);
showError(toS3Error(e), "Could not download file");
reportProgress(undefined, { type: "download", hash: filename });
return false;
@@ -542,8 +565,6 @@ export async function decryptFile(
}
async function saveFile(filename: string, fileMetadata: FileMetadata) {
if (!fileMetadata) return false;
const { name, type, isUploaded } = fileMetadata;
const decrypted = await decryptFile(filename, fileMetadata);


@@ -21,4 +21,7 @@ import { INNCrypto } from "@notesnook/crypto";
import CryptoWorker from "./nncrypto.worker?worker";
import { wrap } from "comlink";
- export const NNCrypto = wrap<INNCrypto>(new CryptoWorker()) as INNCrypto;
+ export const NNCrypto =
+ IS_DESKTOP_APP && window.NativeNNCrypto
+ ? new window.NativeNNCrypto()
+ : (wrap<INNCrypto>(new CryptoWorker()) as INNCrypto);


@@ -0,0 +1,119 @@
/*
This file is part of the Notesnook project (https://notesnook.com/)
Copyright (C) 2023 Streetwriters (Private) Limited
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import { IFileStorage } from "@notesnook/streamable-fs/dist/src/interfaces";
import { File } from "@notesnook/streamable-fs/dist/src/types";
import { IndexedDBKVStore } from "./key-value";
import { expose, transfer } from "comlink";
class OriginPrivateFileStore implements IFileStorage {
private storage: IndexedDBKVStore;
constructor(
name: string,
private readonly directory: FileSystemDirectoryHandle
) {
this.storage = new IndexedDBKVStore(name, name);
}
async clear(): Promise<void> {
for await (const [name] of this.directory) {
await this.directory.removeEntry(name, { recursive: true });
}
await this.storage.clear();
}
setMetadata(filename: string, metadata: File): Promise<void> {
return this.storage.set(filename, metadata);
}
getMetadata(filename: string): Promise<File | undefined> {
return this.storage.get(filename);
}
deleteMetadata(filename: string): Promise<void> {
return this.storage.delete(filename);
}
async writeChunk(chunkName: string, data: Uint8Array): Promise<void> {
const file = await this.directory.getFileHandle(chunkName, {
create: true
});
const syncHandle = await file.createSyncAccessHandle();
syncHandle.write(data);
syncHandle.close();
}
async deleteChunk(chunkName: string) {
try {
await this.directory.removeEntry(chunkName);
} catch (e) {
console.error("Failed to delete chunk", e);
}
}
async readChunk(chunkName: string): Promise<Uint8Array | undefined> {
try {
const file = await this.directory.getFileHandle(chunkName);
const syncHandle = await file.createSyncAccessHandle();
const buffer = new Uint8Array(syncHandle.getSize());
syncHandle.read(buffer);
syncHandle.close();
return buffer;
} catch (e) {
console.error("Failed to read chunk", e);
return;
}
}
}
const fileStores: Map<string, OriginPrivateFileStore> = new Map();
const workerModule = {
async create(name: string, directoryName: string) {
const root = await navigator.storage.getDirectory();
const directoryHandle = await root.getDirectoryHandle(directoryName, {
create: true
});
fileStores.set(
directoryName,
new OriginPrivateFileStore(name, directoryHandle)
);
},
clear(directoryName: string) {
return fileStores.get(directoryName)?.clear();
},
setMetadata(directoryName: string, filename: string, metadata: File) {
return fileStores.get(directoryName)?.setMetadata(filename, metadata);
},
getMetadata(directoryName: string, filename: string) {
return fileStores.get(directoryName)?.getMetadata(filename);
},
deleteMetadata(directoryName: string, filename: string) {
return fileStores.get(directoryName)?.deleteMetadata(filename);
},
writeChunk(directoryName: string, chunkName: string, data: ArrayBuffer) {
return fileStores
.get(directoryName)
?.writeChunk(chunkName, new Uint8Array(data));
},
deleteChunk(directoryName: string, chunkName: string) {
return fileStores.get(directoryName)?.deleteChunk(chunkName);
},
async readChunk(directoryName: string, chunkName: string) {
const chunk = await fileStores.get(directoryName)?.readChunk(chunkName);
return chunk ? transfer(chunk, [chunk.buffer]) : undefined;
}
};
expose(workerModule);
export type OriginPrivateFileStoreWorkerType = typeof workerModule;
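
The indirection through a dedicated worker is necessary because createSyncAccessHandle() (FileSystemSyncAccessHandle) is only available inside dedicated workers, not on the main thread. The main-thread side, already shown in OriginPrivateFileSystem above, wraps this module with comlink; a condensed sketch, not part of the commit:

import { wrap } from "comlink";
import OriginPrivateFileStoreWorker from "./opfs.worker?worker";
import type { OriginPrivateFileStoreWorkerType } from "./opfs.worker";

const worker = wrap<OriginPrivateFileStoreWorkerType>(
  new OriginPrivateFileStoreWorker()
);
// Every exposed method takes the directory name first, so a single worker can
// host multiple stores keyed by directory.
await worker.create("streamable-fs", "streamable-fs");
await worker.writeChunk("streamable-fs", "chunk-0", new Uint8Array(8).buffer);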


@@ -0,0 +1,60 @@
/*
This file is part of the Notesnook project (https://notesnook.com/)
Copyright (C) 2023 Streetwriters (Private) Limited
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
const FEATURE_CHECKS = {
opfs: false,
cache: false
};
async function isOPFSSupported() {
const hasGetDirectory =
"getDirectory" in window.navigator.storage &&
typeof window.navigator.storage.getDirectory === "function";
return (
hasGetDirectory &&
window.navigator.storage
.getDirectory()
.then(() => (FEATURE_CHECKS.opfs = true))
.catch(() => (FEATURE_CHECKS.opfs = false))
);
}
async function isCacheSupported() {
const hasCacheStorage =
"CacheStorage" in window &&
"caches" in window &&
window.caches instanceof CacheStorage;
return (
hasCacheStorage &&
window.caches
.has("something")
.then((f) => (FEATURE_CHECKS.cache = true))
.catch((a) => (FEATURE_CHECKS.cache = false))
);
}
export async function initializeFeatureChecks() {
await Promise.allSettled([isOPFSSupported(), isCacheSupported()]);
}
function isFeatureSupported(key: keyof typeof FEATURE_CHECKS) {
return FEATURE_CHECKS[key];
}
export { isFeatureSupported };
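
Note that the flags are filled in asynchronously, so initializeFeatureChecks() must have completed before isFeatureSupported() returns a meaningful answer; init() in the routing module above awaits it for exactly this reason. A short sketch of the intended call order (illustrative, not part of the commit):

await initializeFeatureChecks();

// Same precedence that fs.ts uses to pick the attachment file store.
const backend = isFeatureSupported("opfs")
  ? "opfs"
  : isFeatureSupported("cache")
  ? "cache-storage"
  : "indexeddb";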


@@ -20,7 +20,7 @@ along with this program. If not, see <http://www.gnu.org/licenses/>.
import { Chunk } from "@notesnook/crypto";
export class ChunkedStream extends TransformStream<Uint8Array, Uint8Array> {
- constructor(chunkSize: number) {
+ constructor(chunkSize: number, mode: "nocopy" | "copy") {
let backBuffer: Uint8Array | null = null;
super({
start() {},
@@ -38,7 +38,13 @@ export class ChunkedStream extends TransformStream<Uint8Array, Uint8Array> {
const start = backBuffer.length - remainingBytes;
const end = start + chunkSize;
- controller.enqueue(backBuffer.subarray(start, end));
+ // TODO: find a way to support sending the chunked
+ // buffer to web workers without copying.
+ controller.enqueue(
+ mode === "copy"
+ ? new Uint8Array(backBuffer.buffer.slice(start, end))
+ : backBuffer.subarray(start, end)
+ );
remainingBytes -= chunkSize;
}
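
The TODO above exists because subarray() only returns a view over the shared back buffer; when such a chunk's buffer is transferred to a web worker (as OriginPrivateFileSystem.writeChunk does via transfer()), the whole back buffer gets detached, corrupting the chunks that follow. A small illustration of the difference (not from the commit):

const backBuffer = new Uint8Array(1024);
const view = backBuffer.subarray(0, 512); // shares backBuffer.buffer
const copy = new Uint8Array(backBuffer.buffer.slice(0, 512)); // independent ArrayBuffer
console.log(view.buffer === backBuffer.buffer); // true
console.log(copy.buffer === backBuffer.buffer); // false
// Transferring view.buffer to a worker would detach backBuffer as well;
// transferring copy.buffer leaves it untouched.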