web: fully move to using web streams everywhere
Committed by Abdullah Atta
Parent: 6f88b70937
Commit: 24e13f488e
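In short, the callback-based `encryptStream`/`decryptStream` helpers and the axios `arraybuffer` downloads are replaced with WHATWG web streams piped end to end. A rough sketch of the encryption pipeline the new `writeEncryptedFile` builds, using the transforms added in this commit (illustrative only, written as if it sat next to them in `src/interfaces`; the `encryptionStream` and `destination` arguments stand in for `crypto.createEncryptionStream(key).stream` and `fileHandle.writeable`):

```ts
import { ChunkedStream, IntoChunks } from "./chunked-stream";
import { ProgressStream } from "./progress-stream";

// Illustration only: re-chunk the file into fixed-size pieces, tag the final
// chunk, encrypt, report progress, and write into the destination stream.
async function encryptFile(
  file: File,
  encryptionStream: TransformStream<{ data: Uint8Array; final: boolean }, Uint8Array>,
  destination: WritableStream<Uint8Array>
) {
  const CHUNK_SIZE = 512 * 1024;
  await file
    .stream()
    .pipeThrough(new ChunkedStream(CHUNK_SIZE))
    .pipeThrough(new IntoChunks(file.size))
    .pipeThrough(encryptionStream)
    .pipeThrough(new ProgressStream((read) => console.log(`${read} bytes encrypted`)))
    .pipeTo(destination);
}
```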
40 apps/web/__tests__/base64-stream.test.ts (Normal file)
@@ -0,0 +1,40 @@
/*
This file is part of the Notesnook project (https://notesnook.com/)

Copyright (C) 2023 Streetwriters (Private) Limited

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/

import "./bootstrap";
import { test } from "vitest";
import { Base64DecoderStream } from "../src/interfaces/base64-decoder-stream";
import { consumeReadableStream } from "../src/interfaces/stream-utils";
import { createReadStream, readFileSync } from "fs";
import { Readable } from "stream";

test("streamed base64 decoder should output same as non-streamed", async (t) => {
  const expected = readFileSync(__filename, "base64");
  const fileStream = Readable.toWeb(
    createReadStream(__filename)
  ) as ReadableStream<Uint8Array>;

  t.expect(
    (
      await consumeReadableStream(
        fileStream.pipeThrough(new Base64DecoderStream("base64"))
      )
    ).join()
  ).toBe(expected);
});
34 apps/web/__tests__/bootstrap.ts (Normal file)
@@ -0,0 +1,34 @@
/*
This file is part of the Notesnook project (https://notesnook.com/)

Copyright (C) 2023 Streetwriters (Private) Limited

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/

import {
  TransformStream,
  ReadableStream,
  WritableStream
} from "node:stream/web";

// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
globalThis.TransformStream = TransformStream;
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
globalThis.ReadableStream = ReadableStream;
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
globalThis.WritableStream = WritableStream;
43 apps/web/__tests__/chunked-stream.test.ts (Normal file)
@@ -0,0 +1,43 @@
/*
This file is part of the Notesnook project (https://notesnook.com/)

Copyright (C) 2023 Streetwriters (Private) Limited

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/

import "./bootstrap";
import { test } from "vitest";
import { ChunkedStream } from "../src/interfaces/chunked-stream";
import { toAsyncIterator } from "@notesnook-importer/core/dist/src/utils/stream";

test("chunked stream should create equal sized chunks", async (t) => {
  const { readable, writable } = new ChunkedStream(512);
  const lengths: number[] = [];

  setTimeout(async () => {
    for await (const chunk of toAsyncIterator(readable)) {
      lengths.push(chunk.length);
    }
  });
  const writer = writable.getWriter();
  await writer.write(Buffer.alloc(411));
  await writer.write(Buffer.alloc(411));
  await writer.write(Buffer.alloc(411));
  await writer.write(Buffer.alloc(815));
  await writer.write(Buffer.alloc(12));
  await writer.close();

  t.expect(lengths).toMatchObject([512, 512, 512, 512, 12]);
});
1715 apps/web/package-lock.json (generated)
File diff suppressed because it is too large.
@@ -32,7 +32,7 @@
    "@theme-ui/core": "^0.14.7",
    "allotment": "^1.12.1",
    "async-mutex": "^0.3.2",
    "axios": "^0.21.4",
    "axios": "^1.3.4",
    "clipboard-polyfill": "^3.0.3",
    "comlink": "^4.3.1",
    "cronosjs": "^1.7.1",
@@ -85,6 +85,7 @@
    "env-cmd": "^10.1.0",
    "file-loader": "^6.2.0",
    "find-process": "^1.4.4",
    "happy-dom": "^8.9.0",
    "ip": "^1.1.8",
    "lorem-ipsum": "^2.0.4",
    "otplib": "^12.0.1",
@@ -94,6 +95,7 @@
    "react-dom": "17.0.2",
    "source-map-explorer": "^2.5.2",
    "typescript": "^4.8.2",
    "vitest": "^0.29.8",
    "webpack-bundle-analyzer": "^4.8.0",
    "worker-loader": "^3.0.8"
  },
154 apps/web/public/stream-saver-sw.js (Normal file)
@@ -0,0 +1,154 @@
|
||||
/*
|
||||
This file is part of the Notesnook project (https://notesnook.com/)
|
||||
|
||||
Copyright (C) 2023 Streetwriters (Private) Limited
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
/* eslint-disable no-restricted-globals */
|
||||
|
||||
self.addEventListener("install", () => {
|
||||
self.skipWaiting();
|
||||
});
|
||||
|
||||
self.addEventListener("activate", (event) => {
|
||||
event.waitUntil(self.clients.claim());
|
||||
});
|
||||
|
||||
const map = new Map();
|
||||
|
||||
// This should be called once per download
|
||||
// Each event has a dataChannel that the data will be piped through
|
||||
self.onmessage = (event) => {
|
||||
// We send a heartbeat every x seconds to keep the
|
||||
// service worker alive if a transferable stream is not sent
|
||||
if (event.data === "ping") {
|
||||
return;
|
||||
}
|
||||
|
||||
const data = event.data;
|
||||
const downloadUrl =
|
||||
data.url ||
|
||||
self.registration.scope +
|
||||
Math.random() +
|
||||
"/" +
|
||||
(typeof data === "string" ? data : data.filename);
|
||||
const port = event.ports[0];
|
||||
const metadata = new Array(3); // [stream, data, port]
|
||||
|
||||
metadata[1] = data;
|
||||
metadata[2] = port;
|
||||
|
||||
if (event.data.transferringReadable) {
|
||||
port.onmessage = (evt) => {
|
||||
port.onmessage = null;
|
||||
metadata[0] = evt.data.readableStream;
|
||||
};
|
||||
} else {
|
||||
metadata[0] = createStream(port);
|
||||
}
|
||||
|
||||
map.set(downloadUrl, metadata);
|
||||
port.postMessage({ download: downloadUrl });
|
||||
};
|
||||
|
||||
function createStream(port) {
|
||||
// ReadableStream is only supported by chrome 52
|
||||
return new ReadableStream({
|
||||
start(controller) {
|
||||
// When we receive data on the messageChannel, we write
|
||||
port.onmessage = ({ data }) => {
|
||||
if (data === "end") {
|
||||
return controller.close();
|
||||
}
|
||||
|
||||
if (data === "abort") {
|
||||
controller.error("Aborted the download");
|
||||
return;
|
||||
}
|
||||
|
||||
controller.enqueue(data);
|
||||
};
|
||||
},
|
||||
cancel(reason) {
|
||||
console.log("user aborted", reason);
|
||||
port.postMessage({ abort: true });
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
self.onfetch = (event) => {
|
||||
const url = event.request.url;
|
||||
|
||||
// this only works for Firefox
|
||||
if (url.endsWith("/ping")) {
|
||||
return event.respondWith(new Response("pong"));
|
||||
}
|
||||
|
||||
const metadata = map.get(url);
|
||||
if (!metadata) return null;
|
||||
|
||||
const [stream, data, port] = metadata;
|
||||
|
||||
map.delete(url);
|
||||
|
||||
// Not comfortable letting any user control all headers
|
||||
// so we only copy over the length & disposition
|
||||
const responseHeaders = new Headers({
|
||||
"Content-Type": "application/octet-stream; charset=utf-8",
|
||||
|
||||
// To be on the safe side, the link can be opened in an iframe.
|
||||
// but octet-stream should stop it.
|
||||
"Content-Security-Policy": "default-src 'none'",
|
||||
"X-Content-Security-Policy": "default-src 'none'",
|
||||
"X-WebKit-CSP": "default-src 'none'",
|
||||
"X-XSS-Protection": "1; mode=block"
|
||||
});
|
||||
|
||||
let headers = new Headers(data.headers || {});
|
||||
|
||||
if (headers.has("Content-Length")) {
|
||||
responseHeaders.set("Content-Length", headers.get("Content-Length"));
|
||||
}
|
||||
|
||||
if (headers.has("Content-Disposition")) {
|
||||
responseHeaders.set(
|
||||
"Content-Disposition",
|
||||
headers.get("Content-Disposition")
|
||||
);
|
||||
}
|
||||
|
||||
// data, data.filename and size should not be used anymore
|
||||
if (data.size) {
|
||||
console.warn("Deprecated");
|
||||
responseHeaders.set("Content-Length", data.size);
|
||||
}
|
||||
|
||||
let fileName = typeof data === "string" ? data : data.filename;
|
||||
if (fileName) {
|
||||
console.warn("Deprecated");
|
||||
// Make filename RFC5987 compatible
|
||||
fileName = encodeURIComponent(fileName)
|
||||
.replace(/['()]/g, escape)
|
||||
.replace(/\*/g, "%2A");
|
||||
responseHeaders.set(
|
||||
"Content-Disposition",
|
||||
"attachment; filename*=UTF-8''" + fileName
|
||||
);
|
||||
}
|
||||
|
||||
event.respondWith(new Response(stream, { headers: responseHeaders }));
|
||||
|
||||
port.postMessage({ debug: "Download started" });
|
||||
};
|
||||
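For orientation, a rough sketch of the page-side handshake this worker expects (the message fields follow the worker code above; the function name, iframe approach, and error handling are illustrative only and simplified from `src/utils/stream-saver.ts` and `src/utils/mitm.ts` further down):

```ts
// Simplified page-side flow, assuming stream-saver-sw.js is already registered
// and controls the page.
async function startDownload(filename: string, data: ReadableStream<Uint8Array>) {
  const sw = (await navigator.serviceWorker.getRegistration("./"))?.active;
  if (!sw) throw new Error("stream-saver-sw.js is not active.");

  const channel = new MessageChannel();
  channel.port1.onmessage = (evt) => {
    // The worker replies with a one-off URL that its onfetch handler will
    // answer with the streamed response, triggering the browser download.
    if (evt.data.download) {
      const iframe = document.createElement("iframe");
      iframe.hidden = true;
      iframe.src = evt.data.download;
      document.body.appendChild(iframe);
    }
  };

  // Hand the worker one end of the channel, then transfer the readable stream
  // over it so the worker can respond with it directly.
  sw.postMessage({ filename, transferringReadable: true }, [channel.port2]);
  channel.port1.postMessage({ readableStream: data }, [data as unknown as Transferable]);
}
```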
66 apps/web/src/common/attachment-stream.ts (Normal file)
@@ -0,0 +1,66 @@
/*
This file is part of the Notesnook project (https://notesnook.com/)

Copyright (C) 2023 Streetwriters (Private) Limited

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/

import { decryptFile } from "../interfaces/fs";
import { db } from "./db";
import { ZipFile } from "../utils/zip-stream";

export const METADATA_FILENAME = "metadata.json";

export class AttachmentStream extends ReadableStream<ZipFile> {
  constructor(attachments: Array<any>) {
    let index = 0;
    super({
      start() {},
      async pull(controller) {
        const attachment = attachments[index++];

        await db.fs.downloadFile(
          "all-attachments",
          attachment.metadata.hash,
          attachment.chunkSize,
          attachment.metadata
        );

        const key = await db.attachments?.decryptKey(attachment.key);
        const file = await decryptFile(attachment.metadata.hash, {
          key,
          iv: attachment.iv,
          name: attachment.metadata.filename,
          type: attachment.metadata.type,
          isUploaded: !!attachment.dateUploaded
        });

        if (file) {
          const filePath = `/${attachment.metadata.filename}`;
          controller.enqueue({
            path: filePath,
            data: new Uint8Array(await file.arrayBuffer())
          });
        } else {
          controller.error(new Error("Failed to decrypt file."));
        }

        if (index === attachments.length) {
          controller.close();
        }
      }
    });
  }
}
@@ -1,108 +0,0 @@
|
||||
/*
|
||||
This file is part of the Notesnook project (https://notesnook.com/)
|
||||
|
||||
Copyright (C) 2023 Streetwriters (Private) Limited
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
import { StreamableFS } from "@notesnook/streamable-fs";
|
||||
import FS from "../../interfaces/fs";
|
||||
import { getNNCrypto } from "../../interfaces/nncrypto.stub";
|
||||
import { db } from "../db";
|
||||
import { ZipFile } from "./zip-stream";
|
||||
|
||||
export type PackageMetadata = {
|
||||
version: string;
|
||||
attachments: string[];
|
||||
};
|
||||
|
||||
export const METADATA_FILENAME = "metadata.json";
|
||||
|
||||
export class AttachmentStream extends ReadableStream<ZipFile> {
|
||||
constructor(attachments: Array<any>) {
|
||||
super({
|
||||
async pull(controller) {
|
||||
const token = await db.fs.tokenManager.getAccessToken();
|
||||
let index = 0;
|
||||
for (const attachment of attachments) {
|
||||
if (index > 2) {
|
||||
return controller.close();
|
||||
}
|
||||
const url = `${hosts.API_HOST}/s3?name=${attachment.metadata.hash}`;
|
||||
|
||||
const { execute } = FS.downloadAttachment(attachment.metadata.hash, {
|
||||
metadata: attachment.metadata,
|
||||
url,
|
||||
chunkSize: attachment.chunkSize,
|
||||
headers: { Authorization: `Bearer ${token}` }
|
||||
});
|
||||
await execute();
|
||||
const key = await db.attachments?.decryptKey(attachment.key);
|
||||
const file = await saveFile(attachment.metadata.hash, {
|
||||
key,
|
||||
iv: attachment.iv,
|
||||
name: attachment.metadata.filename,
|
||||
type: attachment.metadata.type,
|
||||
isUploaded: !!attachment.dateUploaded
|
||||
});
|
||||
const filePath = `/${attachment.metadata.hash}`;
|
||||
if (file)
|
||||
controller.enqueue({
|
||||
path: filePath,
|
||||
data: file
|
||||
});
|
||||
index++;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
async function saveFile(filename: string, fileMetadata: any) {
|
||||
if (!fileMetadata) return false;
|
||||
const streamablefs = new StreamableFS("streamable-fs");
|
||||
|
||||
const fileHandle = await streamablefs.readFile(filename);
|
||||
if (!fileHandle) return false;
|
||||
const { key, iv, name, type, isUploaded } = fileMetadata;
|
||||
|
||||
const blobParts: Array<any> = [];
|
||||
const reader = fileHandle.getReader();
|
||||
|
||||
const crypto = await getNNCrypto();
|
||||
await crypto.decryptStream(
|
||||
key,
|
||||
iv,
|
||||
{
|
||||
read: async () => {
|
||||
const { value } = await reader.read();
|
||||
return value;
|
||||
},
|
||||
write: async (chunk: any) => {
|
||||
blobParts.push(chunk.data);
|
||||
}
|
||||
},
|
||||
filename
|
||||
);
|
||||
|
||||
if (isUploaded) await streamablefs.deleteFile(filename);
|
||||
return new Blob(blobParts, { type }).arrayBuffer().then((buffer) => {
|
||||
return new Uint8Array(buffer);
|
||||
});
|
||||
}
|
||||
|
||||
const hosts = {
|
||||
API_HOST: "https://api.notesnook.com"
|
||||
};
|
||||
@@ -26,9 +26,10 @@ import Field from "../field";
import ListContainer from "../list-container";
import Dialog from "./dialog";
import Placeholder from "../placeholders";
import { AttachmentStream } from "../../common/attachments/attachment-stream";
import { ZipStream } from "../../common/attachments/zip-stream";
import { createWriteStream } from "../../common/attachments/stream-saver";
import { AttachmentStream } from "../../common/attachment-stream";
import { ZipStream } from "../../utils/zip-stream";
import { createWriteStream } from "../../utils/stream-saver";
import { register } from "../../utils/mitm";

function AttachmentsDialog({ onClose }) {
  const attachments = useStore((store) => store.attachments);
@@ -51,14 +52,10 @@ function AttachmentsDialog({ onClose }) {
      positiveButton={{
        text: "Download All Attachments",
        onClick: async () => {
          if (navigator.serviceWorker) {
            console.log(
              "attachment-dialog",
              await new AttachmentStream(attachments)
                .pipeThrough(new ZipStream())
                .pipeTo(createWriteStream("notesnook-importer.zip"))
            );
          }
          await register();
          await new AttachmentStream(attachments)
            .pipeThrough(new ZipStream())
            .pipeTo(createWriteStream("attachments.zip"));
        }
      }}
      show
56 apps/web/src/interfaces/base64-decoder-stream.ts (Normal file)
@@ -0,0 +1,56 @@
/*
This file is part of the Notesnook project (https://notesnook.com/)

Copyright (C) 2023 Streetwriters (Private) Limited

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/

export class Base64DecoderStream extends TransformStream<Uint8Array, string> {
  constructor(encoding: "base64url" | "base64" = "base64") {
    let backBuffer: Uint8Array | null = null;
    super({
      start() {},
      transform(chunk, controller) {
        let part = backBuffer
          ? Buffer.concat(
              [Buffer.from(backBuffer.buffer), Buffer.from(chunk.buffer)],
              backBuffer.length + chunk.length
            )
          : Buffer.from(chunk.buffer);

        const remaining = part.length % 3;
        if (remaining) {
          backBuffer = part.subarray(part.length - remaining);
          part = part.subarray(0, part.length - remaining);
        } else {
          backBuffer = null;
        }

        controller.enqueue(toBase64(part, encoding));
      },
      flush(controller) {
        if (backBuffer)
          controller.enqueue(toBase64(Buffer.from(backBuffer), encoding));
      }
    });
  }
}

function toBase64(bytes: Buffer, encoding: "base64url" | "base64") {
  const result = Buffer.isEncoding(encoding)
    ? bytes.toString(encoding)
    : bytes.toString("base64");
  return result;
}
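A note on the modulo-3 bookkeeping above: base64 maps every 3 input bytes to 4 output characters, so the transform only encodes whole triplets per chunk and carries the remainder forward; otherwise arbitrary chunk boundaries would inject padding mid-stream. A quick illustration (assumes Node's Buffer, values shown in the trailing comments):

```ts
// Encoding a 4-byte payload in one go vs. naively splitting it 2 + 2:
Buffer.from([1, 2, 3, 4]).toString("base64"); // "AQIDBA=="
Buffer.from([1, 2]).toString("base64") +
  Buffer.from([3, 4]).toString("base64");     // "AQI=AwQ=" (corrupt: padding in the middle)
// Base64DecoderStream avoids this by encoding only slices whose length is a
// multiple of 3 and flushing the leftover bytes at the end of the stream.
```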
71 apps/web/src/interfaces/chunked-stream.ts (Normal file)
@@ -0,0 +1,71 @@
/*
This file is part of the Notesnook project (https://notesnook.com/)

Copyright (C) 2023 Streetwriters (Private) Limited

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/

import { Chunk } from "@notesnook/crypto";

export class ChunkedStream extends TransformStream<Uint8Array, Uint8Array> {
  constructor(chunkSize: number) {
    let backBuffer: Uint8Array | null = null;
    super({
      start() {},
      transform(chunk, controller) {
        backBuffer = backBuffer
          ? Buffer.concat(
              [Buffer.from(backBuffer.buffer), Buffer.from(chunk.buffer)],
              backBuffer.length + chunk.length
            )
          : Buffer.from(chunk.buffer);

        if (backBuffer.length >= chunkSize) {
          let remainingBytes = backBuffer.length;
          while (remainingBytes > chunkSize) {
            const start = backBuffer.length - remainingBytes;
            const end = start + chunkSize;
            controller.enqueue(backBuffer.subarray(start, end));
            remainingBytes -= chunkSize;
          }

          backBuffer =
            remainingBytes > 0
              ? backBuffer.subarray(
                  backBuffer.length - remainingBytes,
                  backBuffer.length
                )
              : null;
        }
      },
      flush(controller) {
        if (backBuffer) controller.enqueue(backBuffer);
      }
    });
  }
}

export class IntoChunks extends TransformStream<Uint8Array, Chunk> {
  constructor(totalSize: number) {
    let size = 0;
    super({
      start() {},
      transform(chunk, controller) {
        size += chunk.length;
        controller.enqueue({ data: chunk, final: size === totalSize });
      }
    });
  }
}
@@ -20,7 +20,7 @@ along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
import "web-streams-polyfill/dist/ponyfill";
|
||||
import localforage from "localforage";
|
||||
import { xxhash64, createXXHash64 } from "hash-wasm";
|
||||
import axios from "axios";
|
||||
import axios, { AxiosProgressEvent } from "axios";
|
||||
import { AppEventManager, AppEvents } from "../common/app-events";
|
||||
import { StreamableFS } from "@notesnook/streamable-fs";
|
||||
import { getNNCrypto } from "./nncrypto.stub";
|
||||
@@ -30,6 +30,13 @@ import { saveAs } from "file-saver";
|
||||
import { showToast } from "../utils/toast";
|
||||
import { db } from "../common/db";
|
||||
import { getFileNameWithExtension } from "@notesnook/core/utils/filename";
|
||||
import { ChunkedStream, IntoChunks } from "./chunked-stream";
|
||||
import { ProgressStream } from "./progress-stream";
|
||||
import { consumeReadableStream } from "./stream-utils";
|
||||
import { Base64DecoderStream } from "./base64-decoder-stream";
|
||||
import { toBlob } from "@notesnook-importer/core/dist/src/utils/stream";
|
||||
import { Cipher, OutputFormat, SerializedKey } from "@notesnook/crypto";
|
||||
import { IDataType } from "hash-wasm/dist/lib/util";
|
||||
|
||||
const ABYTES = 17;
|
||||
const CHUNK_SIZE = 512 * 1024;
|
||||
@@ -39,12 +46,11 @@ const UPLOAD_PART_REQUIRED_CHUNKS = Math.ceil(
|
||||
);
|
||||
const streamablefs = new StreamableFS("streamable-fs");
|
||||
|
||||
/**
|
||||
* @param {File} file
|
||||
* @param {import("nncrypto/dist/src/types").SerializedKey} key
|
||||
* @param {string} hash
|
||||
*/
|
||||
async function writeEncryptedFile(file, key, hash) {
|
||||
async function writeEncryptedFile(
|
||||
file: File,
|
||||
key: SerializedKey,
|
||||
hash: string
|
||||
) {
|
||||
const crypto = await getNNCrypto();
|
||||
|
||||
if (!localforage.supports(localforage.INDEXEDDB))
|
||||
@@ -52,38 +58,29 @@ async function writeEncryptedFile(file, key, hash) {
|
||||
|
||||
if (await streamablefs.exists(hash)) await streamablefs.deleteFile(hash);
|
||||
|
||||
let offset = 0;
|
||||
let encrypted = 0;
|
||||
// let offset = 0;
|
||||
// let encrypted = 0;
|
||||
const fileHandle = await streamablefs.createFile(hash, file.size, file.type);
|
||||
sendAttachmentsProgressEvent("encrypt", hash, 1, 0);
|
||||
|
||||
const iv = await crypto.encryptStream(
|
||||
key,
|
||||
{
|
||||
read: async () => {
|
||||
let end = Math.min(offset + CHUNK_SIZE, file.size);
|
||||
if (offset === end) return;
|
||||
const chunk = new Uint8Array(
|
||||
await file.slice(offset, end).arrayBuffer()
|
||||
);
|
||||
offset = end;
|
||||
const isFinal = offset === file.size;
|
||||
return {
|
||||
final: isFinal,
|
||||
data: chunk
|
||||
};
|
||||
},
|
||||
write: async (chunk) => {
|
||||
encrypted += chunk.data.length - ABYTES;
|
||||
const { iv, stream } = await crypto.createEncryptionStream(key);
|
||||
await file
|
||||
.stream()
|
||||
.pipeThrough(new ChunkedStream(CHUNK_SIZE))
|
||||
.pipeThrough(new IntoChunks(file.size))
|
||||
.pipeThrough(stream)
|
||||
.pipeThrough(
|
||||
new ProgressStream((totalRead, done) =>
|
||||
reportProgress(
|
||||
{ total: file.size, loaded: encrypted },
|
||||
{
|
||||
total: file.size,
|
||||
loaded: done ? file.size : totalRead
|
||||
},
|
||||
{ type: "encrypt", hash }
|
||||
);
|
||||
await fileHandle.write(chunk.data);
|
||||
}
|
||||
},
|
||||
file.name
|
||||
);
|
||||
)
|
||||
)
|
||||
)
|
||||
.pipeTo(fileHandle.writeable);
|
||||
|
||||
sendAttachmentsProgressEvent("encrypt", hash, 1);
|
||||
|
||||
@@ -91,7 +88,7 @@ async function writeEncryptedFile(file, key, hash) {
|
||||
chunkSize: CHUNK_SIZE,
|
||||
iv: iv,
|
||||
length: file.size,
|
||||
salt: key.salt,
|
||||
salt: key.salt!,
|
||||
alg: "xcha-stream"
|
||||
};
|
||||
}
|
||||
@@ -103,7 +100,11 @@ async function writeEncryptedFile(file, key, hash) {
|
||||
* 3. We encrypt the Uint8Array
|
||||
* 4. We save the encrypted Uint8Array
|
||||
*/
|
||||
async function writeEncryptedBase64(metadata) {
|
||||
async function writeEncryptedBase64(metadata: {
|
||||
data: string;
|
||||
key: SerializedKey;
|
||||
mimeType?: string;
|
||||
}) {
|
||||
const { data, key, mimeType } = metadata;
|
||||
|
||||
const bytes = new Uint8Array(Buffer.from(data, "base64"));
|
||||
@@ -122,33 +123,18 @@ async function writeEncryptedBase64(metadata) {
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {string} data the base64 data
|
||||
* @returns
|
||||
*/
|
||||
function hashBase64(data) {
|
||||
function hashBase64(data: string) {
|
||||
return hashBuffer(Buffer.from(data, "base64"));
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {import("hash-wasm/dist/lib/util").IDataType} data
|
||||
* @returns
|
||||
*/
|
||||
async function hashBuffer(data) {
|
||||
async function hashBuffer(data: IDataType) {
|
||||
return {
|
||||
hash: await xxhash64(data),
|
||||
type: "xxh64"
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {ReadableStreamReader<Uint8Array>} reader
|
||||
* @returns
|
||||
*/
|
||||
async function hashStream(reader) {
|
||||
async function hashStream(reader: ReadableStreamDefaultReader<Uint8Array>) {
|
||||
const hasher = await createXXHash64();
|
||||
hasher.init();
|
||||
|
||||
@@ -162,42 +148,51 @@ async function hashStream(reader) {
|
||||
return { type: "xxh64", hash: hasher.digest("hex") };
|
||||
}
|
||||
|
||||
async function readEncrypted(filename, key, cipherData) {
|
||||
async function readEncrypted(
|
||||
filename: string,
|
||||
key: SerializedKey,
|
||||
cipherData: Cipher & { outputType: OutputFormat }
|
||||
) {
|
||||
const fileHandle = await streamablefs.readFile(filename);
|
||||
if (!fileHandle) {
|
||||
console.error(`File not found. (File hash: ${filename})`);
|
||||
return null;
|
||||
}
|
||||
|
||||
const reader = fileHandle.getReader();
|
||||
const plainText = new Uint8Array(fileHandle.file.size);
|
||||
let offset = 0;
|
||||
|
||||
const crypto = await getNNCrypto();
|
||||
await crypto.decryptStream(
|
||||
const decryptionStream = await crypto.createDecryptionStream(
|
||||
key,
|
||||
cipherData.iv,
|
||||
{
|
||||
read: async () => {
|
||||
const { value } = await reader.read();
|
||||
return value;
|
||||
},
|
||||
write: async (chunk) => {
|
||||
plainText.set(chunk.data, offset);
|
||||
offset += chunk.data.length;
|
||||
}
|
||||
},
|
||||
filename
|
||||
cipherData.iv
|
||||
);
|
||||
|
||||
return cipherData.outputType === "base64"
|
||||
? Buffer.from(plainText).toString("base64")
|
||||
: cipherData.outputType === "text"
|
||||
? new TextDecoder().decode(plainText)
|
||||
: plainText;
|
||||
return cipherData.outputType === "base64" || cipherData.outputType === "text"
|
||||
? (
|
||||
await consumeReadableStream(
|
||||
fileHandle.readable
|
||||
.pipeThrough(decryptionStream)
|
||||
.pipeThrough(
|
||||
cipherData.outputType === "text"
|
||||
? new globalThis.TextDecoderStream()
|
||||
: new Base64DecoderStream()
|
||||
)
|
||||
)
|
||||
).join()
|
||||
: new Uint8Array(
|
||||
Buffer.concat(
|
||||
await consumeReadableStream(
|
||||
fileHandle.readable.pipeThrough(decryptionStream)
|
||||
)
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
async function uploadFile(filename, requestOptions) {
|
||||
type RequestOptions = {
|
||||
headers: Record<string, string>;
|
||||
signal: AbortSignal;
|
||||
url: string;
|
||||
chunkSize: number;
|
||||
};
|
||||
|
||||
async function uploadFile(filename: string, requestOptions: RequestOptions) {
|
||||
const fileHandle = await streamablefs.readFile(filename);
|
||||
if (!fileHandle)
|
||||
throw new Error(`File stream not found. (File hash: ${filename})`);
|
||||
@@ -205,12 +200,15 @@ async function uploadFile(filename, requestOptions) {
|
||||
fileHandle.file.chunks / UPLOAD_PART_REQUIRED_CHUNKS
|
||||
);
|
||||
|
||||
let {
|
||||
uploadedChunks = [],
|
||||
uploadedBytes = 0,
|
||||
uploaded = false,
|
||||
uploadId = ""
|
||||
} = fileHandle.file.additionalData || {};
|
||||
const additionalData = (fileHandle.file.additionalData || {}) as {
|
||||
uploadedBytes?: number;
|
||||
uploadId?: string;
|
||||
uploaded?: boolean;
|
||||
uploadedChunks?: { PartNumber: number; ETag: string }[];
|
||||
};
|
||||
|
||||
const { uploadedChunks = [], uploaded = false } = additionalData;
|
||||
let { uploadedBytes = 0, uploadId = "" } = additionalData;
|
||||
|
||||
try {
|
||||
if (uploaded) {
|
||||
@@ -218,14 +216,14 @@ async function uploadFile(filename, requestOptions) {
|
||||
return true;
|
||||
}
|
||||
|
||||
const { headers, cancellationToken } = requestOptions;
|
||||
const { headers, signal } = requestOptions;
|
||||
|
||||
const initiateMultiPartUpload = await axios
|
||||
.get(
|
||||
`${hosts.API_HOST}/s3/multipart?name=${filename}&parts=${TOTAL_PARTS}&uploadId=${uploadId}`,
|
||||
{
|
||||
headers,
|
||||
cancelToken: cancellationToken
|
||||
signal
|
||||
}
|
||||
)
|
||||
.catch((e) => {
|
||||
@@ -237,7 +235,7 @@ async function uploadFile(filename, requestOptions) {
|
||||
|
||||
await fileHandle.addAdditionalData("uploadId", uploadId);
|
||||
|
||||
const onUploadProgress = (ev) => {
|
||||
const onUploadProgress = (ev: AxiosProgressEvent) => {
|
||||
reportProgress(
|
||||
{
|
||||
total: fileHandle.file.size + ABYTES,
|
||||
@@ -262,7 +260,7 @@ async function uploadFile(filename, requestOptions) {
|
||||
url,
|
||||
method: "PUT",
|
||||
headers: { "Content-Type": "" },
|
||||
cancelToken: cancellationToken,
|
||||
signal,
|
||||
data,
|
||||
onUploadProgress
|
||||
})
|
||||
@@ -294,7 +292,7 @@ async function uploadFile(filename, requestOptions) {
|
||||
},
|
||||
{
|
||||
headers,
|
||||
cancelToken: cancellationToken
|
||||
signal
|
||||
}
|
||||
)
|
||||
.catch((e) => {
|
||||
@@ -311,21 +309,24 @@ async function uploadFile(filename, requestOptions) {
|
||||
return true;
|
||||
} catch (e) {
|
||||
reportProgress(undefined, { type: "upload", hash: filename });
|
||||
if (e.handle) e.handle();
|
||||
if (e instanceof S3Error) e.handle();
|
||||
else handleS3Error(e);
|
||||
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
async function checkUpload(filename) {
|
||||
async function checkUpload(filename: string) {
|
||||
if ((await getUploadedFileSize(filename)) <= 0) {
|
||||
const error = `Upload verification failed: file size is 0. Please upload this file again. (File hash: ${filename})`;
|
||||
throw new Error(error);
|
||||
}
|
||||
}
|
||||
|
||||
function reportProgress(ev, { type, hash }) {
|
||||
function reportProgress(
|
||||
ev: { total?: number; loaded?: number } | undefined,
|
||||
{ type, hash }: { type: string; hash: string }
|
||||
) {
|
||||
AppEventManager.publish(AppEvents.UPDATE_ATTACHMENT_PROGRESS, {
|
||||
type,
|
||||
hash,
|
||||
@@ -334,8 +335,8 @@ function reportProgress(ev, { type, hash }) {
|
||||
});
|
||||
}
|
||||
|
||||
async function downloadFile(filename, requestOptions) {
|
||||
const { url, headers, chunkSize, cancellationToken } = requestOptions;
|
||||
async function downloadFile(filename: string, requestOptions: RequestOptions) {
|
||||
const { url, headers, chunkSize, signal } = requestOptions;
|
||||
if (await streamablefs.exists(filename)) return true;
|
||||
|
||||
try {
|
||||
@@ -344,47 +345,59 @@ async function downloadFile(filename, requestOptions) {
|
||||
{ type: "download", hash: filename }
|
||||
);
|
||||
|
||||
const signedUrlResponse = await axios.get(url, {
|
||||
headers,
|
||||
responseType: "text"
|
||||
const signedUrl = (
|
||||
await axios.get(url, {
|
||||
headers,
|
||||
responseType: "text"
|
||||
})
|
||||
).data;
|
||||
|
||||
const response = await fetch(signedUrl, {
|
||||
signal
|
||||
});
|
||||
|
||||
const signedUrl = signedUrlResponse.data;
|
||||
const response = await axios.get(signedUrl, {
|
||||
responseType: "arraybuffer",
|
||||
cancelToken: cancellationToken,
|
||||
onDownloadProgress: (ev) =>
|
||||
reportProgress(ev, { type: "download", hash: filename })
|
||||
});
|
||||
|
||||
const contentType = response.headers["content-type"];
|
||||
const contentType = response.headers.get("content-type");
|
||||
if (contentType === "application/xml") {
|
||||
const error = parseS3Error(response.data);
|
||||
const error = parseS3Error(await response.text());
|
||||
if (error.Code !== "Unknown") {
|
||||
throw new Error(`[${error.Code}] ${error.Message}`);
|
||||
}
|
||||
}
|
||||
|
||||
const contentLength = response.headers["content-length"];
|
||||
if (contentLength === "0") {
|
||||
const contentLength = parseInt(
|
||||
response.headers.get("content-length") || "0"
|
||||
);
|
||||
if (contentLength === 0 || isNaN(contentLength)) {
|
||||
const error = `File length is 0. Please upload this file again from the attachment manager. (File hash: ${filename})`;
|
||||
await db.attachments.markAsFailed(filename, error);
|
||||
await db.attachments?.markAsFailed(filename, error);
|
||||
throw new Error(error);
|
||||
}
|
||||
|
||||
const distributor = new ChunkDistributor(chunkSize + ABYTES);
|
||||
distributor.fill(new Uint8Array(response.data));
|
||||
distributor.close();
|
||||
if (!response.body) {
|
||||
const error = `The download response does not contain a body. Please upload this file again from the attachment manager. (File hash: ${filename})`;
|
||||
await db.attachments?.markAsFailed(filename, error);
|
||||
throw new Error(error);
|
||||
}
|
||||
|
||||
const fileHandle = await streamablefs.createFile(
|
||||
filename,
|
||||
response.data.byteLength,
|
||||
contentLength,
|
||||
"application/octet-stream"
|
||||
);
|
||||
|
||||
for (let chunk of distributor.chunks) {
|
||||
await fileHandle.write(chunk.data);
|
||||
}
|
||||
await response.body
|
||||
.pipeThrough(
|
||||
new ProgressStream((totalRead, done) => {
|
||||
reportProgress(
|
||||
{
|
||||
total: contentLength,
|
||||
loaded: done ? contentLength : totalRead
|
||||
},
|
||||
{ type: "download", hash: filename }
|
||||
);
|
||||
})
|
||||
)
|
||||
.pipeThrough(new ChunkedStream(chunkSize + ABYTES))
|
||||
.pipeTo(fileHandle.writeable);
|
||||
|
||||
return true;
|
||||
} catch (e) {
|
||||
@@ -394,96 +407,45 @@ async function downloadFile(filename, requestOptions) {
|
||||
}
|
||||
}
|
||||
|
||||
async function downloadAttachment(filename, requestOptions) {
|
||||
const { url, headers, chunkSize, cancellationToken } = requestOptions;
|
||||
if (await streamablefs.exists(filename)) return true;
|
||||
|
||||
try {
|
||||
const signedUrlResponse = await axios.get(url, {
|
||||
headers,
|
||||
responseType: "text"
|
||||
});
|
||||
|
||||
const signedUrl = signedUrlResponse.data;
|
||||
const response = await axios.get(signedUrl, {
|
||||
responseType: "arraybuffer",
|
||||
cancelToken: cancellationToken
|
||||
});
|
||||
|
||||
const contentType = response.headers["content-type"];
|
||||
if (contentType === "application/xml") {
|
||||
const error = parseS3Error(response.data);
|
||||
if (error.Code !== "Unknown") {
|
||||
throw new Error(`[${error.Code}] ${error.Message}`);
|
||||
}
|
||||
}
|
||||
|
||||
const contentLength = response.headers["content-length"];
|
||||
if (contentLength === "0") {
|
||||
const error = `File length is 0. Please upload this file again from the attachment manager. (File hash: ${filename})`;
|
||||
await db.attachments.markAsFailed(filename, error);
|
||||
throw new Error(error);
|
||||
}
|
||||
|
||||
const distributor = new ChunkDistributor(chunkSize + ABYTES);
|
||||
distributor.fill(new Uint8Array(response.data));
|
||||
distributor.close();
|
||||
|
||||
const fileHandle = await streamablefs.createFile(
|
||||
filename,
|
||||
response.data.byteLength,
|
||||
"application/octet-stream"
|
||||
);
|
||||
|
||||
for (let chunk of distributor.chunks) {
|
||||
await fileHandle.write(chunk.data);
|
||||
}
|
||||
|
||||
return true;
|
||||
} catch (e) {
|
||||
handleS3Error(e, "Could not download file");
|
||||
reportProgress(undefined, { type: "download", hash: filename });
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
function exists(filename) {
|
||||
function exists(filename: string) {
|
||||
return streamablefs.exists(filename);
|
||||
}
|
||||
|
||||
async function saveFile(filename, fileMetadata) {
|
||||
type FileMetadata = {
|
||||
key: SerializedKey;
|
||||
iv: string;
|
||||
name: string;
|
||||
type: string;
|
||||
isUploaded: boolean;
|
||||
};
|
||||
export async function decryptFile(
|
||||
filename: string,
|
||||
fileMetadata: FileMetadata
|
||||
) {
|
||||
if (!fileMetadata) return false;
|
||||
|
||||
const fileHandle = await streamablefs.readFile(filename);
|
||||
if (!fileHandle) return false;
|
||||
|
||||
const { key, iv, name, type, isUploaded } = fileMetadata;
|
||||
|
||||
const blobParts = [];
|
||||
const reader = fileHandle.getReader();
|
||||
const { key, iv } = fileMetadata;
|
||||
|
||||
const crypto = await getNNCrypto();
|
||||
await crypto.decryptStream(
|
||||
key,
|
||||
iv,
|
||||
{
|
||||
read: async () => {
|
||||
const { value } = await reader.read();
|
||||
return value;
|
||||
},
|
||||
write: async (chunk) => {
|
||||
blobParts.push(chunk.data);
|
||||
}
|
||||
},
|
||||
filename
|
||||
);
|
||||
const decryptionStream = await crypto.createDecryptionStream(key, iv);
|
||||
return await toBlob(fileHandle.readable.pipeThrough(decryptionStream));
|
||||
}
|
||||
|
||||
saveAs(new Blob(blobParts, { type }), getFileNameWithExtension(name, type));
|
||||
async function saveFile(filename: string, fileMetadata: FileMetadata) {
|
||||
if (!fileMetadata) return false;
|
||||
|
||||
const { name, type, isUploaded } = fileMetadata;
|
||||
|
||||
const decrypted = await decryptFile(filename, fileMetadata);
|
||||
if (decrypted) saveAs(decrypted, getFileNameWithExtension(name, type));
|
||||
|
||||
if (isUploaded) await streamablefs.deleteFile(filename);
|
||||
}
|
||||
|
||||
async function deleteFile(filename, requestOptions) {
|
||||
async function deleteFile(filename: string, requestOptions: RequestOptions) {
|
||||
if (!requestOptions) return await streamablefs.deleteFile(filename);
|
||||
if (!requestOptions && !(await streamablefs.exists(filename))) return true;
|
||||
|
||||
@@ -502,10 +464,10 @@ async function deleteFile(filename, requestOptions) {
|
||||
}
|
||||
}
|
||||
|
||||
async function getUploadedFileSize(filename) {
|
||||
async function getUploadedFileSize(filename: string) {
|
||||
try {
|
||||
const url = `${hosts.API_HOST}/s3?name=${filename}`;
|
||||
const token = await db.user.tokenManager.getAccessToken();
|
||||
const token = await db.user?.tokenManager.getAccessToken();
|
||||
|
||||
const attachmentInfo = await axios.head(url, {
|
||||
headers: { Authorization: `Bearer ${token}` }
|
||||
@@ -528,13 +490,13 @@ const FS = {
|
||||
readEncrypted,
|
||||
uploadFile: cancellable(uploadFile),
|
||||
downloadFile: cancellable(downloadFile),
|
||||
downloadAttachment: cancellable(downloadAttachment),
|
||||
deleteFile,
|
||||
saveFile,
|
||||
exists,
|
||||
writeEncryptedFile,
|
||||
clearFileStorage,
|
||||
getUploadedFileSize,
|
||||
decryptFile,
|
||||
|
||||
hashBase64,
|
||||
hashBuffer,
|
||||
@@ -542,106 +504,25 @@ const FS = {
|
||||
};
|
||||
export default FS;
|
||||
|
||||
function isSuccessStatusCode(statusCode) {
|
||||
function isSuccessStatusCode(statusCode: number) {
|
||||
return statusCode >= 200 && statusCode <= 299;
|
||||
}
|
||||
|
||||
function cancellable(operation) {
|
||||
return function (filename, requestOptions) {
|
||||
const source = axios.CancelToken.source();
|
||||
requestOptions.cancellationToken = source.token;
|
||||
function cancellable(
|
||||
operation: (filename: string, requestOptions: RequestOptions) => any
|
||||
) {
|
||||
return function (filename: string, requestOptions: RequestOptions) {
|
||||
const abortController = new AbortController();
|
||||
requestOptions.signal = abortController.signal;
|
||||
return {
|
||||
execute: () => operation(filename, requestOptions),
|
||||
cancel: (message) => {
|
||||
source.cancel(message);
|
||||
cancel: (message: string) => {
|
||||
abortController.abort(message);
|
||||
}
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
class ChunkDistributor {
|
||||
/**
|
||||
* @typedef {{length: number, data: Uint8Array, final: boolean}} Chunk
|
||||
*/
|
||||
constructor(chunkSize) {
|
||||
this.chunkSize = chunkSize;
|
||||
this.chunks = [];
|
||||
this.filledCount = 0;
|
||||
this.done = false;
|
||||
}
|
||||
|
||||
/**
|
||||
* @returns {Chunk}
|
||||
*/
|
||||
get lastChunk() {
|
||||
return this.chunks[this.chunks.length - 1];
|
||||
}
|
||||
|
||||
/**
|
||||
* @returns {boolean}
|
||||
*/
|
||||
get isLastChunkFilled() {
|
||||
return this.lastChunk.length === this.chunkSize;
|
||||
}
|
||||
|
||||
/**
|
||||
* @returns {Chunk}
|
||||
*/
|
||||
get firstChunk() {
|
||||
const chunk = this.chunks.shift();
|
||||
if (chunk.data.length === this.chunkSize) this.filledCount--;
|
||||
return chunk;
|
||||
}
|
||||
|
||||
close() {
|
||||
if (!this.lastChunk)
|
||||
throw new Error("No data available in this distributor.");
|
||||
this.lastChunk.data = this.lastChunk.data.slice(0, this.lastChunk.length);
|
||||
this.lastChunk.final = true;
|
||||
this.done = true;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Uint8Array} data
|
||||
*/
|
||||
fill(data) {
|
||||
if (this.done || !data || !data.length) return;
|
||||
|
||||
const dataLength = data.length;
|
||||
const totalBlocks = Math.ceil(dataLength / this.chunkSize);
|
||||
|
||||
for (let i = 0; i < totalBlocks; ++i) {
|
||||
const start = i * this.chunkSize;
|
||||
|
||||
if (this.lastChunk && !this.isLastChunkFilled) {
|
||||
const needed = this.chunkSize - this.lastChunk.length;
|
||||
const end = Math.min(start + needed, dataLength);
|
||||
const chunk = data.slice(start, end);
|
||||
|
||||
this.lastChunk.data.set(chunk, this.lastChunk.length);
|
||||
this.lastChunk.length += chunk.length;
|
||||
|
||||
if (this.lastChunk.length === this.chunkSize) this.filledCount++;
|
||||
|
||||
if (end !== dataLength) {
|
||||
this.fill(data.slice(end));
|
||||
break;
|
||||
}
|
||||
} else {
|
||||
const end = Math.min(start + this.chunkSize, dataLength);
|
||||
let chunk = data.slice(start, end);
|
||||
|
||||
const buffer = new Uint8Array(this.chunkSize);
|
||||
buffer.set(chunk, 0);
|
||||
|
||||
this.chunks.push({ data: buffer, final: false, length: chunk.length });
|
||||
if (chunk.length === this.chunkSize) this.filledCount++;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function parseS3Error(data) {
|
||||
function parseS3Error(data: ArrayBuffer | unknown) {
|
||||
if (!(data instanceof ArrayBuffer)) {
|
||||
return {
|
||||
Code: "UNKNOWN",
|
||||
@@ -655,28 +536,29 @@ function parseS3Error(data) {
|
||||
if (!ErrorElement)
|
||||
return { Code: "Unknown", Message: "An unknown error occurred." };
|
||||
|
||||
const error = {};
|
||||
const error: Record<string, string> = {};
|
||||
for (const child of ErrorElement.children) {
|
||||
error[child.tagName] = child.textContent;
|
||||
if (child.textContent) error[child.tagName] = child.textContent;
|
||||
}
|
||||
return error;
|
||||
}
|
||||
|
||||
function handleS3Error(e, message) {
|
||||
function handleS3Error(e: unknown, message?: unknown) {
|
||||
if (axios.isAxiosError(e) && e.response?.data) {
|
||||
const error = parseS3Error(e.response.data);
|
||||
showToast("error", `${message}: [${error.Code}] ${error.Message}`);
|
||||
} else if (message) {
|
||||
} else if (message && e instanceof Error) {
|
||||
showToast("error", `${message}: ${e.message}`);
|
||||
} else {
|
||||
} else if (e instanceof Error) {
|
||||
showToast("error", e.message);
|
||||
} else {
|
||||
showToast("error", JSON.stringify(e));
|
||||
}
|
||||
}
|
||||
|
||||
class S3Error extends Error {
|
||||
constructor(message, error) {
|
||||
constructor(message: string, readonly error: Error) {
|
||||
super(message);
|
||||
this.error = error;
|
||||
}
|
||||
|
||||
handle() {
|
||||
@@ -17,24 +17,18 @@ You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/

type Message<T> = {
  type: string;
  data?: T;
};

export function sendEventWithResult<T>(type: string): Promise<T> {
  return new Promise<T>((resolve) => {
    // eslint-disable-next-line no-restricted-globals
    addEventListener(
      "message",
      (ev: MessageEvent<Message<T>>) => {
        const { type: messageType, data } = ev.data;
        if (messageType === type && data) {
          resolve(data);
        }
export class ProgressStream extends TransformStream<Uint8Array, Uint8Array> {
  constructor(report: (totalRead: number, done?: boolean) => void) {
    let totalRead = 0;
    super({
      start() {},
      transform(chunk, controller) {
        controller.enqueue(chunk);
        report((totalRead += chunk.length));
      },
      { once: true }
    );
    postMessage({ type });
  });
      flush() {
        report(totalRead, true);
      }
    });
  }
}
29 apps/web/src/interfaces/stream-utils.ts (Normal file)
@@ -0,0 +1,29 @@
/*
This file is part of the Notesnook project (https://notesnook.com/)

Copyright (C) 2023 Streetwriters (Private) Limited

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import { toAsyncIterator } from "@notesnook-importer/core/dist/src/utils/stream";

export async function consumeReadableStream<T>(
  stream: ReadableStream<T>
): Promise<T[]> {
  const chunks: T[] = [];
  for await (const chunk of toAsyncIterator(stream)) {
    chunks.push(chunk);
  }
  return chunks;
}
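A small usage sketch of this helper (the function name and input are hypothetical; the real call sites are the base64 test above and `readEncrypted` in `src/interfaces/fs.ts`):

```ts
// Collect a decrypted byte stream into a single string by decoding chunks
// as UTF-8 and concatenating the results.
async function readAllText(readable: ReadableStream<Uint8Array>) {
  const parts = await consumeReadableStream(
    readable.pipeThrough(new TextDecoderStream())
  );
  return parts.join("");
}
```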
@@ -40,7 +40,7 @@ class AttachmentStore extends BaseStore {
      const percent = Math.round((loaded / total) * 100);
      const status =
        percent < 100 ? { type, loaded, total, progress: percent } : null;
      if (!status) return this.refresh();
      if (!status) this.refresh();
      state.attachments[index] = {
        ...state.attachments[index],
        status
@@ -26,7 +26,8 @@ function registerWorker() {
      .getRegistration("./")
      .then((swReg) => {
        return (
          swReg || navigator.serviceWorker.register("sw.js", { scope: "./" })
          swReg ||
          navigator.serviceWorker.register("stream-saver-sw.js", { scope: "./" })
        );
      })
      .then((swReg) => {
@@ -90,7 +91,7 @@ export function postMessage(
  data.pathname = data.pathname.replace(/^\/+/g, "");

  // remove protocol
  let org = origin.replace(/(^\w+:|^)\/\//, "");
  const org = origin.replace(/(^\w+:|^)\/\//, "");

  // set the absolute pathname to the download url.
  data.url = new URL(`${scope + org}/${data.pathname}`).toString();
@@ -27,48 +27,17 @@ let useBlobFallback =
  "safari" in globalThis ||
  "WebKitPoint" in globalThis;

type IFrameContainer = HTMLIFrameElement & {
  loaded: boolean;
  isIFrame: boolean;
  remove: () => void;
  addEventListener: HTMLIFrameElement["addEventListener"];
  dispatchEvent: HTMLIFrameElement["dispatchEvent"];
  removeEventListener: HTMLIFrameElement["removeEventListener"];
  postMessage(
    message: any,
    targetOrigin: string,
    transfer?: Transferable[] | undefined
  ): void;
};

/**
 * create a hidden iframe and append it to the DOM (body)
 *
 * @param {string} src page to load
 * @return {HTMLIFrameElement} page to load
 */
function makeIframe(src: string, doc = true): IFrameContainer {
function makeIframe(src: string, doc = true) {
  if (!src) throw new Error("meh");

  const iframe = document.createElement("iframe") as IFrameContainer;
  const iframe = document.createElement("iframe");
  iframe.hidden = true;
  if (doc) iframe.srcdoc = src;
  else iframe.src = src;
  iframe.name = "iframe";
  iframe.loaded = false;
  iframe.isIFrame = true;
  iframe.postMessage = (message, targetOrigin, transfer) =>
    iframe.contentWindow?.postMessage(message, targetOrigin, transfer);

  iframe.addEventListener(
    "load",
    () => {
      iframe.loaded = true;
    },
    { once: true }
  );
  document.body.appendChild(iframe);
  return iframe;
}

try {
@@ -85,7 +54,6 @@ function checkSupportsTransferable() {
  // Transferable stream was first enabled in chrome v73 behind a flag
  const { readable } = new TransformStream();
  const mc = new MessageChannel();
  // @ts-ignore
  mc.port1.postMessage(readable, [readable]);
  mc.port1.close();
  mc.port2.close();
@@ -139,7 +107,6 @@ export function createWriteStream(
    ts = new TransformStream();
    const readableStream = ts.readable;

    // @ts-ignore
    channel.port1.postMessage({ readableStream }, [readableStream]);
  }
  channel.port1.onmessage = async (evt) => {
@@ -26,8 +26,9 @@ export class ZipStream extends TransformStream<ZipFile, Uint8Array> {
    super({
      start(controller) {
        zipper.ondata = (err, data) => {
          if (err) controller.error(err);
          else controller.enqueue(data);
          if (err) {
            controller.error(err);
          } else controller.enqueue(data);
        };
      },
      transform(chunk) {
27 apps/web/vitest.config.ts (Normal file)
@@ -0,0 +1,27 @@
/*
This file is part of the Notesnook project (https://notesnook.com/)

Copyright (C) 2023 Streetwriters (Private) Limited

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/

import { defineConfig } from "vitest/config";

export default defineConfig({
  test: {
    environment: "happy-dom",
    dir: "./__tests__/"
  }
});
14 bugs.md (Normal file)
@@ -0,0 +1,14 @@
Mobile:

1. Select all notes in a topic -> Click on add button
2. Edit button in a notebook doesn't work
3. Edit notebook sheet doesn't close after saving edits
4. Removing notes from a topic doesn't update the notes count in the list automatically
5. Notes inside a notebook/topic keep showing the notebook tag at the top
6.

Web:

1. When a note is in a single topic, it's impossible to move it to its parent notebook
2. When a note is in a single notebook, it's impossible to remove it
3. Disable sync -> Refresh app -> Enable sync & notice how the sync status is not updated nor does the sync run automatically
@@ -56,6 +56,6 @@
  },
  "license": "GPL-3.0-or-later",
  "volta": {
    "node": "16.15.1"
    "node": "18.15.0"
  }
}
@@ -23,10 +23,9 @@ import {
OutputFormat,
Cipher,
EncryptionKey,
Chunk
} from "@notesnook/crypto/dist/src/types";
INNCrypto
} from "@notesnook/crypto";
import { NNCryptoWorkerModule } from "./src/worker";
import { INNCrypto, IStreamable } from "@notesnook/crypto/dist/src/interfaces";
import { wrap } from "comlink";

export class NNCryptoWorker implements INNCrypto {
@@ -92,80 +91,92 @@ export class NNCryptoWorker implements INNCrypto {
return this.workermodule.exportKey(password, salt);
}

async encryptStream(
key: SerializedKey,
stream: IStreamable,
streamId?: string
): Promise<string> {
if (!streamId) throw new Error("streamId is required.");
async createEncryptionStream(key: SerializedKey) {
await this.init();
if (!this.workermodule) throw new Error("Worker module is not ready.");
if (!this.worker) throw new Error("Worker is not ready.");

const eventListener = await this.createWorkerStream(
streamId,
stream,
() => {
if (this.worker)
this.worker.removeEventListener("message", eventListener);
}
);
this.worker.addEventListener("message", eventListener);
const iv = await this.workermodule.createEncryptionStream(streamId, key);
this.worker.removeEventListener("message", eventListener);
return iv;
return this.workermodule.createEncryptionStream(key);
}

async decryptStream(
key: SerializedKey,
iv: string,
stream: IStreamable,
streamId?: string
): Promise<void> {
if (!streamId) throw new Error("streamId is required.");
async createDecryptionStream(key: SerializedKey, iv: string) {
await this.init();
if (!this.workermodule) throw new Error("Worker module is not ready.");
if (!this.worker) throw new Error("Worker is not ready.");

const eventListener = await this.createWorkerStream(
streamId,
stream,
() => {
if (this.worker)
this.worker.removeEventListener("message", eventListener);
}
);
this.worker.addEventListener("message", eventListener);
await this.workermodule.createDecryptionStream(streamId, iv, key);
this.worker.removeEventListener("message", eventListener);
const { stream } = await this.workermodule.createDecryptionStream(key, iv);
return stream;
}
// async encryptStream(
// key: SerializedKey,
// stream: IStreamable,
// streamId?: string
// ): Promise<string> {
// if (!streamId) throw new Error("streamId is required.");
// await this.init();
// if (!this.workermodule) throw new Error("Worker module is not ready.");
// if (!this.worker) throw new Error("Worker is not ready.");

private async createWorkerStream(
streamId: string,
stream: IStreamable,
done: () => void
): Promise<EventListenerObject> {
const readEventType = `${streamId}:read`;
const writeEventType = `${streamId}:write`;
let finished = false;
return {
handleEvent: async (ev: MessageEvent) => {
if (finished) return;
// const eventListener = await this.createWorkerStream(
// streamId,
// stream,
// () => {
// if (this.worker)
// this.worker.removeEventListener("message", eventListener);
// }
// );
// this.worker.addEventListener("message", eventListener);
// const iv = await this.workermodule.createEncryptionStream(streamId, key);
// this.worker.removeEventListener("message", eventListener);
// return iv;
// }

const { type } = ev.data;
if (type === readEventType) {
const chunk = await stream.read();
if (!chunk || !this.worker || !chunk.data) return;
this.worker.postMessage({ type, data: chunk }, [chunk.data.buffer]);
} else if (type === writeEventType) {
const chunk = ev.data.data as Chunk;
await stream.write(chunk);
if (chunk.final) {
finished = true;
done();
}
}
}
};
}
// async decryptStream(
// key: SerializedKey,
// iv: string,
// stream: IStreamable,
// streamId?: string
// ): Promise<void> {
// if (!streamId) throw new Error("streamId is required.");
// await this.init();
// if (!this.workermodule) throw new Error("Worker module is not ready.");
// if (!this.worker) throw new Error("Worker is not ready.");

// const eventListener = await this.createWorkerStream(
// streamId,
// stream,
// () => {
// if (this.worker)
// this.worker.removeEventListener("message", eventListener);
// }
// );
// this.worker.addEventListener("message", eventListener);
// await this.workermodule.createDecryptionStream(streamId, iv, key);
// this.worker.removeEventListener("message", eventListener);
// }

// private async createWorkerStream(
// streamId: string,
// stream: IStreamable,
// done: () => void
// ): Promise<EventListenerObject> {
// const readEventType = `${streamId}:read`;
// const writeEventType = `${streamId}:write`;
// let finished = false;
// return {
// handleEvent: async (ev: MessageEvent) => {
// if (finished) return;

// const { type } = ev.data;
// if (type === readEventType) {
// const chunk = await stream.read();
// if (!chunk || !this.worker || !chunk.data) return;
// this.worker.postMessage({ type, data: chunk }, [chunk.data.buffer]);
// } else if (type === writeEventType) {
// const chunk = ev.data.data as Chunk;
// await stream.write(chunk);
// if (chunk.final) {
// finished = true;
// done();
// }
// }
// }
// };
// }
}
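With the per-stream event plumbing (createWorkerStream and the streamId read/write messages) removed, the worker proxy now simply hands back streams that the caller can pipe through. A hedged usage sketch follows; cryptoWorker, key, source, and sink are hypothetical stand-ins for values the app already has, and the key type is abbreviated to unknown.

type Chunk = { data: Uint8Array; final: boolean };

declare const cryptoWorker: {
  createEncryptionStream(key: unknown): Promise<{
    iv: string;
    stream: TransformStream<Chunk, Uint8Array>;
  }>;
  createDecryptionStream(key: unknown, iv: string): Promise<TransformStream<Uint8Array, Uint8Array>>;
};
declare const key: unknown;
declare const source: ReadableStream<Chunk>;
declare const sink: WritableStream<Uint8Array>;

async function encryptToSink(): Promise<string> {
  const { iv, stream } = await cryptoWorker.createEncryptionStream(key);
  // Plaintext chunks flow through the transferred TransformStream into the sink.
  await source.pipeThrough(stream).pipeTo(sink);
  return iv; // persisted alongside the ciphertext for later decryption
}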
@@ -23,8 +23,7 @@ import {
Plaintext,
SerializedKey
} from "@notesnook/crypto/dist/src/types";
import { expose } from "comlink";
import WorkerStream from "./workerstream";
import { expose, transfer } from "comlink";
import { NNCrypto } from "@notesnook/crypto";

let crypto: NNCrypto | null = null;
@@ -63,17 +62,19 @@ const module = {
const crypto = await loadNNCrypto();
return crypto.decrypt(key, cipherData, outputFormat);
},
createEncryptionStream: async function (id: string, key: SerializedKey) {
createEncryptionStream: async function (key: SerializedKey) {
const crypto = await loadNNCrypto();
return crypto.createEncryptionStream(key, new WorkerStream(id));
const stream = await crypto.createEncryptionStream(key);
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
return transfer(stream, [stream.stream]);
},
createDecryptionStream: async function (
id: string,
iv: string,
key: SerializedKey
) {
createDecryptionStream: async function (key: SerializedKey, iv: string) {
const crypto = await loadNNCrypto();
return crypto.createDecryptionStream(iv, key, new WorkerStream(id));
const obj = { stream: await crypto.createDecryptionStream(key, iv) };
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
return transfer(obj, [obj.stream]);
}
};
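The transfer() calls above use comlink's transfer helper so the TransformStream is moved to the main thread as a postMessage transferable rather than cloned or proxied. Below is a standalone sketch of that pattern with a hypothetical demo module (not the repo's worker); the same @ts-ignore is needed when the DOM lib's Transferable type does not yet include TransformStream.

import { expose, transfer } from "comlink";

const demoModule = {
  async createUppercaseStream() {
    const stream = new TransformStream<string, string>({
      transform(chunk, controller) {
        controller.enqueue(chunk.toUpperCase());
      }
    });
    // Hand the stream itself over; the main thread receives a usable TransformStream.
    // eslint-disable-next-line @typescript-eslint/ban-ts-comment
    // @ts-ignore
    return transfer({ stream }, [stream]);
  }
};

expose(demoModule);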
@@ -1,64 +0,0 @@
/*
This file is part of the Notesnook project (https://notesnook.com/)

Copyright (C) 2023 Streetwriters (Private) Limited

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/

import { IStreamable } from "@notesnook/crypto/dist/src/interfaces";
import { Chunk } from "@notesnook/crypto/dist/src/types";
import { sendEventWithResult } from "./utils";

export default class WorkerStream
extends ReadableStream<Chunk>
implements IStreamable
{
private id: string;
private reader?: ReadableStreamReader<Chunk>;

constructor(streamId: string) {
super(new WorkerStreamSource(streamId));
this.id = streamId;
}

async read(): Promise<Chunk | undefined> {
if (!this.reader) this.reader = this.getReader();
const { value } = await this.reader.read();
return value;
}

/**
* @param {Uint8Array} chunk
*/
async write(chunk: Chunk): Promise<void> {
if (!chunk.data) return;
postMessage({ type: `${this.id}:write`, data: chunk }, [chunk.data.buffer]);
}
}

class WorkerStreamSource implements UnderlyingSource<Chunk> {
private id: string;
constructor(streamId: string) {
this.id = streamId;
}

start() {}

async pull(controller: ReadableStreamController<Chunk>) {
const chunk = await sendEventWithResult<Chunk>(`${this.id}:read`);
controller.enqueue(chunk);
if (chunk.final) controller.close();
}
}
@@ -20,7 +20,7 @@ along with this program. If not, see <http://www.gnu.org/licenses/>.
import { ready } from "libsodium-wrappers";
import Decryption from "./src/decryption";
import Encryption from "./src/encryption";
import { INNCrypto, IStreamable } from "./src/interfaces";
import { INNCrypto } from "./src/interfaces";
import KeyUtils from "./src/keyutils";
import Password from "./src/password";
import {
@@ -28,8 +28,7 @@ import {
EncryptionKey,
OutputFormat,
Plaintext,
SerializedKey,
Chunk
SerializedKey
} from "./src/types";

export class NNCrypto implements INNCrypto {
@@ -74,73 +73,69 @@ export class NNCrypto implements INNCrypto {
return KeyUtils.exportKey(password, salt);
}

async createEncryptionStream(
key: SerializedKey,
stream: IStreamable
): Promise<string> {
async createEncryptionStream(key: SerializedKey) {
await this.init();
const encryptionStream = Encryption.createStream(key);
return Encryption.createStream(key);

// // eslint-disable-next-line no-constant-condition
// while (true) {
// const chunk = await stream.read();
// if (!chunk) break;

// const { data, final } = chunk;
// if (!data) break;

// const encryptedChunk: Chunk = {
// data: encryptionStream.write(data, final),
// final
// };
// await stream.write(encryptedChunk);

// if (final) break;
// }
// return encryptionStream.header;
}

async createDecryptionStream(key: SerializedKey, iv: string) {
await this.init();
return Decryption.createStream(iv, key);
// eslint-disable-next-line no-constant-condition
while (true) {
const chunk = await stream.read();
if (!chunk) break;
// while (true) {
// const chunk = await stream.read();
// if (!chunk) break;

const { data, final } = chunk;
if (!data) break;
// const { data, final } = chunk;
// if (!data) break;

const encryptedChunk: Chunk = {
data: encryptionStream.write(data, final),
final
};
await stream.write(encryptedChunk);
// const decryptedChunk: Chunk = {
// data: decryptionStream.read(data),
// final
// };
// await stream.write(decryptedChunk);

if (final) break;
}
return encryptionStream.header;
// if (final) break;
// }
}

async createDecryptionStream(
iv: string,
key: SerializedKey,
stream: IStreamable
) {
await this.init();
const decryptionStream = Decryption.createStream(iv, key);
// eslint-disable-next-line no-constant-condition
while (true) {
const chunk = await stream.read();
if (!chunk) break;
// async encryptStream(
// key: SerializedKey,
// stream: IStreamable,
// _streamId?: string
// ): Promise<string> {
// await this.init();
// return await this.createEncryptionStream(key, stream);
// }

const { data, final } = chunk;
if (!data) break;

const decryptedChunk: Chunk = {
data: decryptionStream.read(data),
final
};
await stream.write(decryptedChunk);

if (final) break;
}
}

async encryptStream(
key: SerializedKey,
stream: IStreamable,
_streamId?: string
): Promise<string> {
await this.init();
return await this.createEncryptionStream(key, stream);
}

async decryptStream(
key: SerializedKey,
iv: string,
stream: IStreamable,
_streamId?: string
): Promise<void> {
await this.init();
await this.createDecryptionStream(iv, key, stream);
}
// async decryptStream(
// key: SerializedKey,
// iv: string,
// stream: IStreamable,
// _streamId?: string
// ): Promise<void> {
// await this.init();
// await this.createDecryptionStream(iv, key, stream);
// }
}

export * from "./src/types";
export * from "./src/interfaces";
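A hedged round-trip sketch against the new stream-returning API. It assumes NNCrypto can be constructed directly, that exportKey (part of the interface below) yields a SerializedKey usable for streaming, and that Chunk and SerializedKey are re-exported from the package root; treat it as an illustration, not the repo's own usage.

import { NNCrypto } from "@notesnook/crypto";
import type { Chunk, SerializedKey } from "@notesnook/crypto";

async function roundTrip(plaintext: Uint8Array): Promise<Uint8Array> {
  const crypto = new NNCrypto();
  const key: SerializedKey = await crypto.exportKey("password");

  // Encrypt: the iv (secretstream header) must be stored with the ciphertext.
  const { iv, stream: encrypt } = await crypto.createEncryptionStream(key);
  const plaintextStream = new ReadableStream<Chunk>({
    start(controller) {
      controller.enqueue({ data: plaintext, final: true });
      controller.close();
    }
  });
  const ciphertext = await collect(plaintextStream.pipeThrough(encrypt));

  // Decrypt: chunk boundaries must match those used during encryption.
  const decrypt = await crypto.createDecryptionStream(key, iv);
  const ciphertextStream = new ReadableStream<Uint8Array>({
    start(controller) {
      controller.enqueue(ciphertext);
      controller.close();
    }
  });
  return collect(ciphertextStream.pipeThrough(decrypt));
}

// Drain a byte stream into a single buffer.
async function collect(stream: ReadableStream<Uint8Array>): Promise<Uint8Array> {
  const parts: Uint8Array[] = [];
  const reader = stream.getReader();
  for (;;) {
    const { done, value } = await reader.read();
    if (done) break;
    parts.push(value);
  }
  const out = new Uint8Array(parts.reduce((n, p) => n + p.length, 0));
  let offset = 0;
  for (const p of parts) {
    out.set(p, offset);
    offset += p.length;
  }
  return out;
}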
@@ -24,18 +24,12 @@ import {
to_base64,
from_base64,
base64_variants,
StateAddress,
to_string,
from_hex
from_hex,
crypto_secretstream_xchacha20poly1305_TAG_FINAL
} from "libsodium-wrappers";
import KeyUtils from "./keyutils";
import {
Cipher,
EncryptionKey,
OutputFormat,
Plaintext,
SerializedKey
} from "./types";
import { Cipher, OutputFormat, Plaintext, SerializedKey } from "./types";

export default class Decryption {
private static transformInput(cipherData: Cipher): Uint8Array {
@@ -88,26 +82,47 @@ export default class Decryption {
};
}

static createStream(header: string, key: SerializedKey): DecryptionStream {
return new DecryptionStream(header, KeyUtils.transform(key));
}
}

class DecryptionStream {
state: StateAddress;
constructor(header: string, key: EncryptionKey) {
this.state = crypto_secretstream_xchacha20poly1305_init_pull(
static createStream(
header: string,
key: SerializedKey
): TransformStream<Uint8Array, Uint8Array> {
const { key: _key } = KeyUtils.transform(key);
const state = crypto_secretstream_xchacha20poly1305_init_pull(
from_base64(header),
key.key
_key
);
}

read(chunk: Uint8Array): Uint8Array {
const { message } = crypto_secretstream_xchacha20poly1305_pull(
this.state,
chunk,
null
);
return message;
return new TransformStream<Uint8Array, Uint8Array>({
start() {},
transform(chunk, controller) {
const { message, tag } = crypto_secretstream_xchacha20poly1305_pull(
state,
chunk,
null
);
controller.enqueue(message);
if (tag === crypto_secretstream_xchacha20poly1305_TAG_FINAL)
controller.terminate();
}
});
}
}

// class DecryptionStream {
// state: StateAddress;
// constructor(header: string, key: EncryptionKey) {
// this.state = crypto_secretstream_xchacha20poly1305_init_pull(
// from_base64(header),
// key.key
// );
// }

// read(chunk: Uint8Array): Uint8Array {
// const { message } = crypto_secretstream_xchacha20poly1305_pull(
// this.state,
// chunk,
// null
// );
// return message;
// }
// }
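Because the transform above calls controller.terminate() on the TAG_FINAL tag, a pipeline built on this stream finishes by itself once the last chunk is decrypted. A small hedged consumption sketch (all three stream values are hypothetical stand-ins):

declare const ciphertext: ReadableStream<Uint8Array>;
declare const decryptionStream: TransformStream<Uint8Array, Uint8Array>;
declare const plaintextSink: WritableStream<Uint8Array>;

async function decryptToSink(): Promise<void> {
  // terminate() on the final chunk closes the readable side, so this pipe
  // resolves once the last chunk has been decrypted and written.
  await ciphertext.pipeThrough(decryptionStream).pipeTo(plaintextSink);
}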
@@ -27,17 +27,10 @@ import {
crypto_secretstream_xchacha20poly1305_TAG_MESSAGE,
to_base64,
from_base64,
base64_variants,
StateAddress
base64_variants
} from "libsodium-wrappers";
import KeyUtils from "./keyutils";
import {
Cipher,
EncryptionKey,
OutputFormat,
Plaintext,
SerializedKey
} from "./types";
import { Chunk, Cipher, OutputFormat, Plaintext, SerializedKey } from "./types";

const encoder = new TextEncoder();
export default class Encryption {
@@ -89,35 +82,62 @@ export default class Encryption {
};
}

static createStream(key: SerializedKey): EncryptionStream {
return new EncryptionStream(KeyUtils.transform(key));
}
}

class EncryptionStream {
state: StateAddress;
header: string;
constructor(key: EncryptionKey) {
static createStream(key: SerializedKey): {
iv: string;
stream: TransformStream<Chunk, Uint8Array>;
} {
const { key: _key } = KeyUtils.transform(key);
const { state, header } = crypto_secretstream_xchacha20poly1305_init_push(
key.key,
_key,
"base64"
);
this.state = state;
this.header = header;
}

write(chunk: Uint8Array, final?: boolean): Uint8Array {
return crypto_secretstream_xchacha20poly1305_push(
this.state,
chunk,
null,
final
? crypto_secretstream_xchacha20poly1305_TAG_FINAL
: crypto_secretstream_xchacha20poly1305_TAG_MESSAGE
);
return {
iv: header,
stream: new TransformStream<Chunk, Uint8Array>({
start() {},
transform(chunk, controller) {
controller.enqueue(
crypto_secretstream_xchacha20poly1305_push(
state,
chunk.data,
null,
chunk.final
? crypto_secretstream_xchacha20poly1305_TAG_FINAL
: crypto_secretstream_xchacha20poly1305_TAG_MESSAGE
)
);
if (chunk.final) controller.terminate();
}
})
};
}
}

// class EncryptionStream {
// state: StateAddress;
// header: string;
// constructor(key: EncryptionKey) {
// const { state, header } = crypto_secretstream_xchacha20poly1305_init_push(
// key.key,
// "base64"
// );
// this.state = state;
// this.header = header;
// }

// write(chunk: Uint8Array, final?: boolean): Uint8Array {
// return crypto_secretstream_xchacha20poly1305_push(
// this.state,
// chunk,
// null,
// final
// ? crypto_secretstream_xchacha20poly1305_TAG_FINAL
// : crypto_secretstream_xchacha20poly1305_TAG_MESSAGE
// );
// }
// }

function getAlgorithm(base64Variant: base64_variants) {
//Template: encryptionAlgorithm-kdfAlgorithm-base64variant
return `xcha-argon2i13-${base64Variant}`;
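The encryption stream above consumes Chunk objects ({ data, final }) so the last message can carry TAG_FINAL. A hedged sketch of adapting a plain byte stream to that shape: hold one chunk back so the final one can be emitted with final set to true. The adapter and its local Chunk type are illustrative, not the repo's code.

interface Chunk {
  data: Uint8Array;
  final: boolean;
}

function toChunkStream(): TransformStream<Uint8Array, Chunk> {
  let pending: Uint8Array | undefined;
  return new TransformStream<Uint8Array, Chunk>({
    transform(chunk, controller) {
      // Emit the previously buffered chunk; it is not the last one.
      if (pending) controller.enqueue({ data: pending, final: false });
      pending = chunk;
    },
    flush(controller) {
      // The last buffered chunk is the final one.
      if (pending) controller.enqueue({ data: pending, final: true });
    }
  });
}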
@@ -50,16 +50,12 @@ export interface INNCrypto {

exportKey(password: string, salt?: string): Promise<SerializedKey>;

encryptStream(
key: SerializedKey,
stream: IStreamable,
streamId?: string
): Promise<string>;
createEncryptionStream(
key: SerializedKey
): Promise<{ iv: string; stream: TransformStream<Chunk, Uint8Array> }>;

decryptStream(
createDecryptionStream(
key: SerializedKey,
iv: string,
stream: IStreamable,
streamId?: string
): Promise<void>;
iv: string
): Promise<TransformStream<Uint8Array, Uint8Array>>;
}
@@ -17,28 +17,36 @@ You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/

import { Chunk } from "@notesnook/crypto/dist/src/types";
import FileStreamSource from "./filestreamsource";
import { File } from "./types";

export default class FileHandle extends ReadableStream<Chunk> {
export default class FileHandle {
private storage: LocalForage;
private file: File;
public file: File;

constructor(storage: LocalForage, file: File) {
super(new FileStreamSource(storage, file));

this.file = file;
this.storage = storage;
}

/**
*
* @param {Uint8Array} chunk
*/
async write(chunk: Uint8Array) {
await this.storage.setItem(this.getChunkKey(this.file.chunks++), chunk);
await this.storage.setItem(this.file.filename, this.file);
get readable() {
return new ReadableStream(new FileStreamSource(this.storage, this.file));
}

get writeable() {
return new WritableStream<Uint8Array>({
write: async (chunk, controller) => {
if (controller.signal.aborted) return;

await this.storage.setItem(this.getChunkKey(this.file.chunks++), chunk);
await this.storage.setItem(this.file.filename, this.file);
},
abort: async () => {
for (let i = 0; i < this.file.chunks; ++i) {
await this.storage.removeItem(this.getChunkKey(i));
}
}
});
}

async addAdditionalData<T>(key: string, value: T) {
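A hedged sketch of the write path the new writeable getter enables (handle and encrypted are hypothetical values from the calling code): ciphertext is piped straight into the localforage-backed chunk storage, and cancelling the pipe triggers the abort cleanup shown above.

declare const handle: { writeable: WritableStream<Uint8Array> };
declare const encrypted: ReadableStream<Uint8Array>;

async function persistEncryptedFile(): Promise<void> {
  // Each ciphertext chunk is written as it arrives; FileHandle's write()
  // callback stores it and bumps the chunk counter.
  await encrypted.pipeTo(handle.writeable);
}

// Cancelling mid-write aborts the destination, which removes stored chunks.
async function persistWithCancellation(signal: AbortSignal): Promise<void> {
  await encrypted.pipeTo(handle.writeable, { signal });
}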
@@ -18,7 +18,6 @@ along with this program. If not, see <http://www.gnu.org/licenses/>.
*/

import { File } from "./types";
import { Chunk } from "@notesnook/crypto/dist/src/types";

export default class FileStreamSource {
private storage: LocalForage;
@@ -32,16 +31,12 @@ export default class FileStreamSource {

start() {}

async pull(controller: ReadableStreamDefaultController<Chunk>) {
async pull(controller: ReadableStreamDefaultController<Uint8Array>) {
const data = await this.readChunk(this.offset++);

if (data) controller.enqueue(data);

const isFinalChunk = this.offset === this.file.chunks;

if (data)
controller.enqueue({
data,
final: isFinalChunk
});

if (isFinalChunk || !data) controller.close();
}
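A minimal reading sketch grounded in the pull() logic above: the source enqueues { data, final } objects and closes after the last stored chunk, so a consumer can feed them straight into a Chunk-based encryption stream. fileReadable is a hypothetical ReadableStream constructed from FileStreamSource.

declare const fileReadable: ReadableStream<{ data: Uint8Array; final: boolean }>;

async function logChunks(): Promise<void> {
  const reader = fileReadable.getReader();
  for (;;) {
    const { done, value } = await reader.read();
    if (done) break; // close() was called after the final chunk
    console.log(`chunk of ${value.data.length} bytes, final=${value.final}`);
  }
}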