// notesnook/apps/web/src/interfaces/fs.js

import "web-streams-polyfill/dist/ponyfill";
import localforage from "localforage";
import { xxhash64, createXXHash64 } from "hash-wasm";
import axios from "axios";
import { AppEventManager, AppEvents } from "../common";
// eslint-disable-next-line import/no-webpack-loader-syntax
import "worker-loader!nncryptoworker/dist/src/worker.js";
import { StreamableFS } from "streamablefs";
import NNCrypto from "./nncrypto.stub";
import hosts from "notes-core/utils/constants";
import StreamSaver from "streamsaver";
StreamSaver.mitm = "/downloader.html";

// 17 bytes = crypto_secretstream_xchacha20poly1305_ABYTES, the authentication
// overhead libsodium's secretstream adds to every encrypted chunk.
const ABYTES = 17;
const CHUNK_SIZE = 5 * 1024 * 1024; // 5 MiB of plaintext per chunk
const ENCRYPTED_CHUNK_SIZE = CHUNK_SIZE + ABYTES;
const crypto = new NNCrypto("/static/js/bundle.worker.js");
const streamablefs = new StreamableFS("streamable-fs");

/**
 * @param {File} file
 * @param {import("nncrypto/dist/src/types").SerializedKey} key
 * @param {string} hash
 */
async function writeEncryptedFile(file, key, hash) {
  if (!localforage.supports(localforage.INDEXEDDB))
    throw new Error("This browser does not support IndexedDB.");

  let offset = 0;
  const fileHandle = await streamablefs.createFile(hash, file.size, file.type);
  const iv = await crypto.encryptStream(
    key,
    {
      read: async () => {
        const end = Math.min(offset + CHUNK_SIZE, file.size);
        if (offset === end) return;
        const chunk = new Uint8Array(
          await file.slice(offset, end).arrayBuffer()
        );
        offset = end;
        const isFinal = offset === file.size;
        return {
          final: isFinal,
          data: chunk,
        };
      },
      write: (chunk) => fileHandle.write(chunk),
    },
    file.name
  );

  return {
    iv: iv,
    length: file.size,
    salt: key.salt,
    alg: "xcha-stream",
  };
}
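
/*
 * A minimal usage sketch (hypothetical caller — `key` stands in for an
 * assumed SerializedKey, not necessarily how Notesnook derives it):
 *
 *   const [file] = fileInput.files;
 *   const { hash } = await hashStream(file.stream().getReader());
 *   const metadata = await writeEncryptedFile(file, key, hash);
 *   // metadata => { iv, length, salt, alg: "xcha-stream" }
 */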

/**
 * We perform 4 steps here:
 * 1. Convert base64 to a Uint8Array (if we get base64, that is).
 * 2. Hash the Uint8Array.
 * 3. Encrypt the Uint8Array.
 * 4. Save the encrypted output to streamable-fs.
 */
async function writeEncrypted(filename, { data, type, key, hash }) {
  if (type === "base64") data = new Uint8Array(Buffer.from(data, "base64"));

  // hashBuffer returns { hash, type }, so extract the hex string.
  if (!hash) hash = (await hashBuffer(data)).hash;
  if (!filename) filename = hash;

  const blob = new Blob([data], { type });
  return await writeEncryptedFile(blob, key, hash);
}
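
/*
 * Usage sketch (hypothetical values; `key` is an assumed SerializedKey):
 *
 *   const metadata = await FS.writeEncrypted(null, {
 *     data: base64String,
 *     type: "base64",
 *     key,
 *     hash: undefined, // computed from the data when omitted
 *   });
 */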

/**
 * @param {import("hash-wasm/dist/lib/util").IDataType} data
 * @returns {Promise<{hash: string, type: string}>}
 */
async function hashBuffer(data) {
  return {
    hash: await xxhash64(data),
    type: "xxh64",
  };
}

/**
 * @param {ReadableStreamDefaultReader<Uint8Array>} reader
 * @returns {Promise<{hash: string, type: string}>}
 */
async function hashStream(reader) {
  const hasher = await createXXHash64();
  hasher.init();
  while (true) {
    const { value } = await reader.read();
    if (!value) break;
    hasher.update(value);
  }
  return { type: "xxh64", hash: hasher.digest("hex") };
}
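
/*
 * Usage sketch — hashing a File incrementally instead of buffering it whole:
 *
 *   const reader = file.stream().getReader();
 *   const { hash, type } = await hashStream(reader); // => xxh64 hex digest
 */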

async function readEncrypted(filename, key, cipherData) {
  console.log("Reading encrypted file", filename);
  const fileHandle = await streamablefs.readFile(filename);
  if (!fileHandle) {
    console.error(`File not found. Filename: ${filename}`);
    return null;
  }

  const reader = fileHandle.getReader();
  // Over-allocate: for downloaded files, file.size is the ciphertext length
  // (which already includes the per-chunk ABYTES), so this buffer is always
  // large enough for the decrypted output.
  const plainText = new Uint8Array(
    fileHandle.file.size + fileHandle.file.chunks * ABYTES
  );
  let offset = 0;
  await crypto.decryptStream(
    key,
    cipherData.iv,
    {
      read: async () => {
        const { value } = await reader.read();
        return value;
      },
      write: async (chunk) => {
        if (!chunk) return;
        plainText.set(chunk, offset);
        offset += chunk.length;
      },
    },
    filename
  );

  return cipherData.outputType === "base64"
    ? Buffer.from(plainText).toString("base64")
    : plainText;
}
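
/*
 * Usage sketch (hypothetical attachment metadata; outputType controls the
 * return shape):
 *
 *   const data = await FS.readEncrypted(attachment.hash, key, {
 *     iv: attachment.iv,
 *     outputType: "base64", // omit to get back a Uint8Array
 *   });
 */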

async function uploadFile(filename, requestOptions) {
  console.log("Request to upload file", filename, requestOptions);
  const fileHandle = await streamablefs.readFile(filename);
  if (!fileHandle)
    throw new Error(`File stream not found. Filename: ${filename}`);

  // Resume state persisted by previous (possibly interrupted) attempts.
  let {
    uploadedChunks = [],
    uploadedBytes = 0,
    uploaded = false,
    uploadId = "",
  } = fileHandle.file.additionalData || {};

  if (uploaded) return true;

  const { headers, cancellationToken } = requestOptions;

  const initiateMultiPartUpload = await axios.get(
    `${hosts.API_HOST}/s3/multipart?name=${filename}&parts=${fileHandle.file.chunks}&uploadId=${uploadId}`,
    {
      headers,
      cancelToken: cancellationToken,
    }
  );
  if (!isSuccessStatusCode(initiateMultiPartUpload.status))
    throw new Error("Could not initiate multi-part upload.");

  uploadId = initiateMultiPartUpload.data.uploadId;
  const { parts } = initiateMultiPartUpload.data;
  await fileHandle.addAdditionalData("uploadId", uploadId);

  function onUploadProgress(ev) {
    reportProgress(
      {
        total: fileHandle.file.size,
        loaded: uploadedBytes + ev.loaded,
      },
      {
        type: "upload",
        hash: filename,
      }
    );
  }

  // PUT each remaining chunk to its part URL from the initiate response;
  // S3 part numbers are 1-based, hence PartNumber: i + 1.
  for (let i = uploadedChunks.length; i < parts.length; ++i) {
    const url = parts[i];
    const chunk = await fileHandle.readChunk(i);
    if (!chunk) throw new Error(`Chunk at offset ${i} not found.`);

    const response = await axios.request({
      url,
      method: "PUT",
      headers: { "Content-Type": "" },
      cancelToken: cancellationToken,
      data: new Blob([chunk.buffer]),
      onUploadProgress,
    });
    if (!isSuccessStatusCode(response.status) || !response.headers.etag)
      throw new Error(`Failed to upload chunk at offset ${i}.`);

    uploadedBytes += chunk.length;
    uploadedChunks.push({
      PartNumber: i + 1,
      // The ETag header is a JSON-quoted string; parse it to get the raw tag.
      ETag: JSON.parse(response.headers.etag),
    });
    await fileHandle.addAdditionalData("uploadedChunks", uploadedChunks);
    await fileHandle.addAdditionalData("uploadedBytes", uploadedBytes);
  }

  const completeMultiPartUpload = await axios.post(
    `${hosts.API_HOST}/s3/multipart`,
    {
      Key: filename,
      UploadId: uploadId,
      PartETags: uploadedChunks,
    },
    {
      headers,
      cancelToken: cancellationToken,
    }
  );
  if (!isSuccessStatusCode(completeMultiPartUpload.status))
    throw new Error("Could not complete multi-part upload.");

  await fileHandle.addAdditionalData("uploaded", true);
  return true;
}

function reportProgress(ev, { type, hash }) {
  AppEventManager.publish(AppEvents.UPDATE_ATTACHMENT_PROGRESS, {
    type,
    hash,
    total: ev.total,
    loaded: ev.loaded,
  });
}
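
/*
 * Sketch of a consumer for these progress events (assumes AppEventManager
 * exposes a subscribe() counterpart to the publish() used above):
 *
 *   const unsubscribe = AppEventManager.subscribe(
 *     AppEvents.UPDATE_ATTACHMENT_PROGRESS,
 *     ({ type, hash, loaded, total }) => {
 *       console.log(`${type} ${hash}: ${Math.round((loaded / total) * 100)}%`);
 *     }
 *   );
 */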

async function downloadFile(filename, requestOptions) {
  const { url, headers, cancellationToken } = requestOptions;
  console.log("Request to download file", filename, url, headers);

  if (await streamablefs.exists(filename)) return true;

  const response = await axios.get(url, {
    headers: headers,
    responseType: "arraybuffer",
    cancelToken: cancellationToken,
    onDownloadProgress: (ev) =>
      reportProgress(ev, { type: "download", hash: filename }),
  });

  console.log("File downloaded", filename, url, response);
  if (!isSuccessStatusCode(response.status)) return false;

  // Re-chunk the downloaded ciphertext on the same ENCRYPTED_CHUNK_SIZE
  // boundaries the encryptor produced so decryptStream sees whole chunks.
  const distributor = new ChunkDistributor(ENCRYPTED_CHUNK_SIZE);
  distributor.fill(new Uint8Array(response.data));
  distributor.close();

  const fileHandle = await streamablefs.createFile(
    filename,
    response.data.byteLength,
    "application/octet-stream"
  );
  for (const chunk of distributor.chunks) {
    await fileHandle.write(chunk.data);
  }

  return true;
}
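
/*
 * Typical download-then-read flow (hypothetical caller):
 *
 *   const { execute } = FS.downloadFile(hash, { url, headers });
 *   if (await execute()) {
 *     const data = await FS.readEncrypted(hash, key, {
 *       iv,
 *       outputType: "base64",
 *     });
 *   }
 */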

function exists(filename) {
  return streamablefs.exists(filename);
}

async function saveFile(filename, { key, iv, name, size }) {
  const fileHandle = await streamablefs.readFile(filename);
  if (!fileHandle) return false;

  // Stream decrypted chunks straight to disk via StreamSaver instead of
  // buffering the whole file in memory.
  const writerStream = StreamSaver.createWriteStream(name, {
    size,
  });
  const reader = fileHandle.getReader();
  const writer = writerStream.getWriter();
  await writer.ready;
  await crypto.decryptStream(
    key,
    iv,
    {
      read: async () => {
        const { value } = await reader.read();
        return value;
      },
      write: async (chunk) => {
        await writer.ready;
        if (!chunk) await writer.close();
        else await writer.write(chunk);
      },
    },
    filename
  );
  return true;
}
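
/*
 * Usage sketch — saving a decrypted attachment to disk (hypothetical
 * attachment object):
 *
 *   await FS.saveFile(attachment.hash, {
 *     key,
 *     iv: attachment.iv,
 *     name: attachment.filename,
 *     size: attachment.size,
 *   });
 */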

const FS = {
  writeEncrypted,
  readEncrypted,
  uploadFile: cancellable(uploadFile),
  downloadFile: cancellable(downloadFile),
  saveFile,
  exists,
  hashBuffer,
  hashStream,
  writeEncryptedFile,
};
export default FS;

function isSuccessStatusCode(statusCode) {
  return statusCode >= 200 && statusCode <= 299;
}

function cancellable(operation) {
  return function (filename, requestOptions) {
    const source = axios.CancelToken.source();
    requestOptions.cancellationToken = source.token;
    return {
      execute: () => operation(filename, requestOptions),
      cancel: (message) => {
        source.cancel(message);
      },
    };
  };
}
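
/*
 * Usage sketch — cancelling an in-flight upload (hypothetical values):
 *
 *   const { execute, cancel } = FS.uploadFile(hash, { headers });
 *   const done = execute();
 *   // ...later, e.g. when the user navigates away:
 *   cancel("Upload cancelled by user.");
 *   await done.catch(() => {}); // axios rejects with a Cancel error
 */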

class ChunkDistributor {
  /**
   * @typedef {{length: number, data: Uint8Array, final: boolean}} Chunk
   */
  constructor(chunkSize) {
    this.chunkSize = chunkSize;
    this.chunks = [];
    this.filledCount = 0;
    this.done = false;
  }

  /**
   * @returns {Chunk}
   */
  get lastChunk() {
    return this.chunks[this.chunks.length - 1];
  }

  /**
   * @returns {boolean}
   */
  get isLastChunkFilled() {
    return this.lastChunk.length === this.chunkSize;
  }

  /**
   * Note: this getter dequeues — it removes and returns the first chunk.
   * @returns {Chunk}
   */
  get firstChunk() {
    const chunk = this.chunks.shift();
    if (chunk.data.length === this.chunkSize) this.filledCount--;
    return chunk;
  }

  close() {
    // Trim the trailing chunk to its actual length and mark it final.
    this.lastChunk.data = this.lastChunk.data.slice(0, this.lastChunk.length);
    this.lastChunk.final = true;
    this.done = true;
  }

  /**
   * Splits `data` into fixed-size chunks, topping up a partially filled
   * trailing chunk before starting new ones.
   * @param {Uint8Array} data
   */
  fill(data) {
    if (this.done || !data || !data.length) return;

    const dataLength = data.length;
    const totalBlocks = Math.ceil(dataLength / this.chunkSize);
    for (let i = 0; i < totalBlocks; ++i) {
      const start = i * this.chunkSize;

      if (this.lastChunk && !this.isLastChunkFilled) {
        // Top up the last (partial) chunk first.
        const needed = this.chunkSize - this.lastChunk.length;
        const end = Math.min(start + needed, dataLength);
        const chunk = data.slice(start, end);
        this.lastChunk.data.set(chunk, this.lastChunk.length);
        this.lastChunk.length += chunk.length;
        if (this.lastChunk.length === this.chunkSize) this.filledCount++;
        if (end !== dataLength) {
          // The remainder no longer lines up with `start`; recurse on it.
          this.fill(data.slice(end));
          break;
        }
      } else {
        const end = Math.min(start + this.chunkSize, dataLength);
        const chunk = data.slice(start, end);
        const buffer = new Uint8Array(this.chunkSize);
        buffer.set(chunk, 0);
        this.chunks.push({ data: buffer, final: false, length: chunk.length });
        if (chunk.length === this.chunkSize) this.filledCount++;
      }
    }
  }
}
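
/*
 * A quick illustration of ChunkDistributor's behavior (tiny chunkSize for
 * readability):
 *
 *   const d = new ChunkDistributor(4);
 *   d.fill(new Uint8Array([1, 2, 3, 4, 5, 6]));
 *   d.close();
 *   // d.chunks => [
 *   //   { data: [1, 2, 3, 4], length: 4, final: false },
 *   //   { data: [5, 6],       length: 2, final: true  },
 *   // ]
 */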