import "web-streams-polyfill/dist/ponyfill";
import localforage from "localforage";
import { xxhash64, createXXHash64 } from "hash-wasm";
import axios from "axios";
// Explicit Buffer import (assumption: previously provided as a bundler global);
// used below for base64 <-> binary conversions.
import { Buffer } from "buffer";
import { AppEventManager, AppEvents } from "../common";
import { StreamableFS } from "streamablefs";
import { getNNCrypto } from "./nncrypto.stub";
import hosts from "notes-core/utils/constants";
import { sendAttachmentsProgressEvent } from "notes-core/common";
import { saveAs } from "file-saver";
import { showToast } from "../utils/toast";

const ABYTES = 17;
const CHUNK_SIZE = 512 * 1024;
const ENCRYPTED_CHUNK_SIZE = CHUNK_SIZE + ABYTES;
const UPLOAD_PART_REQUIRED_CHUNKS = Math.ceil(
  (5 * 1024 * 1024) / ENCRYPTED_CHUNK_SIZE
);
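// ABYTES (17) matches libsodium's secretstream per-chunk overhead: every
// encrypted chunk carries 17 bytes of authentication data on top of CHUNK_SIZE.
// UPLOAD_PART_REQUIRED_CHUNKS groups encrypted chunks so each multipart upload
// part is at least 5 MB, S3's minimum part size for all but the last part.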

const streamablefs = new StreamableFS("streamable-fs");
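
// Note: encryptStream/decryptStream (nncrypto) drive the { read, write }
// handlers passed to them: read() supplies the next input chunk and write()
// receives the transformed chunk (chunk.data is a Uint8Array), so files are
// processed chunk-by-chunk instead of being loaded fully into memory.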

/**
 * @param {File} file
 * @param {import("nncrypto/dist/src/types").SerializedKey} key
 * @param {string} hash
 */
async function writeEncryptedFile(file, key, hash) {
  const crypto = await getNNCrypto();

  if (!localforage.supports(localforage.INDEXEDDB))
    throw new Error("This browser does not support IndexedDB.");

  if (await streamablefs.exists(hash)) await streamablefs.deleteFile(hash);

  let offset = 0;
  let encrypted = 0;
  const fileHandle = await streamablefs.createFile(hash, file.size, file.type);
  sendAttachmentsProgressEvent("encrypt", hash, 1, 0);

  const iv = await crypto.encryptStream(
    key,
    {
      read: async () => {
        const end = Math.min(offset + CHUNK_SIZE, file.size);
        if (offset === end) return;
        const chunk = new Uint8Array(
          await file.slice(offset, end).arrayBuffer()
        );
        offset = end;
        const isFinal = offset === file.size;
        return {
          final: isFinal,
          data: chunk,
        };
      },
      write: async (chunk) => {
        encrypted += chunk.data.length - ABYTES;
        reportProgress(
          { total: file.size, loaded: encrypted },
          { type: "encrypt", hash }
        );
        await fileHandle.write(chunk.data);
      },
    },
    file.name
  );

  sendAttachmentsProgressEvent("encrypt", hash, 1);

  return {
    chunkSize: CHUNK_SIZE,
    iv: iv,
    length: file.size,
    salt: key.salt,
    alg: "xcha-stream",
  };
}

/**
 * We perform 4 steps here:
 * 1. We convert base64 to a Uint8Array (if we get base64, that is).
 * 2. We hash the Uint8Array.
 * 3. We encrypt the Uint8Array.
 * 4. We save the encrypted Uint8Array to streamable-fs.
 */
async function writeEncrypted(filename, { data, type, key }) {
  if (type === "base64") data = new Uint8Array(Buffer.from(data, "base64"));

  const { hash, type: hashType } = await hashBuffer(data);
  if (!filename) filename = hash;

  const file = new File([data], filename, { type });
  const result = await writeEncryptedFile(file, key, hash);
  return {
    ...result,
    hash,
    hashType,
  };
}
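
// Example (sketch; `key` is a SerializedKey as in the JSDoc above, obtained
// elsewhere from the app's key management):
//   const metadata = await FS.writeEncrypted(null, {
//     data: "aGVsbG8gd29ybGQ=", // base64-encoded payload
//     type: "base64",
//     key,
//   });
//   // metadata => { chunkSize, iv, length, salt, alg, hash, hashType }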

/**
 * @param {import("hash-wasm/dist/lib/util").IDataType} data
 * @returns {Promise<{hash: string, type: string}>}
 */
async function hashBuffer(data) {
  return {
    hash: await xxhash64(data),
    type: "xxh64",
  };
}

/**
 * @param {ReadableStreamReader<Uint8Array>} reader
 * @returns {Promise<{type: string, hash: string}>}
 */
async function hashStream(reader) {
  const hasher = await createXXHash64();
  hasher.init();

  while (true) {
    const { value } = await reader.read();
    if (!value) break;
    hasher.update(value);
  }

  return { type: "xxh64", hash: hasher.digest("hex") };
}
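
// Example (sketch, assuming Blob.stream() is available or polyfilled above):
//   const { hash, type } = await hashStream(file.stream().getReader());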

async function readEncrypted(filename, key, cipherData) {
  const fileHandle = await streamablefs.readFile(filename);
  if (!fileHandle) {
    console.error(`File not found. Filename: ${filename}`);
    return null;
  }

  const reader = fileHandle.getReader();
  const plainText = new Uint8Array(fileHandle.file.size);
  let offset = 0;

  const crypto = await getNNCrypto();
  await crypto.decryptStream(
    key,
    cipherData.iv,
    {
      read: async () => {
        const { value } = await reader.read();
        return value;
      },
      write: async (chunk) => {
        plainText.set(chunk.data, offset);
        offset += chunk.data.length;
      },
    },
    filename
  );

  return cipherData.outputType === "base64"
    ? Buffer.from(plainText).toString("base64")
    : plainText;
}

async function uploadFile(filename, requestOptions) {
  const fileHandle = await streamablefs.readFile(filename);
  if (!fileHandle)
    throw new Error(`File stream not found. Filename: ${filename}`);
  const TOTAL_PARTS = Math.ceil(
    fileHandle.file.chunks / UPLOAD_PART_REQUIRED_CHUNKS
  );

  let {
    uploadedChunks = [],
    uploadedBytes = 0,
    uploaded = false,
    uploadId = "",
  } = fileHandle.file.additionalData || {};
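
  // uploadId, uploadedChunks and uploadedBytes are persisted as additionalData
  // on the file handle so that an interrupted upload resumes from the last
  // fully uploaded part (see the loop below) instead of starting over.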

  if (uploaded) return true;

  const { headers, cancellationToken } = requestOptions;

  const initiateMultiPartUpload = await axios.get(
    `${hosts.API_HOST}/s3/multipart?name=${filename}&parts=${TOTAL_PARTS}&uploadId=${uploadId}`,
    {
      headers,
      cancelToken: cancellationToken,
    }
  );
  if (!isSuccessStatusCode(initiateMultiPartUpload.status))
    throw new Error("Could not initiate multi-part upload.");

  uploadId = initiateMultiPartUpload.data.uploadId;
  const { parts } = initiateMultiPartUpload.data;

  await fileHandle.addAdditionalData("uploadId", uploadId);

  function onUploadProgress(ev) {
    reportProgress(
      {
        total: fileHandle.file.size + ABYTES,
        loaded: uploadedBytes + ev.loaded,
      },
      {
        type: "upload",
        hash: filename,
      }
    );
  }

  for (let i = uploadedChunks.length; i < TOTAL_PARTS; ++i) {
    const blob = await fileHandle.readChunks(
      i * UPLOAD_PART_REQUIRED_CHUNKS,
      UPLOAD_PART_REQUIRED_CHUNKS
    );
    const url = parts[i];
    const data = await blob.arrayBuffer();
    const response = await axios.request({
      url,
      method: "PUT",
      headers: { "Content-Type": "" },
      cancelToken: cancellationToken,
      data,
      onUploadProgress,
    });
    if (!isSuccessStatusCode(response.status) || !response.headers.etag)
      throw new Error(`Failed to upload part ${i + 1} of ${TOTAL_PARTS}.`);

    uploadedBytes += blob.size;
    uploadedChunks.push({
      PartNumber: i + 1,
      ETag: JSON.parse(response.headers.etag),
    });
    await fileHandle.addAdditionalData("uploadedChunks", uploadedChunks);
    await fileHandle.addAdditionalData("uploadedBytes", uploadedBytes);
  }

  const completeMultiPartUpload = await axios.post(
    `${hosts.API_HOST}/s3/multipart`,
    {
      Key: filename,
      UploadId: uploadId,
      PartETags: uploadedChunks,
    },
    {
      headers,
      cancelToken: cancellationToken,
    }
  );
  if (!isSuccessStatusCode(completeMultiPartUpload.status))
    throw new Error("Could not complete multi-part upload.");

  await fileHandle.addAdditionalData("uploaded", true);
  // Keep the images cached; delete everything else.
  if (!fileHandle.file.type?.startsWith("image/")) {
    await streamablefs.deleteFile(filename);
  }
  return true;
}

function reportProgress(ev, { type, hash }) {
  AppEventManager.publish(AppEvents.UPDATE_ATTACHMENT_PROGRESS, {
    type,
    hash,
    total: ev?.total || 1,
    loaded: ev?.loaded || 1,
  });
}

async function downloadFile(filename, requestOptions) {
  const { url, headers, chunkSize, cancellationToken } = requestOptions;
  if (await streamablefs.exists(filename)) return true;

  try {
    reportProgress(
      { total: 100, loaded: 0 },
      { type: "download", hash: filename }
    );

    const signedUrlResponse = await axios.get(url, {
      headers,
      responseType: "text",
    });
    if (!isSuccessStatusCode(signedUrlResponse.status)) return false;

    const signedUrl = signedUrlResponse.data;
    const response = await axios.get(signedUrl, {
      responseType: "arraybuffer",
      cancelToken: cancellationToken,
      onDownloadProgress: (ev) =>
        reportProgress(ev, { type: "download", hash: filename }),
    });

    const contentLength =
      response.headers["content-length"] || response.headers["Content-Length"];
    if (!isSuccessStatusCode(response.status) || contentLength === "0") {
      console.error("Abort: request failed or file length is 0.", filename);
      return false;
    }
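
    // The payload is raw ciphertext: each stored chunk is `chunkSize` bytes of
    // plaintext plus ABYTES of authentication overhead, so the downloaded
    // buffer is re-sliced into (chunkSize + ABYTES)-sized chunks for later
    // decryption.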
    const distributor = new ChunkDistributor(chunkSize + ABYTES);
    distributor.fill(new Uint8Array(response.data));
    distributor.close();

    const fileHandle = await streamablefs.createFile(
      filename,
      response.data.byteLength,
      "application/octet-stream"
    );

    for (const chunk of distributor.chunks) {
      await fileHandle.write(chunk.data);
    }

    return true;
  } catch (e) {
    showToast("error", `Could not download file: ${e.message}`);
    console.error(e);
    reportProgress(undefined, { type: "download", hash: filename });
    return false;
  }
}

function exists(filename) {
  return streamablefs.exists(filename);
}

async function saveFile(filename, { key, iv, name, type }) {
  const fileHandle = await streamablefs.readFile(filename);
  if (!fileHandle) return false;

  const blobParts = [];
  const reader = fileHandle.getReader();

  const crypto = await getNNCrypto();
  await crypto.decryptStream(
    key,
    iv,
    {
      read: async () => {
        const { value } = await reader.read();
        return value;
      },
      write: async (chunk) => {
        blobParts.push(chunk.data);
      },
    },
    filename
  );
  saveAs(new Blob(blobParts, { type }), name);
  await streamablefs.deleteFile(filename);
}

async function deleteFile(filename, requestOptions) {
  if (!requestOptions) return await streamablefs.deleteFile(filename);

  const { url, headers } = requestOptions;
  if (!(await streamablefs.exists(filename))) return true;

  const response = await axios.delete(url, {
    headers,
  });
  const result = isSuccessStatusCode(response.status);
  if (result) await streamablefs.deleteFile(filename);
  return result;
}

function clearFileStorage() {
  return streamablefs.clear();
}

const FS = {
  writeEncrypted,
  readEncrypted,
  uploadFile: cancellable(uploadFile),
  downloadFile: cancellable(downloadFile),
  deleteFile,
  saveFile,
  exists,
  hashBuffer,
  hashStream,
  writeEncryptedFile,
  clearFileStorage,
};
export default FS;

function isSuccessStatusCode(statusCode) {
  return statusCode >= 200 && statusCode <= 299;
}

function cancellable(operation) {
  return function (filename, requestOptions) {
    const source = axios.CancelToken.source();
    requestOptions.cancellationToken = source.token;
    return {
      execute: () => operation(filename, requestOptions),
      cancel: (message) => {
        source.cancel(message);
      },
    };
  };
}
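
// Example (sketch): consuming a cancellable operation. `url` and `headers`
// are placeholders for whatever the caller supplies:
//   const download = FS.downloadFile(hash, { url, headers, chunkSize: CHUNK_SIZE });
//   const promise = download.execute();
//   // ...later, e.g. when the user navigates away:
//   download.cancel("Download cancelled by user.");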
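
/**
 * Re-slices arbitrarily sized input buffers into fixed-size chunks; used by
 * downloadFile above to split one downloaded ArrayBuffer into
 * (chunkSize + ABYTES)-sized pieces that match the encrypted chunk layout.
 */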
class ChunkDistributor {
  /**
   * @typedef {{length: number, data: Uint8Array, final: boolean}} Chunk
   */

  constructor(chunkSize) {
    this.chunkSize = chunkSize;
    this.chunks = [];
    this.filledCount = 0;
    this.done = false;
  }

  /**
   * @returns {Chunk}
   */
  get lastChunk() {
    return this.chunks[this.chunks.length - 1];
  }

  /**
   * @returns {boolean}
   */
  get isLastChunkFilled() {
    return this.lastChunk.length === this.chunkSize;
  }

  /**
   * @returns {Chunk}
   */
  get firstChunk() {
    const chunk = this.chunks.shift();
    if (chunk.data.length === this.chunkSize) this.filledCount--;
    return chunk;
  }

  close() {
    if (!this.lastChunk)
      throw new Error("No data available in this distributor.");
    this.lastChunk.data = this.lastChunk.data.slice(0, this.lastChunk.length);
    this.lastChunk.final = true;
    this.done = true;
  }

  /**
   * @param {Uint8Array} data
   */
  fill(data) {
    if (this.done || !data || !data.length) return;

    const dataLength = data.length;
    const totalBlocks = Math.ceil(dataLength / this.chunkSize);

    for (let i = 0; i < totalBlocks; ++i) {
      const start = i * this.chunkSize;

      if (this.lastChunk && !this.isLastChunkFilled) {
        const needed = this.chunkSize - this.lastChunk.length;
        const end = Math.min(start + needed, dataLength);
        const chunk = data.slice(start, end);

        this.lastChunk.data.set(chunk, this.lastChunk.length);
        this.lastChunk.length += chunk.length;

        if (this.lastChunk.length === this.chunkSize) this.filledCount++;

        if (end !== dataLength) {
          this.fill(data.slice(end));
          break;
        }
      } else {
        const end = Math.min(start + this.chunkSize, dataLength);
        const chunk = data.slice(start, end);

        const buffer = new Uint8Array(this.chunkSize);
        buffer.set(chunk, 0);

        this.chunks.push({ data: buffer, final: false, length: chunk.length });
        if (chunk.length === this.chunkSize) this.filledCount++;
      }
    }
  }
}
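
// Example (sketch): a distributor with chunkSize = 5 re-slicing two fills.
//   const d = new ChunkDistributor(5);
//   d.fill(new Uint8Array([1, 2, 3]));
//   d.fill(new Uint8Array([4, 5, 6, 7]));
//   d.close();
//   // d.chunks[0].data => [1, 2, 3, 4, 5] (filled)
//   // d.chunks[1].data => [6, 7] (final)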