fix: use file-saver instead of stream-saver for downloads

This commit is contained in:
thecodrr
2021-10-21 09:51:03 +05:00
parent 5e7b311d0b
commit d8339b09b7
6 changed files with 9 additions and 348 deletions

View File

@@ -23,6 +23,7 @@
"emotion-theming": "^10.0.19",
"eventsource": "^1.0.7",
"fast-sort": "^2.1.1",
"file-saver": "^2.0.5",
"framer-motion": "^3.3.0",
"hash-wasm": "^4.9.0",
"hotkeys-js": "^3.8.3",
@@ -49,7 +50,6 @@
"react-virtuoso": "^1.9.3",
"rebass": "^4.0.7",
"streamablefs": "file:packages/streamablefs",
"streamsaver": "^2.0.5",
"timeago-react": "^3.0.2",
"tinymce": "5.8.1",
"uzip": "^0.20201231.0",

View File

@@ -1,189 +0,0 @@
<!--
mitm.html is the lite "man in the middle"
This is only meant to signal the opener's messageChannel to
the service worker - when that is done this mitm can be closed
but it's better to keep it alive since this also stops the sw
from restarting
The service worker is capable of intercepting all requests and forging its
own "fake" response - which we are going to craft
when the worker then receives a stream then the worker will tell the opener
to open up a link that will start the download
-->
<script>
// This will prevent the sw from restarting
let keepAlive = () => {
keepAlive = () => {};
var ping =
location.href.substr(0, location.href.lastIndexOf("/")) + "/ping";
var interval = setInterval(() => {
if (sw) {
sw.postMessage("ping");
} else {
console.log("SENDING FETCH");
fetch(ping).then((res) => res.text(!res.ok && clearInterval(interval)));
}
}, 10000);
};
// message event is the first thing we need to setup a listner for
// don't want the opener to do a random timeout - instead they can listen for
// the ready event
// but since we need to wait for the Service Worker registration, we store the
// message for later
let messages = [];
window.onmessage = (evt) => messages.push(evt);
let sw = null;
let scope = "";
function registerWorker() {
return navigator.serviceWorker
.getRegistration("./")
.then((swReg) => {
return (
swReg ||
navigator.serviceWorker.register("downloader.sw.js", { scope: "./" })
);
})
.then((swReg) => {
const swRegTmp = swReg.installing || swReg.waiting;
scope = swReg.scope;
return (
(sw = swReg.active) ||
new Promise((resolve) => {
swRegTmp.addEventListener(
"statechange",
(fn = () => {
if (swRegTmp.state === "activated") {
swRegTmp.removeEventListener("statechange", fn);
sw = swReg.active;
resolve();
}
})
);
})
);
});
}
// Now that we have the Service Worker registered we can process messages
function onMessage(event) {
let { data, ports, origin } = event;
// It's important to have a messageChannel, don't want to interfere
// with other simultaneous downloads
if (!ports || !ports.length) {
throw new TypeError("[StreamSaver] You didn't send a messageChannel");
}
if (typeof data !== "object") {
throw new TypeError("[StreamSaver] You didn't send a object");
}
// the default public service worker for StreamSaver is shared among others.
// so all download links needs to be prefixed to avoid any other conflict
data.origin = origin;
// if we ever (in some feature versoin of streamsaver) would like to
// redirect back to the page of who initiated a http request
data.referrer = data.referrer || document.referrer || origin;
// pass along version for possible backwards compatibility in sw.js
data.streamSaverVersion = new URLSearchParams(location.search).get(
"version"
);
if (data.streamSaverVersion === "1.2.0") {
console.warn("[StreamSaver] please update streamsaver");
}
/** @since v2.0.0 */
if (!data.headers) {
console.warn(
"[StreamSaver] pass `data.headers` that you would like to pass along to the service worker\nit should be a 2D array or a key/val object that fetch's Headers api accepts"
);
} else {
// test if it's correct
// should thorw a typeError if not
new Headers(data.headers);
}
/** @since v2.0.0 */
if (typeof data.filename === "string") {
console.warn(
"[StreamSaver] You shouldn't send `data.filename` anymore. It should be included in the Content-Disposition header option"
);
// Do what File constructor do with fileNames
data.filename = data.filename.replace(/\//g, ":");
}
/** @since v2.0.0 */
if (data.size) {
console.warn(
"[StreamSaver] You shouldn't send `data.size` anymore. It should be included in the content-length header option"
);
}
/** @since v2.0.0 */
if (data.readableStream) {
console.warn(
"[StreamSaver] You should send the readableStream in the messageChannel, not throught mitm"
);
}
/** @since v2.0.0 */
if (!data.pathname) {
console.warn(
"[StreamSaver] Please send `data.pathname` (eg: /pictures/summer.jpg)"
);
data.pathname = Math.random().toString().slice(-6) + "/" + data.filename;
}
// remove all leading slashes
data.pathname = data.pathname.replace(/^\/+/g, "");
// remove protocol
let org = origin.replace(/(^\w+:|^)\/\//, "");
// set the absolute pathname to the download url.
data.url = new URL(`${scope + org}/${data.pathname}`).toString();
if (!data.url.startsWith(`${scope + org}/`)) {
throw new TypeError("[StreamSaver] bad `data.pathname`");
}
// This sends the message data as well as transferring
// messageChannel.port2 to the service worker. The service worker can
// then use the transferred port to reply via postMessage(), which
// will in turn trigger the onmessage handler on messageChannel.port1.
const transferable = data.readableStream
? [ports[0], data.readableStream]
: [ports[0]];
if (!(data.readableStream || data.transferringReadable)) {
keepAlive();
}
return sw.postMessage(data, transferable);
}
if (window.opener) {
// The opener can't listen to onload event, so we need to help em out!
// (telling them that we are ready to accept postMessage's)
window.opener.postMessage("StreamSaver::loadedPopup", "*");
}
if (navigator.serviceWorker) {
registerWorker().then(() => {
window.onmessage = onMessage;
messages.forEach(window.onmessage);
});
} else {
// FF can ping sw with fetch from a secure hidden iframe
// shouldn't really be possible?
keepAlive();
}
</script>

View File

@@ -1,144 +0,0 @@
/* eslint-disable no-restricted-globals */
self.addEventListener("install", () => {
self.skipWaiting();
});
self.addEventListener("activate", (event) => {
event.waitUntil(self.clients.claim());
});
const map = new Map();
// This should be called once per download
// Each event has a dataChannel that the data will be piped through
self.onmessage = (event) => {
console.log("EVENT", event);
// We send a heartbeat every x second to keep the
// service worker alive if a transferable stream is not sent
if (event.data === "ping") {
return;
}
const data = event.data;
const downloadUrl =
data.url ||
self.registration.scope +
Math.random() +
"/" +
(typeof data === "string" ? data : data.filename);
const port = event.ports[0];
const metadata = new Array(3); // [stream, data, port]
metadata[1] = data;
metadata[2] = port;
// Note to self:
// old streamsaver v1.2.0 might still use `readableStream`...
// but v2.0.0 will always transfer the stream through MessageChannel #94
if (event.data.readableStream) {
metadata[0] = event.data.readableStream;
} else if (event.data.transferringReadable) {
port.onmessage = (evt) => {
port.onmessage = null;
metadata[0] = evt.data.readableStream;
};
} else {
metadata[0] = createStream(port);
}
map.set(downloadUrl, metadata);
port.postMessage({ download: downloadUrl });
};
function createStream(port) {
// ReadableStream is only supported by chrome 52
return new ReadableStream({
start(controller) {
// When we receive data on the messageChannel, we write
port.onmessage = ({ data }) => {
if (data === "end") {
return controller.close();
}
if (data === "abort") {
console.log("user aborted");
controller.error("Aborted the download");
return;
}
controller.enqueue(data);
};
},
cancel() {
console.log("user aborted");
},
});
}
self.onfetch = (event) => {
const url = event.request.url;
// this only works for Firefox
if (url.endsWith("/ping")) {
return event.respondWith(new Response("pong"));
}
const hijacke = map.get(url);
if (!hijacke) return null;
const [stream, data, port] = hijacke;
console.log("Eevnent", stream);
map.delete(url);
// Not comfortable letting any user control all headers
// so we only copy over the length & disposition
const responseHeaders = new Headers({
"Content-Type": "application/octet-stream; charset=utf-8",
// To be on the safe side, The link can be opened in a iframe.
// but octet-stream should stop it.
"Content-Security-Policy": "default-src 'none'",
"X-Content-Security-Policy": "default-src 'none'",
"X-WebKit-CSP": "default-src 'none'",
"X-XSS-Protection": "1; mode=block",
});
let headers = new Headers(data.headers || {});
if (headers.has("Content-Length")) {
responseHeaders.set("Content-Length", headers.get("Content-Length"));
}
if (headers.has("Content-Disposition")) {
responseHeaders.set(
"Content-Disposition",
headers.get("Content-Disposition")
);
}
// data, data.filename and size should not be used anymore
if (data.size) {
console.warn("Depricated");
responseHeaders.set("Content-Length", data.size);
}
let fileName = typeof data === "string" ? data : data.filename;
if (fileName) {
console.warn("Depricated");
// Make filename RFC5987 compatible
fileName = encodeURIComponent(fileName)
.replace(/['()]/g, escape)
.replace(/\*/g, "%2A");
responseHeaders.set(
"Content-Disposition",
"attachment; filename*=UTF-8''" + fileName
);
}
event.respondWith(new Response(stream, { headers: responseHeaders }));
port.postMessage({ debug: "Download started" });
};

View File

@@ -246,7 +246,7 @@ function TinyMCE(props) {
key: await db.user.getEncryptionKey(),
iv: attachment.iv,
name: attachment.metadata.filename,
size: attachment.length,
type: attachment.metadata.type,
});
},
}}

View File

@@ -301,7 +301,7 @@ function Properties({ noteId }) {
key: await db.user.getEncryptionKey(),
iv: attachment.iv,
name: attachment.metadata.filename,
size: attachment.length,
type: attachment.metadata.type,
});
}
}}

View File

@@ -8,9 +8,8 @@ import "worker-loader?filename=static/workers/nncrypto.worker.js!nncryptoworker/
import { StreamableFS } from "streamablefs";
import NNCrypto from "./nncrypto.stub";
import hosts from "notes-core/utils/constants";
import StreamSaver from "streamsaver";
import { sendAttachmentsProgressEvent } from "notes-core/common";
StreamSaver.mitm = "/downloader.html";
import { saveAs } from "file-saver";
const ABYTES = 17;
const CHUNK_SIZE = 512 * 1024;
@@ -301,17 +300,12 @@ function exists(filename) {
return streamablefs.exists(filename);
}
async function saveFile(filename, { key, iv, name, size }) {
async function saveFile(filename, { key, iv, name, type }) {
const fileHandle = await streamablefs.readFile(filename);
if (!fileHandle) return false;
const writerStream = StreamSaver.createWriteStream(name, {
size,
});
const blobParts = [];
const reader = fileHandle.getReader();
const writer = writerStream.getWriter();
await writer.ready;
await crypto.decryptStream(
key,
@@ -322,13 +316,13 @@ async function saveFile(filename, { key, iv, name, size }) {
return value;
},
write: async (chunk) => {
await writer.ready;
if (!chunk) writer.close();
else await writer.write(chunk);
if (!chunk) return;
else blobParts.push(chunk);
},
},
filename
);
saveAs(new Blob(blobParts, { type }), name);
await streamablefs.deleteFile(filename);
}