feat: add verification checks to backup & restore

This commit is contained in:
thecodrr
2020-10-03 11:59:20 +05:00
parent 4773114bd3
commit b3b8d6715d
4 changed files with 97 additions and 19 deletions

View File

@@ -17,24 +17,29 @@ beforeEach(async () => {
// Tests for Backup.export / Backup.import, including the new verification
// checks: every exported backup carries a `type`, a `date` and (for tampered
// payloads) an integrity hash that import() rejects when it does not match.

test("export backup", () =>
  noteTest().then(() =>
    notebookTest().then(async ({ db }) => {
      const exp = await db.backup.export("node");
      let backup = JSON.parse(exp);
      expect(backup.type).toBe("node");
      expect(backup.date).toBeGreaterThan(0);
    })
  ));

test("export encrypted backup", () =>
  noteTest().then(() =>
    notebookTest().then(async ({ db }) => {
      const exp = await db.backup.export("node", true);
      let backup = JSON.parse(exp);
      expect(backup.type).toBe("node");
      expect(backup.date).toBeGreaterThan(0);
      // Encrypted payloads keep their cipher params (iv) inline in `data`.
      expect(backup.data.iv).toBe("some iv");
    })
  ));

test("import backup", () =>
  noteTest().then(() =>
    notebookTest().then(async ({ db, id }) => {
      const exp = await db.backup.export("node");
      // Wipe storage so the import has to restore everything from scratch.
      StorageInterface.clear();
      await db.backup.import(exp);
      expect(db.notebooks.notebook(id).data.id).toBe(id);
    })
  ));

test("import encrypted backup", () =>
  noteTest().then(() =>
    notebookTest().then(async ({ db, id }) => {
      const exp = await db.backup.export("node", true);
      StorageInterface.clear();
      await db.backup.import(exp);
      expect(db.notebooks.notebook(id).data.id).toBe(id);
    })
  ));

// NOTE(review): "tempered" should read "tampered" — the name and regex match
// the (also misspelled) error message thrown by Backup.import; fix both
// together in a follow-up.
test("import tempered backup", () =>
  noteTest().then(() =>
    notebookTest().then(async ({ db, id }) => {
      const exp = await db.backup.export("node");
      StorageInterface.clear();
      const backup = JSON.parse(exp);
      // Mutating the payload must invalidate the integrity hash.
      backup.data.hello = "world";
      await expect(
        db.backup.import(JSON.stringify(backup))
      ).rejects.toThrowError(/tempered/);
    })
  ));

View File

@@ -1,3 +1,8 @@
import Hashes from "jshashes";

const sha256 = new Hashes.SHA256();
// Storage keys that must never be included in a backup payload.
const invalidKeys = ["user", "t"];
// Platforms a backup can be created on / restored from.
const validTypes = ["mobile", "web", "node"];

export default class Backup {
  /**
   * @param {Object} db database instance providing `context` (storage) and
   * `user` (encryption key) — assumed from usage below; confirm against caller.
   */
  constructor(db) {
    this._db = db;
  }

  /**
   * Exports all storage data as a serialized backup envelope
   * `{ type, date, data }` where `data` carries an integrity hash `h`.
   * @param {"web"|"mobile"|"node"} type platform the backup is created on
   * @param {boolean} [encrypt=false] encrypt the payload with the user's key
   * @returns {Promise<string>} the serialized backup
   * @throws {Error} if `type` is not one of the valid platform types
   */
  async export(type, encrypt = false) {
    if (!validTypes.includes(type))
      throw new Error(
        "Invalid type. It must be one of 'mobile', 'web' or 'node'."
      );
    // FIX: the original used `!(key in invalidKeys)`, but `in` on an array
    // tests *indices* ("0", "1"), not values — so "user" and "t" were never
    // filtered out of the backup.
    const keys = (await this._db.context.getAllKeys()).filter(
      (key) => !invalidKeys.includes(key)
    );
    const db = Object.fromEntries(await this._db.context.readMulti(keys));
    // Integrity hash over the payload, computed before `h` itself is added
    // (matches the `delete db.h` in _verify on the import side).
    db.h = sha256.hex(JSON.stringify(db));

    if (encrypt) {
      const key = await this._db.user.key();
      return JSON.stringify({
        type,
        date: Date.now(),
        data: await this._db.context.encrypt(key, JSON.stringify(db)),
      });
    }

    return JSON.stringify({
      type,
      date: Date.now(),
      data: db,
    });
  }

  /**
   * Imports a backup produced by `export`, verifying the envelope shape and
   * the payload's integrity hash before writing anything to storage.
   * @param {string} data the backup data
   * @returns {Promise<void>} resolves silently when `data` is falsy
   * @throws {Error} if the envelope is malformed or the hash does not match
   */
  async import(data) {
    if (!data) return;
    let backup = JSON.parse(data);
    if (!this._validate(backup)) throw new Error("Invalid backup.");

    let db = backup.data;
    // Encrypted payloads carry their cipher parameters (salt + iv) inline.
    if (db.salt && db.iv) {
      const key = await this._db.user.key();
      db = JSON.parse(await this._db.context.decrypt(key, db));
    }

    // NOTE(review): "tempered" should read "tampered"; kept byte-for-byte
    // because the test suite matches this exact message — fix both together.
    if (!this._verify(db))
      throw new Error("Backup file has been tempered, aborting...");

    for (let key in db) {
      let value = db[key];
      await this._db.context.write(key, value);
    }
  }

  /**
   * Checks the backup envelope has a date, a payload and a known type.
   * @param {{date?: number, data?: Object, type?: string}} backup
   * @returns {boolean} true when the envelope is well-formed
   */
  _validate(backup) {
    return (
      !!backup.date &&
      !!backup.data &&
      !!backup.type &&
      validTypes.includes(backup.type)
    );
  }

  /**
   * Recomputes the payload hash and compares it with the stored `h`.
   * Side effect: removes `h` from `db` (it is intentionally not restored).
   * @param {Object} db the decrypted backup payload
   * @returns {boolean} true when the hashes match
   */
  _verify(db) {
    const hash = db.h;
    delete db.h;
    return hash === sha256.hex(JSON.stringify(db));
  }
}

View File

@@ -22,10 +22,11 @@
"dependencies": { "dependencies": {
"fast-sort": "^2.0.1", "fast-sort": "^2.0.1",
"fuzzysearch": "^1.0.3", "fuzzysearch": "^1.0.3",
"jshashes": "^1.0.8",
"no-internet": "^1.5.2", "no-internet": "^1.5.2",
"qclone": "^1.0.4", "qclone": "^1.0.4",
"quill-delta-to-html": "^0.12.0", "quill-delta-to-html": "^0.12.0",
"quill-delta-to-markdown": "https://github.com/streetwriters/quill-delta-to-markdown", "quill-delta-to-markdown": "https://github.com/streetwriters/quill-delta-to-markdown",
"transfun": "^1.0.2" "transfun": "^1.0.2"
} }
} }

View File

@@ -3402,6 +3402,11 @@ jsesc@~0.5.0:
resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d"
integrity sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0= integrity sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0=
jshashes@^1.0.8:
version "1.0.8"
resolved "https://registry.yarnpkg.com/jshashes/-/jshashes-1.0.8.tgz#f60d837428383abf73ab022e1542e6614bd75514"
integrity sha512-btmQZ/w1rj8Lb6nEwvhjM7nBYoj54yaEFo2PWh3RkxZ8qNwuvOxvQYN/JxVuwoMmdIluL+XwYVJ+pEEZoSYybQ==
json-parse-better-errors@^1.0.1: json-parse-better-errors@^1.0.1:
version "1.0.2" version "1.0.2"
resolved "https://registry.yarnpkg.com/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz#bb867cfb3450e69107c131d1c514bab3dc8bcaa9" resolved "https://registry.yarnpkg.com/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz#bb867cfb3450e69107c131d1c514bab3dc8bcaa9"