mirror of
https://github.com/streetwriters/notesnook.git
synced 2025-12-16 11:47:54 +01:00
core: migrate everything to sqlite
This commit is contained in:
@@ -17,14 +17,8 @@ You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
import { TEST_NOTE, loginFakeUser, notebookTest } from "./utils";
|
||||
// import v52Backup from "./__fixtures__/backup.v5.2.json";
|
||||
// import v52BackupCopy from "./__fixtures__/backup.v5.2.copy.json";
|
||||
// import v56BackupCopy from "./__fixtures__/backup.v5.6.json";
|
||||
// import v58BackupCopy from "./__fixtures__/backup.v5.8.json";
|
||||
// import qclone from "qclone";
|
||||
import { TEST_NOTE, databaseTest, loginFakeUser, notebookTest } from "./utils";
|
||||
import { test, expect } from "vitest";
|
||||
// import { getId, makeId } from "../src/utils/id";
|
||||
|
||||
test("export backup", () =>
|
||||
notebookTest().then(async ({ db }) => {
|
||||
@@ -70,34 +64,32 @@ test("export encrypted backup", () =>
|
||||
expect(backup.encrypted).toBe(true);
|
||||
}));
|
||||
|
||||
test("import backup", () =>
|
||||
notebookTest().then(async ({ db, id }) => {
|
||||
await db.notes.add(TEST_NOTE);
|
||||
test("import backup", async () => {
|
||||
const { db, id } = await notebookTest();
|
||||
const exp = [];
|
||||
for await (const file of db.backup.export("node", false)) {
|
||||
exp.push(file);
|
||||
}
|
||||
|
||||
const exp = [];
|
||||
for await (const file of db.backup.export("node", false)) {
|
||||
exp.push(file);
|
||||
}
|
||||
const db2 = await databaseTest();
|
||||
await db2.backup.import(JSON.parse(exp[1].data));
|
||||
expect((await db2.notebooks.notebook(id)).id).toBe(id);
|
||||
});
|
||||
|
||||
await db.storage().clear();
|
||||
await db.backup.import(JSON.parse(exp[1].data));
|
||||
expect(db.notebooks.notebook(id).data.id).toBe(id);
|
||||
}));
|
||||
test("import encrypted backup", async () => {
|
||||
const { db, id } = await notebookTest();
|
||||
await loginFakeUser(db);
|
||||
await db.notes.add(TEST_NOTE);
|
||||
|
||||
test("import encrypted backup", () =>
|
||||
notebookTest().then(async ({ db, id }) => {
|
||||
await loginFakeUser(db);
|
||||
await db.notes.add(TEST_NOTE);
|
||||
const exp = [];
|
||||
for await (const file of db.backup.export("node", true)) {
|
||||
exp.push(file);
|
||||
}
|
||||
|
||||
const exp = [];
|
||||
for await (const file of db.backup.export("node", true)) {
|
||||
exp.push(file);
|
||||
}
|
||||
|
||||
await db.storage().clear();
|
||||
await db.backup.import(JSON.parse(exp[1].data), "password");
|
||||
expect(db.notebooks.notebook(id).data.id).toBe(id);
|
||||
}));
|
||||
const db2 = await databaseTest();
|
||||
await db2.backup.import(JSON.parse(exp[1].data), "password");
|
||||
expect((await db2.notebooks.notebook(id)).id).toBe(id);
|
||||
});
|
||||
|
||||
test("import tempered backup", () =>
|
||||
notebookTest().then(async ({ db }) => {
|
||||
@@ -113,104 +105,3 @@ test("import tempered backup", () =>
|
||||
backup.data += "hello";
|
||||
await expect(db.backup.import(backup)).rejects.toThrow(/tempered/);
|
||||
}));
|
||||
|
||||
// describe.each([
|
||||
// ["v5.2", v52Backup],
|
||||
// ["v5.2 copy", v52BackupCopy],
|
||||
// ["v5.6", v56BackupCopy],
|
||||
// ["v5.8", v58BackupCopy]
|
||||
// ])("testing backup version: %s", (version, data) => {
|
||||
// test(`import ${version} backup`, () => {
|
||||
// return databaseTest().then(async (db) => {
|
||||
// await db.backup.import(qclone(data));
|
||||
|
||||
// expect(db.settings.raw.id).toBeDefined();
|
||||
// expect(db.settings.raw.dateModified).toBeDefined();
|
||||
// expect(db.settings.raw.dateEdited).toBeUndefined();
|
||||
// expect(db.settings.raw.pins).toBeUndefined();
|
||||
|
||||
// expect(
|
||||
// db.notes.all.every((v) => {
|
||||
// const doesNotHaveContent = !v.content;
|
||||
// const doesNotHaveColors = !v.colors; // && (!v.color || v.color.length);
|
||||
// const hasTopicsInAllNotebooks =
|
||||
// !v.notebooks ||
|
||||
// v.notebooks.every((nb) => !!nb.id && !!nb.topics && !nb.topic);
|
||||
// const hasDateModified = v.dateModified > 0;
|
||||
// const doesNotHaveTags = !v.tags;
|
||||
// const doesNotHaveColor = !v.color;
|
||||
// if (!doesNotHaveTags) console.log(v);
|
||||
// return (
|
||||
// doesNotHaveTags &&
|
||||
// doesNotHaveColor &&
|
||||
// doesNotHaveContent &&
|
||||
// !v.notebook &&
|
||||
// hasTopicsInAllNotebooks &&
|
||||
// doesNotHaveColors &&
|
||||
// hasDateModified
|
||||
// );
|
||||
// })
|
||||
// ).toBeTruthy();
|
||||
|
||||
// expect(
|
||||
// db.tags.all.every((t) => makeId(t.title) !== t.id && !t.noteIds)
|
||||
// ).toBeTruthy();
|
||||
|
||||
// expect(
|
||||
// db.colors.all.every(
|
||||
// (t) => makeId(t.title) !== t.id && !t.noteIds && !!t.colorCode
|
||||
// )
|
||||
// ).toBeTruthy();
|
||||
|
||||
// expect(
|
||||
// db.notebooks.all.every((v) => v.title != null && v.dateModified > 0)
|
||||
// ).toBeTruthy();
|
||||
|
||||
// expect(db.notebooks.all.every((v) => !v.topics)).toBeTruthy();
|
||||
|
||||
// expect(
|
||||
// db.attachments.all.every((v) => v.dateModified > 0 && !v.dateEdited)
|
||||
// ).toBeTruthy();
|
||||
|
||||
// expect(db.attachments.all.every((a) => !a.noteIds)).toBeTruthy();
|
||||
|
||||
// if (data.data.settings.pins)
|
||||
// expect(db.shortcuts.all).toHaveLength(data.data.settings.pins.length);
|
||||
|
||||
// const allContent = await db.content.all();
|
||||
// expect(
|
||||
// allContent.every((v) => v.type === "tiptap" || v.deleted)
|
||||
// ).toBeTruthy();
|
||||
// expect(allContent.every((v) => !v.persistDateEdited)).toBeTruthy();
|
||||
// expect(allContent.every((v) => v.dateModified > 0)).toBeTruthy();
|
||||
|
||||
// expect(
|
||||
// allContent.every(
|
||||
// (v) =>
|
||||
// !v.data.includes("tox-checklist") &&
|
||||
// !v.data.includes("tox-checklist--checked")
|
||||
// )
|
||||
// ).toBeTruthy();
|
||||
// });
|
||||
// });
|
||||
|
||||
// test(`verify indices of ${version} backup`, () => {
|
||||
// return databaseTest().then(async (db) => {
|
||||
// await db.backup.import(qclone(data));
|
||||
|
||||
// const keys = await db.storage().getAllKeys();
|
||||
// for (let key in data.data) {
|
||||
// const item = data.data[key];
|
||||
// if (item && !item.type && item.deleted) continue;
|
||||
// if (
|
||||
// key.startsWith("_uk_") ||
|
||||
// key === "hasConflicts" ||
|
||||
// key === "monographs" ||
|
||||
// key === "token"
|
||||
// )
|
||||
// continue;
|
||||
// expect(keys.some((k) => k.startsWith(key))).toBeTruthy();
|
||||
// }
|
||||
// });
|
||||
// });
|
||||
// });
|
||||
|
||||
@@ -31,7 +31,7 @@ test("search notes", () =>
|
||||
content: content
|
||||
}).then(async ({ db }) => {
|
||||
await db.notes.add(TEST_NOTE);
|
||||
let filtered = await db.lookup.notes(db.notes.all, "note of the world");
|
||||
let filtered = await db.lookup.notes("note the world");
|
||||
expect(filtered).toHaveLength(1);
|
||||
}));
|
||||
|
||||
@@ -42,8 +42,8 @@ test("search notes with a locked note", () =>
|
||||
const noteId = await db.notes.add(TEST_NOTE);
|
||||
await db.vault.create("password");
|
||||
await db.vault.add(noteId);
|
||||
let filtered = await db.lookup.notes(db.notes.all, "note of the world");
|
||||
expect(filtered).toHaveLength(1);
|
||||
expect(await db.lookup.notes("note the world")).toHaveLength(1);
|
||||
expect(await db.lookup.notes("format")).toHaveLength(0);
|
||||
}));
|
||||
|
||||
test("search notes with an empty note", () =>
|
||||
@@ -54,13 +54,16 @@ test("search notes with an empty note", () =>
|
||||
title: "world is a heavy tune",
|
||||
content: { type: "tiptap", data: "<p><br></p>" }
|
||||
});
|
||||
let filtered = await db.lookup.notes(db.notes.all, "heavy tune");
|
||||
let filtered = await db.lookup.notes("heavy tune");
|
||||
expect(filtered).toHaveLength(1);
|
||||
}));
|
||||
|
||||
test("search notebooks", () =>
|
||||
notebookTest().then(async ({ db }) => {
|
||||
await db.notebooks.add(TEST_NOTEBOOK2);
|
||||
let filtered = db.lookup.notebooks(db.notebooks.all, "Description");
|
||||
let filtered = db.lookup.notebooks(
|
||||
await db.notebooks.all.items(),
|
||||
"Description"
|
||||
);
|
||||
expect(filtered.length).toBeGreaterThan(0);
|
||||
}));
|
||||
|
||||
@@ -171,22 +171,16 @@ test("[5.6] move pins to shortcuts", () =>
|
||||
expect(await migrateItem(item, 5.6, 5.7, "settings", db, "local")).toBe(
|
||||
true
|
||||
);
|
||||
const shortcuts = await db.shortcuts.all.items();
|
||||
expect(item.pins).toBeUndefined();
|
||||
expect(
|
||||
db.shortcuts.all.find(
|
||||
(s) =>
|
||||
s.item.type === "topic" &&
|
||||
s.item.id === "hello" &&
|
||||
s.item.notebookId === "world"
|
||||
)
|
||||
shortcuts.find((s) => s.itemType === "notebook" && s.itemId === "hello")
|
||||
).toBeDefined();
|
||||
expect(
|
||||
db.shortcuts.all.find(
|
||||
(s) => s.item.type === "notebook" && s.item.id === "world"
|
||||
)
|
||||
shortcuts.find((s) => s.itemType === "notebook" && s.itemId === "world")
|
||||
).toBeDefined();
|
||||
expect(
|
||||
db.shortcuts.all.find((s) => s.item.type === "tag" && s.item.id === "tag")
|
||||
shortcuts.find((s) => s.itemType === "tag" && s.itemId === "tag")
|
||||
).toBeDefined();
|
||||
}));
|
||||
|
||||
@@ -279,7 +273,7 @@ describe("[5.9] make tags syncable", () => {
|
||||
databaseTest().then(async (db) => {
|
||||
const noteId = getId();
|
||||
const tags = ["hello", "world", "i am here"];
|
||||
await db.notes.collection.add({
|
||||
await db.legacyNotes.add({
|
||||
type: "note",
|
||||
title: "I am a note",
|
||||
tags,
|
||||
@@ -287,7 +281,7 @@ describe("[5.9] make tags syncable", () => {
|
||||
});
|
||||
|
||||
for (const tag of tags) {
|
||||
await db.tags.collection.add({
|
||||
await db.legacyTags.add({
|
||||
id: makeId(tag),
|
||||
noteIds: [noteId],
|
||||
type: "tag",
|
||||
@@ -302,27 +296,24 @@ describe("[5.9] make tags syncable", () => {
|
||||
});
|
||||
await db.legacySettings.init();
|
||||
|
||||
const note = db.notes.note(noteId);
|
||||
const note = db.legacyNotes.get(noteId);
|
||||
if (!note) throw new Error("Failed to find note.");
|
||||
|
||||
expect(await migrateItem(note.data, 5.9, 6.0, "note", db, "backup")).toBe(
|
||||
expect(await migrateItem(note, 5.9, 6.0, "note", db, "backup")).toBe(
|
||||
true
|
||||
);
|
||||
|
||||
const resolvedTags = db.relations
|
||||
.to({ type: "note", id: noteId }, "tag")
|
||||
.resolved()
|
||||
.sort((a, b) => a.title.localeCompare(b.title));
|
||||
const resolvedTags = (
|
||||
await db.relations.to({ type: "note", id: noteId }, "tag").resolve()
|
||||
).sort((a, b) => a.title.localeCompare(b.title));
|
||||
|
||||
expect(note.data.tags).toBeUndefined();
|
||||
expect(db.tags.all).toHaveLength(3);
|
||||
expect(note.tags).toBeUndefined();
|
||||
expect(await db.tags.all.count()).toBe(3);
|
||||
expect(resolvedTags).toHaveLength(3);
|
||||
expect(resolvedTags[0].title).toBe("hello");
|
||||
expect(resolvedTags[1].title).toBe("I AM GOOD!");
|
||||
expect(resolvedTags[2].title).toBe("i am here");
|
||||
expect(
|
||||
tags.every((t) => !db.tags.collection.exists(makeId(t)))
|
||||
).toBeTruthy();
|
||||
expect(db.legacyTags.items()).toHaveLength(0);
|
||||
}));
|
||||
|
||||
test("migrate old tag item to new one", () =>
|
||||
@@ -367,7 +358,7 @@ describe("[5.9] make tags syncable", () => {
|
||||
databaseTest().then(async (db) => {
|
||||
const noteId = getId();
|
||||
const tags = ["hello", "world", "i am here"];
|
||||
await db.notes.collection.add({
|
||||
await db.legacyNotes.add({
|
||||
type: "note",
|
||||
title: "I am a note",
|
||||
tags,
|
||||
@@ -388,30 +379,27 @@ describe("[5.9] make tags syncable", () => {
|
||||
title: tag
|
||||
};
|
||||
await migrateItem(item, 5.9, 6.0, "tag", db, "backup");
|
||||
await db.tags.collection.add(item);
|
||||
await db.tags.add(item);
|
||||
}
|
||||
|
||||
const note = db.notes.note(noteId);
|
||||
const note = db.legacyNotes.get(noteId);
|
||||
if (!note) throw new Error("Failed to find note.");
|
||||
|
||||
expect(await migrateItem(note.data, 5.9, 6.0, "note", db, "backup")).toBe(
|
||||
expect(await migrateItem(note, 5.9, 6.0, "note", db, "backup")).toBe(
|
||||
true
|
||||
);
|
||||
|
||||
const resolvedTags = db.relations
|
||||
.to({ type: "note", id: noteId }, "tag")
|
||||
.resolved()
|
||||
.sort((a, b) => a.title.localeCompare(b.title));
|
||||
const resolvedTags = (
|
||||
await db.relations.to({ type: "note", id: noteId }, "tag").resolve()
|
||||
).sort((a, b) => a.title.localeCompare(b.title));
|
||||
|
||||
expect(note.data.tags).toBeUndefined();
|
||||
expect(db.tags.all).toHaveLength(3);
|
||||
expect(note.tags).toBeUndefined();
|
||||
expect(await db.tags.all.count()).toBe(3);
|
||||
expect(resolvedTags).toHaveLength(3);
|
||||
expect(resolvedTags[0].title).toBe("hello");
|
||||
expect(resolvedTags[1].title).toBe("I AM GOOD!");
|
||||
expect(resolvedTags[2].title).toBe("i am here");
|
||||
expect(
|
||||
tags.every((t) => !db.tags.collection.exists(makeId(t)))
|
||||
).toBeTruthy();
|
||||
expect(db.legacyTags.items()).toHaveLength(0);
|
||||
}));
|
||||
});
|
||||
|
||||
@@ -419,37 +407,37 @@ describe("[5.9] make colors syncable", () => {
|
||||
test("create colors from notes & link to them using relations", () =>
|
||||
databaseTest().then(async (db) => {
|
||||
const noteId = getId();
|
||||
await db.notes.collection.add({
|
||||
await db.legacyNotes.add({
|
||||
type: "note",
|
||||
title: "I am a note",
|
||||
color: "blue",
|
||||
id: noteId
|
||||
});
|
||||
|
||||
await db.colors.collection.add({
|
||||
await db.legacyColors.add({
|
||||
id: makeId("blue"),
|
||||
noteIds: [noteId],
|
||||
type: "tag",
|
||||
title: "blue"
|
||||
});
|
||||
|
||||
const note = db.notes.note(noteId);
|
||||
const note = db.legacyNotes.get(noteId);
|
||||
if (!note) throw new Error("Failed to find note.");
|
||||
|
||||
expect(await migrateItem(note.data, 5.9, 6.0, "note", db, "backup")).toBe(
|
||||
expect(await migrateItem(note, 5.9, 6.0, "note", db, "backup")).toBe(
|
||||
true
|
||||
);
|
||||
|
||||
const resolvedColors = db.relations
|
||||
const resolvedColors = await db.relations
|
||||
.to({ type: "note", id: noteId }, "color")
|
||||
.resolved();
|
||||
.resolve();
|
||||
|
||||
expect(note.data.color).toBeUndefined();
|
||||
expect(db.colors.all).toHaveLength(1);
|
||||
expect(note.color).toBeUndefined();
|
||||
expect(await db.colors.all.count()).toBe(1);
|
||||
expect(resolvedColors).toHaveLength(1);
|
||||
expect(resolvedColors[0].title).toBe("blue");
|
||||
expect(resolvedColors[0].colorCode).toBe("#2196F3");
|
||||
expect(db.colors.collection.exists(makeId("blue"))).toBeFalsy();
|
||||
expect(db.legacyColors.exists(makeId("blue"))).toBeFalsy();
|
||||
}));
|
||||
|
||||
test("migrate old color item to new one", () =>
|
||||
@@ -501,7 +489,7 @@ describe("[5.9] make colors syncable", () => {
|
||||
test("migrate color before notes", () =>
|
||||
databaseTest().then(async (db) => {
|
||||
const noteId = getId();
|
||||
await db.notes.collection.add({
|
||||
await db.legacyNotes.add({
|
||||
type: "note",
|
||||
title: "I am a note",
|
||||
color: "blue",
|
||||
@@ -522,25 +510,25 @@ describe("[5.9] make colors syncable", () => {
|
||||
title: "blue"
|
||||
};
|
||||
await migrateItem(color, 5.9, 6.0, "tag", db, "backup");
|
||||
await db.colors.collection.add(color);
|
||||
await db.colors.add(color);
|
||||
|
||||
const note = db.notes.note(noteId);
|
||||
const note = db.legacyNotes.get(noteId);
|
||||
if (!note) throw new Error("Failed to find note.");
|
||||
|
||||
expect(await migrateItem(note.data, 5.9, 6.0, "note", db, "backup")).toBe(
|
||||
expect(await migrateItem(note, 5.9, 6.0, "note", db, "backup")).toBe(
|
||||
true
|
||||
);
|
||||
|
||||
const resolvedColors = db.relations
|
||||
const resolvedColors = await db.relations
|
||||
.to({ type: "note", id: noteId }, "color")
|
||||
.resolved();
|
||||
.resolve();
|
||||
|
||||
expect(note.data.color).toBeUndefined();
|
||||
expect(db.colors.all).toHaveLength(1);
|
||||
expect(note.color).toBeUndefined();
|
||||
expect(await db.colors.all.count()).toBe(1);
|
||||
expect(resolvedColors).toHaveLength(1);
|
||||
expect(resolvedColors[0].title).toBe("I AM GOOD!");
|
||||
expect(resolvedColors[0].colorCode).toBe("#2196F3");
|
||||
expect(db.colors.collection.exists(makeId("blue"))).toBeFalsy();
|
||||
expect(db.legacyColors.exists(makeId("blue"))).toBeFalsy();
|
||||
}));
|
||||
});
|
||||
|
||||
@@ -553,15 +541,31 @@ test("[5.9] move attachments.noteIds to relations", () =>
|
||||
};
|
||||
await migrateItem(attachment, 5.9, 6.0, "attachment", db, "backup");
|
||||
|
||||
const linkedNotes = db.relations.from(
|
||||
{ type: "attachment", id: "ATTACHMENT_ID" },
|
||||
"note"
|
||||
);
|
||||
const linkedNotes = await db.relations
|
||||
.from({ type: "attachment", id: "ATTACHMENT_ID" }, "note")
|
||||
.get();
|
||||
expect(attachment.noteIds).toBeUndefined();
|
||||
expect(linkedNotes).toHaveLength(1);
|
||||
expect(linkedNotes[0].to.id).toBe("HELLO_NOTE_ID");
|
||||
expect(linkedNotes[0]).toBe("HELLO_NOTE_ID");
|
||||
}));
|
||||
|
||||
test.todo("[5.9] flatten attachment object", () =>
|
||||
databaseTest().then(async (db) => {
|
||||
// const attachment = {
|
||||
// id: "ATTACHMENT_ID",
|
||||
// type: "attachment",
|
||||
// noteIds: ["HELLO_NOTE_ID"]
|
||||
// };
|
||||
// await migrateItem(attachment, 5.9, 6.0, "attachment", db, "backup");
|
||||
// const linkedNotes = await db.relations
|
||||
// .from({ type: "attachment", id: "ATTACHMENT_ID" }, "note")
|
||||
// .get();
|
||||
// expect(attachment.noteIds).toBeUndefined();
|
||||
// expect(linkedNotes).toHaveLength(1);
|
||||
// expect(linkedNotes[0]).toBe("HELLO_NOTE_ID");
|
||||
})
|
||||
);
|
||||
|
||||
describe("[5.9] move topics out of notebooks & use relations", () => {
|
||||
test("convert topics to subnotebooks", () =>
|
||||
databaseTest().then(async (db) => {
|
||||
@@ -575,17 +579,16 @@ describe("[5.9] move topics out of notebooks & use relations", () => {
|
||||
};
|
||||
await migrateItem(notebook, 5.9, 6.0, "notebook", db, "backup");
|
||||
|
||||
const linkedNotebooks = db.relations.from(
|
||||
{ type: "notebook", id: "parent_notebook" },
|
||||
"notebook"
|
||||
);
|
||||
const linkedNotebooks = await db.relations
|
||||
.from({ type: "notebook", id: "parent_notebook" }, "notebook")
|
||||
.get();
|
||||
expect(notebook.topics).toBeUndefined();
|
||||
expect(linkedNotebooks).toHaveLength(2);
|
||||
expect(linkedNotebooks.some((a) => a.to.id === "topics1")).toBeTruthy();
|
||||
expect(linkedNotebooks.some((a) => a.to.id === "topics2")).toBeTruthy();
|
||||
expect(db.notebooks.all).toHaveLength(2);
|
||||
expect(db.notebooks.notebook("topics1")).toBeDefined();
|
||||
expect(db.notebooks.notebook("topics2")).toBeDefined();
|
||||
expect(linkedNotebooks.some((a) => a === "topics1")).toBeTruthy();
|
||||
expect(linkedNotebooks.some((a) => a === "topics2")).toBeTruthy();
|
||||
expect(await db.notebooks.all.count()).toBe(2);
|
||||
expect(await db.notebooks.notebook("topics1")).toBeDefined();
|
||||
expect(await db.notebooks.notebook("topics2")).toBeDefined();
|
||||
}));
|
||||
|
||||
test("convert topic shortcuts to notebook shortcuts", () =>
|
||||
@@ -600,8 +603,8 @@ describe("[5.9] move topics out of notebooks & use relations", () => {
|
||||
};
|
||||
await migrateItem(shortcut, 5.9, 6.0, "shortcut", db, "backup");
|
||||
|
||||
expect(shortcut.item.type).toBe("notebook");
|
||||
expect(shortcut.item.id).toBe("topics1");
|
||||
expect(shortcut.itemType).toBe("notebook");
|
||||
expect(shortcut.itemId).toBe("topics1");
|
||||
}));
|
||||
|
||||
test("convert topic links in note to relations", () =>
|
||||
@@ -613,13 +616,13 @@ describe("[5.9] move topics out of notebooks & use relations", () => {
|
||||
};
|
||||
await migrateItem(note, 5.9, 6.0, "note", db, "backup");
|
||||
|
||||
const linkedNotebooks = db.relations
|
||||
const linkedNotebooks = await db.relations
|
||||
.to({ type: "note", id: "note1" }, "notebook")
|
||||
.sort((a, b) => a.to.id.localeCompare(b.to.id));
|
||||
.get();
|
||||
expect(note.notebooks).toBeUndefined();
|
||||
expect(linkedNotebooks).toHaveLength(2);
|
||||
expect(linkedNotebooks.some((a) => a.from.id === "topic1")).toBeTruthy();
|
||||
expect(linkedNotebooks.some((a) => a.from.id === "topic2")).toBeTruthy();
|
||||
expect(linkedNotebooks.some((a) => a === "topic1")).toBeTruthy();
|
||||
expect(linkedNotebooks.some((a) => a === "topic2")).toBeTruthy();
|
||||
}));
|
||||
});
|
||||
|
||||
|
||||
@@ -80,9 +80,8 @@ test("restoring an old session should replace note's content", () =>
|
||||
const [, firstVersion] = await db.noteHistory.get(id);
|
||||
await db.noteHistory.restore(firstVersion.id);
|
||||
|
||||
await expect(db.notes.note(id).content()).resolves.toBe(
|
||||
TEST_NOTE.content.data
|
||||
);
|
||||
const contentId = (await db.notes.note(id)).contentId;
|
||||
expect((await db.content.get(contentId)).data).toBe(TEST_NOTE.content.data);
|
||||
}));
|
||||
|
||||
test("date created of session should not change on edit", () =>
|
||||
@@ -152,7 +151,7 @@ test("auto clear sessions if they exceed the limit", () =>
|
||||
await db.noteHistory.cleanup(id, 1);
|
||||
|
||||
sessions = await db.noteHistory.get(id);
|
||||
expect(await db.noteHistory.get(id)).toHaveLength(1);
|
||||
expect(sessions).toHaveLength(1);
|
||||
|
||||
const content = await db.noteHistory.content(sessions[0].id);
|
||||
expect(content.data).toBe(editedContent.data);
|
||||
@@ -163,7 +162,7 @@ test("save a locked note should add a locked session to note history", () =>
|
||||
await db.vault.create("password");
|
||||
await db.vault.add(id);
|
||||
|
||||
const note = db.notes.note(id).data;
|
||||
const note = await db.notes.note(id);
|
||||
const editedContent = { type: "tiptap", data: "<p>hello world</p>" };
|
||||
await db.vault.save({
|
||||
...note,
|
||||
|
||||
@@ -21,35 +21,30 @@ import { notebookTest, TEST_NOTEBOOK } from "./utils";
|
||||
import { test, expect } from "vitest";
|
||||
|
||||
test("add a notebook", () =>
|
||||
notebookTest().then(({ db, id }) => {
|
||||
notebookTest().then(async ({ db, id }) => {
|
||||
expect(id).toBeDefined();
|
||||
let notebook = db.notebooks.notebook(id);
|
||||
const notebook = await db.notebooks.notebook(id);
|
||||
expect(notebook).toBeDefined();
|
||||
expect(notebook.title).toBe(TEST_NOTEBOOK.title);
|
||||
}));
|
||||
|
||||
test("get all notebooks", () =>
|
||||
notebookTest().then(({ db }) => {
|
||||
expect(db.notebooks.all.length).toBeGreaterThan(0);
|
||||
notebookTest().then(async ({ db }) => {
|
||||
expect(await db.notebooks.all.count()).toBeGreaterThan(0);
|
||||
}));
|
||||
|
||||
test("pin a notebook", () =>
|
||||
notebookTest().then(async ({ db, id }) => {
|
||||
let notebook = db.notebooks.notebook(id);
|
||||
await notebook.pin();
|
||||
notebook = db.notebooks.notebook(id);
|
||||
expect(notebook.data.pinned).toBe(true);
|
||||
await db.notebooks.pin(true, id);
|
||||
const notebook = await db.notebooks.notebook(id);
|
||||
expect(notebook.pinned).toBe(true);
|
||||
}));
|
||||
|
||||
test("unpin a notebook", () =>
|
||||
notebookTest().then(async ({ db, id }) => {
|
||||
let notebook = db.notebooks.notebook(id);
|
||||
await notebook.pin();
|
||||
notebook = db.notebooks.notebook(id);
|
||||
expect(notebook.data.pinned).toBe(true);
|
||||
await notebook.pin();
|
||||
notebook = db.notebooks.notebook(id);
|
||||
expect(notebook.data.pinned).toBe(false);
|
||||
await db.notebooks.pin(false, id);
|
||||
const notebook = await db.notebooks.notebook(id);
|
||||
expect(notebook.pinned).toBe(false);
|
||||
}));
|
||||
|
||||
test("updating notebook with empty title should throw", () =>
|
||||
|
||||
@@ -68,18 +68,16 @@ test("add invalid note", () =>
|
||||
expect(db.notes.add({ hello: "world" })).rejects.toThrow();
|
||||
}));
|
||||
|
||||
test.only("add note", () =>
|
||||
test("add note", () =>
|
||||
noteTest().then(async ({ db, id }) => {
|
||||
const note = await db.notes.note$(id);
|
||||
expect(note).toBeDefined();
|
||||
const content = await db.content.get(note!.contentId!);
|
||||
expect(content!.data).toStrictEqual(TEST_NOTE.content.data);
|
||||
expect(await db.notes.exists(id)).toBe(true);
|
||||
}));
|
||||
|
||||
test("get note content", () =>
|
||||
noteTest().then(async ({ db, id }) => {
|
||||
const content = await db.notes.note(id)?.content();
|
||||
expect(content).toStrictEqual(TEST_NOTE.content.data);
|
||||
const note = await db.notes.note(id);
|
||||
const content = await db.content.get(note.contentId);
|
||||
expect(content?.data).toStrictEqual(TEST_NOTE.content.data);
|
||||
}));
|
||||
|
||||
test("delete note", () =>
|
||||
@@ -97,21 +95,21 @@ test("delete note", () =>
|
||||
|
||||
await db.notes.addToNotebook(subNotebookId, id);
|
||||
|
||||
await db.notes.delete(id);
|
||||
await db.notes.moveToTrash(id);
|
||||
|
||||
expect(db.notes.note(id)).toBeUndefined();
|
||||
expect(db.notebooks.totalNotes(notebookId)).toBe(0);
|
||||
expect(db.notebooks.totalNotes(subNotebookId)).toBe(0);
|
||||
expect(await db.notes.note(id)).toBeUndefined();
|
||||
expect(await db.notebooks.totalNotes(notebookId)).toBe(0);
|
||||
expect(await db.notebooks.totalNotes(subNotebookId)).toBe(0);
|
||||
}));
|
||||
|
||||
test("get all notes", () =>
|
||||
noteTest().then(async ({ db }) => {
|
||||
expect(db.notes.all.length).toBeGreaterThan(0);
|
||||
expect(await db.notes.all.count()).toBeGreaterThan(0);
|
||||
}));
|
||||
|
||||
test("note without a title should get a premade title", () =>
|
||||
noteTest().then(async ({ db, id }) => {
|
||||
const note = db.notes.note(id);
|
||||
const note = await db.notes.note(id);
|
||||
expect(note?.title.startsWith("Note ")).toBe(true);
|
||||
}));
|
||||
|
||||
@@ -123,7 +121,7 @@ test("note should get headline from content", () =>
|
||||
data: "<p>This is a very colorful existence.</p>"
|
||||
}
|
||||
}).then(async ({ db, id }) => {
|
||||
const note = db.notes.note(id);
|
||||
const note = await db.notes.note(id);
|
||||
expect(note?.headline).toBe("This is a very colorful existence.");
|
||||
}));
|
||||
|
||||
@@ -135,14 +133,14 @@ test("note should not get headline if there is no p tag", () =>
|
||||
data: `<ol style="list-style-type: decimal;" data-mce-style="list-style-type: decimal;"><li>Hello I won't be a headline :(</li><li>Me too.</li><li>Gold.</li></ol>`
|
||||
}
|
||||
}).then(async ({ db, id }) => {
|
||||
const note = db.notes.note(id);
|
||||
const note = await db.notes.note(id);
|
||||
expect(note?.headline).toBe("");
|
||||
}));
|
||||
|
||||
test("note title should allow trailing space", () =>
|
||||
noteTest({ title: "Hello ", content: TEST_NOTE.content }).then(
|
||||
async ({ db, id }) => {
|
||||
const note = db.notes.note(id);
|
||||
const note = await db.notes.note(id);
|
||||
expect(note?.title).toBe("Hello ");
|
||||
}
|
||||
));
|
||||
@@ -150,7 +148,7 @@ test("note title should allow trailing space", () =>
|
||||
test("note title should not allow newlines", () =>
|
||||
noteTest({ title: "Hello\nhello", content: TEST_NOTE.content }).then(
|
||||
async ({ db, id }) => {
|
||||
const note = db.notes.note(id);
|
||||
const note = await db.notes.note(id);
|
||||
expect(note?.title).toBe("Hello hello");
|
||||
}
|
||||
));
|
||||
@@ -169,53 +167,53 @@ test("update note", () =>
|
||||
// colors: ["red", "blue"]
|
||||
};
|
||||
await db.notes.add(noteData);
|
||||
const note = db.notes.note(id);
|
||||
const note = await db.notes.note(id);
|
||||
const content = await db.content.get(note.contentId);
|
||||
|
||||
expect(note?.title).toBe(noteData.title);
|
||||
expect(await note?.content()).toStrictEqual(noteData.content.data);
|
||||
expect(note?.data.pinned).toBe(true);
|
||||
expect(note?.data.favorite).toBe(true);
|
||||
expect(content.data).toStrictEqual(noteData.content.data);
|
||||
expect(note?.pinned).toBe(true);
|
||||
expect(note?.favorite).toBe(true);
|
||||
}));
|
||||
|
||||
test("get favorite notes", () =>
|
||||
noteTest({
|
||||
...TEST_NOTE,
|
||||
favorite: true
|
||||
}).then(({ db }) => {
|
||||
expect(db.notes.favorites.length).toBeGreaterThan(0);
|
||||
}).then(async ({ db }) => {
|
||||
expect(await db.notes.favorites.count()).toBeGreaterThan(0);
|
||||
}));
|
||||
|
||||
test("get pinned notes", () =>
|
||||
noteTest({
|
||||
...TEST_NOTE,
|
||||
pinned: true
|
||||
}).then(({ db }) => {
|
||||
expect(db.notes.pinned.length).toBeGreaterThan(0);
|
||||
}).then(async ({ db }) => {
|
||||
expect(await db.notes.pinned.count()).toBeGreaterThan(0);
|
||||
}));
|
||||
|
||||
test("get grouped notes by abc", () => groupedTest("abc"));
|
||||
test.todo("get grouped notes by abc", () => groupedTest("abc"));
|
||||
|
||||
test("get grouped notes by month", () => groupedTest("month"));
|
||||
test.todo("get grouped notes by month", () => groupedTest("month"));
|
||||
|
||||
test("get grouped notes by year", () => groupedTest("year"));
|
||||
test.todo("get grouped notes by year", () => groupedTest("year"));
|
||||
|
||||
test("get grouped notes by weak", () => groupedTest("week"));
|
||||
test.todo("get grouped notes by weak", () => groupedTest("week"));
|
||||
|
||||
test("get grouped notes default", () => groupedTest("default"));
|
||||
test.todo("get grouped notes default", () => groupedTest("default"));
|
||||
|
||||
test("pin note", () =>
|
||||
noteTest().then(async ({ db, id }) => {
|
||||
let note = db.notes.note(id);
|
||||
await note?.pin();
|
||||
note = db.notes.note(id);
|
||||
expect(note?.data.pinned).toBe(true);
|
||||
await db.notes.pin(true, id);
|
||||
const note = await db.notes.note(id);
|
||||
expect(note.pinned).toBe(true);
|
||||
}));
|
||||
|
||||
test("favorite note", () =>
|
||||
noteTest().then(async ({ db, id }) => {
|
||||
let note = db.notes.note(id);
|
||||
await note?.favorite();
|
||||
note = db.notes.note(id);
|
||||
expect(note?.data.favorite).toBe(true);
|
||||
await db.notes.favorite(true, id);
|
||||
const note = await db.notes.note(id);
|
||||
expect(note.favorite).toBe(true);
|
||||
}));
|
||||
|
||||
test("add note to subnotebook", () =>
|
||||
@@ -230,10 +228,12 @@ test("add note to subnotebook", () =>
|
||||
);
|
||||
|
||||
expect(
|
||||
db.relations.from({ type: "notebook", id: notebookId }, "notebook")
|
||||
).toHaveLength(1);
|
||||
expect(db.notebooks.totalNotes(subNotebookId)).toBe(1);
|
||||
expect(db.notebooks.totalNotes(notebookId)).toBe(1);
|
||||
await db.relations
|
||||
.from({ type: "notebook", id: notebookId }, "notebook")
|
||||
.count()
|
||||
).toBe(1);
|
||||
expect(await db.notebooks.totalNotes(subNotebookId)).toBe(1);
|
||||
expect(await db.notebooks.totalNotes(notebookId)).toBe(1);
|
||||
}));
|
||||
|
||||
test("duplicate note to topic should not be added", () =>
|
||||
@@ -242,7 +242,7 @@ test("duplicate note to topic should not be added", () =>
|
||||
notebookTitle: "Hello",
|
||||
subNotebookTitle: "Home"
|
||||
});
|
||||
expect(db.notebooks.totalNotes(subNotebookId)).toBe(1);
|
||||
expect(await db.notebooks.totalNotes(subNotebookId)).toBe(1);
|
||||
}));
|
||||
|
||||
test("add the same note to 2 notebooks", () =>
|
||||
@@ -257,16 +257,18 @@ test("add the same note to 2 notebooks", () =>
|
||||
});
|
||||
|
||||
expect(
|
||||
db.relations
|
||||
await db.relations
|
||||
.from({ type: "notebook", id: nb1.subNotebookId }, "note")
|
||||
.has(id)
|
||||
).toBe(true);
|
||||
expect(
|
||||
db.relations
|
||||
await db.relations
|
||||
.from({ type: "notebook", id: nb2.subNotebookId }, "note")
|
||||
.has(id)
|
||||
).toBe(true);
|
||||
expect(db.relations.to({ type: "note", id }, "notebook")).toHaveLength(2);
|
||||
expect(
|
||||
await db.relations.to({ type: "note", id }, "notebook").count()
|
||||
).toBe(2);
|
||||
}));
|
||||
|
||||
test("moving note to same notebook and topic should do nothing", () =>
|
||||
@@ -278,7 +280,9 @@ test("moving note to same notebook and topic should do nothing", () =>
|
||||
|
||||
await db.notes.addToNotebook(subNotebookId, id);
|
||||
|
||||
expect(db.relations.to({ type: "note", id }, "notebook")).toHaveLength(1);
|
||||
expect(
|
||||
await db.relations.to({ type: "note", id }, "notebook").count()
|
||||
).toBe(1);
|
||||
}));
|
||||
|
||||
test("export note to html", () =>
|
||||
@@ -319,31 +323,29 @@ test("deleting a colored note should remove it from that color", () =>
|
||||
);
|
||||
|
||||
expect(
|
||||
db.relations.from({ id: colorId, type: "color" }, "note").has(id)
|
||||
await db.relations.from({ id: colorId, type: "color" }, "note").has(id)
|
||||
).toBe(true);
|
||||
|
||||
await db.notes.delete(id);
|
||||
await db.notes.moveToTrash(id);
|
||||
|
||||
expect(
|
||||
db.relations.from({ id: colorId, type: "color" }, "note").has(id)
|
||||
await db.relations.from({ id: colorId, type: "color" }, "note").has(id)
|
||||
).toBe(false);
|
||||
}));
|
||||
|
||||
test("note's content should follow note's localOnly property", () =>
|
||||
noteTest().then(async ({ db, id }) => {
|
||||
await db.notes.note(id)?.localOnly();
|
||||
let note = db.notes.note(id);
|
||||
if (!note?.contentId) throw new Error("No content in note.");
|
||||
await db.notes.localOnly(true, id);
|
||||
let note = await db.notes.note(id);
|
||||
|
||||
expect(note?.data.localOnly).toBe(true);
|
||||
expect(note.localOnly).toBe(true);
|
||||
let content = await db.content.get(note.contentId);
|
||||
expect(content?.localOnly).toBe(true);
|
||||
|
||||
await db.notes.note(id)?.localOnly();
|
||||
note = db.notes.note(id);
|
||||
if (!note?.contentId) throw new Error("No content in note.");
|
||||
await db.notes.localOnly(false, id);
|
||||
note = await db.notes.note(id);
|
||||
|
||||
expect(note?.data.localOnly).toBe(false);
|
||||
expect(note.localOnly).toBe(false);
|
||||
content = await db.content.get(note.contentId);
|
||||
expect(content?.localOnly).toBe(false);
|
||||
}));
|
||||
@@ -365,11 +367,11 @@ test("note content should not contain image base64 data after save", () =>
|
||||
await loginFakeUser(db);
|
||||
|
||||
await db.notes.add({ id, content: { type: "tiptap", data: IMG_CONTENT } });
|
||||
const note = db.notes.note(id);
|
||||
const content = await note?.content();
|
||||
const note = await db.notes.note(id);
|
||||
const content = await db.content.get(note.contentId);
|
||||
|
||||
expect(content).not.toContain(`src="data:image/png;`);
|
||||
expect(content).not.toContain(`src=`);
|
||||
expect(content.data).not.toContain(`src="data:image/png;`);
|
||||
expect(content.data).not.toContain(`src=`);
|
||||
}));
|
||||
|
||||
test("adding a note with an invalid tag should clean the tag array", () =>
|
||||
@@ -383,6 +385,6 @@ test("adding a note with an invalid tag should clean the tag array", () =>
|
||||
).resolves.toBe("helloworld");
|
||||
|
||||
expect(
|
||||
db.relations.to({ id: "helloworld", type: "note" }, "tag")
|
||||
).toHaveLength(0);
|
||||
await db.relations.to({ id: "helloworld", type: "note" }, "tag").count()
|
||||
).toBe(0);
|
||||
}));
|
||||
|
||||
@@ -23,44 +23,45 @@ import { test, expect } from "vitest";
|
||||
test("create a shortcut of an invalid item should throw", () =>
|
||||
databaseTest().then(async (db) => {
|
||||
await expect(() =>
|
||||
db.shortcuts.add({ item: { type: "HELLO!" } })
|
||||
db.shortcuts.add({ itemType: "HELLO!" })
|
||||
).rejects.toThrow(/cannot create a shortcut/i);
|
||||
}));
|
||||
|
||||
test("create a shortcut of notebook", () =>
|
||||
notebookTest().then(async ({ db, id }) => {
|
||||
await db.shortcuts.add({ item: { type: "notebook", id } });
|
||||
expect(db.shortcuts.exists(id)).toBe(true);
|
||||
expect(db.shortcuts.all[0].item.id).toBe(id);
|
||||
await db.shortcuts.add({ itemType: "notebook", itemId: id });
|
||||
expect(await db.shortcuts.exists(id)).toBe(true);
|
||||
expect(await db.shortcuts.all.has(id)).toBe(true);
|
||||
}));
|
||||
|
||||
test("create a duplicate shortcut of notebook", () =>
|
||||
notebookTest().then(async ({ db, id }) => {
|
||||
await db.shortcuts.add({ item: { type: "notebook", id } });
|
||||
await db.shortcuts.add({ item: { type: "notebook", id } });
|
||||
await db.shortcuts.add({ itemType: "notebook", itemId: id });
|
||||
await db.shortcuts.add({ itemType: "notebook", itemId: id });
|
||||
|
||||
expect(db.shortcuts.all).toHaveLength(1);
|
||||
expect(db.shortcuts.all[0].item.id).toBe(id);
|
||||
expect(await db.shortcuts.all.count()).toBe(1);
|
||||
expect(await db.shortcuts.all.has(id)).toBe(true);
|
||||
}));
|
||||
|
||||
test("pin a tag", () =>
|
||||
databaseTest().then(async (db) => {
|
||||
const tagId = await db.tags.add({ title: "HELLO!" });
|
||||
await db.shortcuts.add({ item: { type: "tag", id: tagId } });
|
||||
await db.shortcuts.add({ itemType: "tag", itemId: tagId });
|
||||
|
||||
expect(db.shortcuts.all).toHaveLength(1);
|
||||
expect(db.shortcuts.all[0].item.id).toBe(tagId);
|
||||
expect(await db.shortcuts.all.count()).toBe(1);
|
||||
expect(await db.shortcuts.all.has(tagId)).toBe(true);
|
||||
}));
|
||||
|
||||
test("remove shortcut", () =>
|
||||
databaseTest().then(async (db) => {
|
||||
const tagId = await db.tags.add({ title: "HELLO!" });
|
||||
const shortcutId = await db.shortcuts.add({
|
||||
item: { type: "tag", id: tagId }
|
||||
itemType: "tag",
|
||||
itemId: tagId
|
||||
});
|
||||
|
||||
expect(db.shortcuts.all).toHaveLength(1);
|
||||
expect(await db.shortcuts.all.count()).toBe(1);
|
||||
|
||||
await db.shortcuts.remove(shortcutId);
|
||||
expect(db.shortcuts.all).toHaveLength(0);
|
||||
expect(await db.shortcuts.all.count()).toBe(0);
|
||||
}));
|
||||
|
||||
@@ -37,8 +37,10 @@ for (const type of ["tag", "color"] as const) {
|
||||
const tagId = await db[collection].add(item("hello"));
|
||||
await db.relations.add({ id: tagId, type }, { id, type: "note" });
|
||||
|
||||
expect(db[collection].all[0].title).toBe("hello");
|
||||
expect(db.relations.from({ id: tagId, type }, "note")).toHaveLength(1);
|
||||
expect((await db[collection][type](tagId)).title).toBe("hello");
|
||||
expect(await db.relations.from({ id: tagId, type }, "note").count()).toBe(
|
||||
1
|
||||
);
|
||||
}));
|
||||
|
||||
test(`${type} 2 notes`, () =>
|
||||
@@ -50,15 +52,17 @@ for (const type of ["tag", "color"] as const) {
|
||||
await db.relations.add({ id: tagId, type }, { id, type: "note" });
|
||||
await db.relations.add({ id: tagId, type }, { id: id2, type: "note" });
|
||||
|
||||
expect(db[collection].all[0].title).toBe("hello");
|
||||
expect(db.relations.from({ id: tagId, type }, "note")).toHaveLength(2);
|
||||
expect((await db[collection][type](tagId)).title).toBe("hello");
|
||||
expect(await db.relations.from({ id: tagId, type }, "note").count()).toBe(
|
||||
2
|
||||
);
|
||||
}));
|
||||
|
||||
test(`rename a ${type}`, () =>
|
||||
databaseTest().then(async (db) => {
|
||||
const tagId = await db[collection].add(item("hello"));
|
||||
await db[collection].add({ id: tagId, title: `hello (new)` });
|
||||
expect(db[collection].all[0].title).toBe("hello (new)");
|
||||
expect((await db[collection][type](tagId)).title).toBe("hello (new)");
|
||||
}));
|
||||
|
||||
test(`remove a ${type}`, () =>
|
||||
@@ -67,16 +71,20 @@ for (const type of ["tag", "color"] as const) {
|
||||
await db.relations.add({ id: tagId, type }, { id, type: "note" });
|
||||
await db[collection].remove(tagId);
|
||||
|
||||
expect(db[collection].all).toHaveLength(0);
|
||||
expect(db.relations.from({ id: tagId, type }, "note")).toHaveLength(0);
|
||||
expect(await db[collection].collection.count()).toBe(0);
|
||||
expect(await db.relations.from({ id: tagId, type }, "note").count()).toBe(
|
||||
0
|
||||
);
|
||||
}));
|
||||
|
||||
test(`invalid characters from ${type} title are removed`, () =>
|
||||
databaseTest().then(async (db) => {
|
||||
await db[collection].add(
|
||||
const tagId = await db[collection].add(
|
||||
item(" \n\n\n\t\t\thello l\n\n\n\t\t ")
|
||||
);
|
||||
expect(db[collection].all[0].title).toBe("hello l");
|
||||
expect((await db[collection][type](tagId)).title).toBe(
|
||||
"hello l"
|
||||
);
|
||||
}));
|
||||
|
||||
test(`remove a note from ${type}`, () =>
|
||||
@@ -85,6 +93,8 @@ for (const type of ["tag", "color"] as const) {
|
||||
await db.relations.add({ id: tagId, type }, { id, type: "note" });
|
||||
|
||||
await db.relations.unlink({ id: tagId, type }, { id, type: "note" });
|
||||
expect(db.relations.from({ id: tagId, type }, "note")).toHaveLength(0);
|
||||
expect(await db.relations.from({ id: tagId, type }, "note").count()).toBe(
|
||||
0
|
||||
);
|
||||
}));
|
||||
}
|
||||
|
||||
@@ -28,8 +28,8 @@ import {
|
||||
import { test, expect } from "vitest";
|
||||
|
||||
test("trash should be empty", () =>
|
||||
databaseTest().then((db) => {
|
||||
expect(db.trash.all).toHaveLength(0);
|
||||
databaseTest().then(async (db) => {
|
||||
expect(await db.trash.all()).toHaveLength(0);
|
||||
}));
|
||||
|
||||
test("permanently delete a note", () =>
|
||||
@@ -38,19 +38,19 @@ test("permanently delete a note", () =>
|
||||
...TEST_NOTE,
|
||||
sessionId: Date.now().toString()
|
||||
});
|
||||
const note = db.notes.note(noteId);
|
||||
const note = await db.notes.note(noteId);
|
||||
if (!note) throw new Error("Could not find note.");
|
||||
|
||||
let sessions = await db.noteHistory.get(noteId);
|
||||
expect(sessions).toHaveLength(1);
|
||||
|
||||
await db.notes.delete(noteId);
|
||||
expect(db.trash.all).toHaveLength(1);
|
||||
expect(await note.content()).toBeDefined();
|
||||
await db.trash.delete(db.trash.all[0].id);
|
||||
expect(db.trash.all).toHaveLength(0);
|
||||
const content = note.contentId && (await db.content.get(note.contentId));
|
||||
expect(content).toBeUndefined();
|
||||
await db.notes.moveToTrash(noteId);
|
||||
|
||||
expect(await db.trash.all()).toHaveLength(1);
|
||||
expect(await db.content.get(note.contentId)).toBeDefined();
|
||||
await db.trash.delete({ id: noteId, type: "note" });
|
||||
expect(await db.trash.all()).toHaveLength(0);
|
||||
expect(await db.content.get(note.contentId)).toBeUndefined();
|
||||
|
||||
sessions = await db.noteHistory.get(noteId);
|
||||
expect(sessions).toHaveLength(0);
|
||||
@@ -67,45 +67,45 @@ test("restore a deleted note that was in a notebook", () =>
|
||||
);
|
||||
await db.notes.addToNotebook(subNotebookId, id);
|
||||
|
||||
await db.notes.delete(id);
|
||||
await db.trash.restore(db.trash.all[0].id);
|
||||
expect(db.trash.all).toHaveLength(0);
|
||||
await db.notes.moveToTrash(id);
|
||||
await db.trash.restore({ type: "note", id });
|
||||
expect(await db.trash.all()).toHaveLength(0);
|
||||
|
||||
const note = db.notes.note(id);
|
||||
const note = await db.notes.note(id);
|
||||
const content = await db.content.get(note.contentId);
|
||||
|
||||
expect(note).toBeDefined();
|
||||
expect(await note?.content()).toBe(TEST_NOTE.content.data);
|
||||
expect(content.data).toBe(TEST_NOTE.content.data);
|
||||
|
||||
expect(
|
||||
db.relations.from({ type: "notebook", id: subNotebookId }, "note").has(id)
|
||||
await db.relations
|
||||
.from({ type: "notebook", id: subNotebookId }, "note")
|
||||
.has(id)
|
||||
).toBe(true);
|
||||
}));
|
||||
|
||||
test("delete a locked note", () =>
|
||||
noteTest().then(async ({ db, id }) => {
|
||||
const note = db.notes.note(id);
|
||||
const note = await db.notes.note(id);
|
||||
await db.vault.create("password");
|
||||
await db.vault.add(id);
|
||||
await db.notes.delete(id);
|
||||
expect(db.trash.all).toHaveLength(1);
|
||||
expect(
|
||||
note && note.contentId && (await db.content.get(note.contentId))
|
||||
).toBeDefined();
|
||||
await db.notes.moveToTrash(id);
|
||||
expect(await db.trash.all()).toHaveLength(1);
|
||||
expect(await db.content.get(note.contentId)).toBeDefined();
|
||||
}));
|
||||
|
||||
test("restore a deleted locked note", () =>
|
||||
noteTest().then(async ({ db, id }) => {
|
||||
let note = db.notes.note(id);
|
||||
let note = await db.notes.note(id);
|
||||
await db.vault.create("password");
|
||||
await db.vault.add(id);
|
||||
await db.notes.delete(id);
|
||||
expect(db.trash.all).toHaveLength(1);
|
||||
expect(
|
||||
note && note.contentId && (await db.content.get(note.contentId))
|
||||
).toBeDefined();
|
||||
await db.trash.restore(db.trash.all[0].id);
|
||||
expect(db.trash.all).toHaveLength(0);
|
||||
note = db.notes.note(id);
|
||||
await db.notes.moveToTrash(id);
|
||||
expect(await db.trash.all()).toHaveLength(1);
|
||||
expect(await db.content.get(note.contentId)).toBeDefined();
|
||||
await db.trash.restore({ type: "note", id });
|
||||
|
||||
note = await db.notes.note(id);
|
||||
expect(await db.trash.all()).toHaveLength(0);
|
||||
expect(note).toBeDefined();
|
||||
}));
|
||||
|
||||
@@ -114,13 +114,15 @@ test("restore a deleted note that's in a deleted notebook", () =>
|
||||
const notebookId = await db.notebooks.add(TEST_NOTEBOOK);
|
||||
await db.notes.addToNotebook(notebookId, id);
|
||||
|
||||
await db.notes.delete(id);
|
||||
await db.notebooks.delete(notebookId);
|
||||
await db.notes.moveToTrash(id);
|
||||
await db.notebooks.moveToTrash(notebookId);
|
||||
|
||||
await db.trash.restore(id);
|
||||
const note = db.notes.note(id);
|
||||
await db.trash.restore({ type: "note", id });
|
||||
const note = await db.notes.note(id);
|
||||
expect(note).toBeDefined();
|
||||
expect(db.relations.to({ type: "note", id }, "notebook")).toHaveLength(0);
|
||||
expect(
|
||||
await db.relations.to({ type: "note", id }, "notebook").count()
|
||||
).toBe(0);
|
||||
}));
|
||||
|
||||
test("delete a notebook", () =>
|
||||
@@ -129,11 +131,11 @@ test("delete a notebook", () =>
|
||||
|
||||
await db.notes.addToNotebook(id, noteId);
|
||||
|
||||
await db.notebooks.delete(id);
|
||||
expect(db.notebooks.notebook(id)).toBeUndefined();
|
||||
await db.notebooks.moveToTrash(id);
|
||||
expect(await db.notebooks.notebook(id)).toBeUndefined();
|
||||
expect(
|
||||
db.relations.to({ type: "note", id: noteId }, "notebook")
|
||||
).toHaveLength(0);
|
||||
await db.relations.to({ type: "note", id: noteId }, "notebook").count()
|
||||
).toBe(0);
|
||||
}));
|
||||
|
||||
test("restore a deleted notebook", () =>
|
||||
@@ -141,17 +143,17 @@ test("restore a deleted notebook", () =>
|
||||
const noteId = await db.notes.add(TEST_NOTE);
|
||||
await db.notes.addToNotebook(id, noteId);
|
||||
|
||||
await db.notebooks.delete(id);
|
||||
await db.trash.restore(id);
|
||||
await db.notebooks.moveToTrash(id);
|
||||
await db.trash.restore({ type: "notebook", id });
|
||||
|
||||
const notebook = db.notebooks.notebook(id);
|
||||
expect(notebook).toBeDefined();
|
||||
|
||||
expect(
|
||||
db.relations.to({ type: "note", id: noteId }, "notebook")
|
||||
).toHaveLength(1);
|
||||
await db.relations.to({ type: "note", id: noteId }, "notebook").count()
|
||||
).toBe(1);
|
||||
expect(
|
||||
db.relations.to({ type: "note", id: noteId }, "notebook").has(id)
|
||||
await db.relations.to({ type: "note", id: noteId }, "notebook").has(id)
|
||||
).toBe(true);
|
||||
}));
|
||||
|
||||
@@ -160,14 +162,14 @@ test("restore a notebook that has deleted notes", () =>
|
||||
const noteId = await db.notes.add(TEST_NOTE);
|
||||
await db.notes.addToNotebook(id, noteId);
|
||||
|
||||
await db.notebooks.delete(id);
|
||||
await db.notes.delete(noteId);
|
||||
await db.trash.restore(id);
|
||||
await db.notebooks.moveToTrash(id);
|
||||
await db.notes.moveToTrash(noteId);
|
||||
await db.trash.restore({ type: "notebook", id });
|
||||
|
||||
const notebook = db.notebooks.notebook(id);
|
||||
expect(notebook).toBeDefined();
|
||||
expect(
|
||||
db.relations.from({ type: "notebook", id: id }, "note").has(noteId)
|
||||
await db.relations.from({ type: "notebook", id: id }, "note").has(noteId)
|
||||
).toBe(false);
|
||||
}));
|
||||
|
||||
@@ -177,37 +179,28 @@ test("permanently delete items older than 7 days", () =>
|
||||
const noteId = await db.notes.add(TEST_NOTE);
|
||||
const notebookId = await db.notebooks.add(TEST_NOTEBOOK);
|
||||
|
||||
await db.notebooks.delete(notebookId);
|
||||
await db.notes.delete(noteId);
|
||||
await db.notebooks.moveToTrash(notebookId);
|
||||
await db.notes.moveToTrash(noteId);
|
||||
|
||||
const note = db.trash.all.find((t) => t.id === noteId);
|
||||
if (!note || note.itemType !== "note")
|
||||
throw new Error("Could not find note in trash.");
|
||||
|
||||
await db.notes.collection.update({
|
||||
...note,
|
||||
await db.notes.collection.update([noteId], {
|
||||
type: "trash",
|
||||
itemType: "note",
|
||||
id: noteId,
|
||||
dateDeleted: sevenDaysEarlier
|
||||
});
|
||||
|
||||
const notebook = db.trash.all.find((t) => t.id === notebookId);
|
||||
if (!notebook || notebook.itemType !== "notebook")
|
||||
throw new Error("Could not find notebook in trash.");
|
||||
|
||||
await db.notebooks.collection.update({
|
||||
...notebook,
|
||||
await db.notebooks.collection.update([notebookId], {
|
||||
type: "trash",
|
||||
id: notebookId,
|
||||
dateDeleted: sevenDaysEarlier,
|
||||
itemType: "notebook"
|
||||
});
|
||||
|
||||
expect(db.trash.all).toHaveLength(2);
|
||||
expect(await db.trash.all()).toHaveLength(2);
|
||||
|
||||
await db.trash.cleanup();
|
||||
|
||||
expect(db.trash.all).toHaveLength(0);
|
||||
expect(await db.trash.all()).toHaveLength(0);
|
||||
}));
|
||||
|
||||
test("trash cleanup should not delete items newer than 7 days", () =>
|
||||
@@ -215,14 +208,14 @@ test("trash cleanup should not delete items newer than 7 days", () =>
|
||||
const noteId = await db.notes.add(TEST_NOTE);
|
||||
const notebookId = await db.notebooks.add(TEST_NOTEBOOK);
|
||||
|
||||
await db.notebooks.delete(notebookId);
|
||||
await db.notes.delete(noteId);
|
||||
await db.notebooks.moveToTrash(notebookId);
|
||||
await db.notes.moveToTrash(noteId);
|
||||
|
||||
expect(db.trash.all).toHaveLength(2);
|
||||
expect(await db.trash.all()).toHaveLength(2);
|
||||
|
||||
await db.trash.cleanup();
|
||||
|
||||
expect(db.trash.all).toHaveLength(2);
|
||||
expect(await db.trash.all()).toHaveLength(2);
|
||||
}));
|
||||
|
||||
test("clear trash should delete note content", () =>
|
||||
@@ -237,16 +230,16 @@ test("clear trash should delete note content", () =>
|
||||
let sessions = await db.noteHistory.get(noteId);
|
||||
expect(sessions).toHaveLength(1);
|
||||
|
||||
const note = { ...db.notes.note(noteId)?.data };
|
||||
const note = { ...(await db.notes.note(noteId)) };
|
||||
|
||||
await db.notebooks.delete(notebookId);
|
||||
await db.notes.delete(noteId);
|
||||
await db.notebooks.moveToTrash(notebookId);
|
||||
await db.notes.moveToTrash(noteId);
|
||||
|
||||
expect(db.trash.all).toHaveLength(2);
|
||||
expect(await db.trash.all()).toHaveLength(2);
|
||||
|
||||
await db.trash.clear();
|
||||
|
||||
expect(db.trash.all).toHaveLength(0);
|
||||
expect(await db.trash.all()).toHaveLength(0);
|
||||
|
||||
const content = note.contentId && (await db.content.get(note.contentId));
|
||||
expect(content).toBeUndefined();
|
||||
|
||||
@@ -28,6 +28,8 @@ import { EventSourcePolyfill as EventSource } from "event-source-polyfill";
|
||||
import { randomBytes } from "../../src/utils/random";
|
||||
import { GroupOptions, Note, Notebook } from "../../src/types";
|
||||
import { NoteContent } from "../../src/collections/session-content";
|
||||
import { SqliteDriver } from "kysely";
|
||||
import BetterSQLite3 from "better-sqlite3";
|
||||
|
||||
const TEST_NOTEBOOK: Partial<Notebook> = {
|
||||
title: "Test Notebook",
|
||||
@@ -45,7 +47,8 @@ function databaseTest() {
|
||||
storage: new NodeStorageInterface(),
|
||||
eventsource: EventSource,
|
||||
fs: FS,
|
||||
compressor: Compressor
|
||||
compressor: Compressor,
|
||||
sqlite: new SqliteDriver({ database: BetterSQLite3(":memory:") })
|
||||
});
|
||||
return db.init().then(() => db);
|
||||
}
|
||||
|
||||
@@ -58,11 +58,11 @@ test("lock a note", () =>
|
||||
noteTest().then(async ({ db, id }) => {
|
||||
await db.vault.create("password");
|
||||
await db.vault.add(id);
|
||||
const note = db.notes.note(id);
|
||||
const note = await db.notes.note(id);
|
||||
|
||||
expect(note.headline).toBe("");
|
||||
|
||||
const content = await db.content.raw(note.data.contentId, false);
|
||||
const content = await db.content.get(note.contentId);
|
||||
expect(content.noteId).toBeDefined();
|
||||
expect(content.data.iv).toBeDefined();
|
||||
expect(content.data.cipher).toBeDefined();
|
||||
@@ -72,9 +72,9 @@ test("locked note is not favorited", () =>
|
||||
noteTest().then(async ({ db, id }) => {
|
||||
await db.vault.create("password");
|
||||
await db.vault.add(id);
|
||||
const note = db.notes.note(id);
|
||||
const note = await db.notes.note(id);
|
||||
|
||||
expect(note.data.favorite).toBeFalsy();
|
||||
expect(note.favorite).toBeFalsy();
|
||||
}));
|
||||
|
||||
test("unlock a note", () =>
|
||||
@@ -92,12 +92,12 @@ test("unlock a note permanently", () =>
|
||||
await db.vault.create("password");
|
||||
await db.vault.add(id);
|
||||
await db.vault.remove(id, "password");
|
||||
const note = db.notes.note(id);
|
||||
const note = await db.notes.note(id);
|
||||
expect(note.id).toBe(id);
|
||||
expect(note.headline).not.toBe("");
|
||||
const content = await db.content.raw(note.data.contentId);
|
||||
const content = await db.content.get(note.contentId);
|
||||
expect(content.data).toBeDefined();
|
||||
expect(typeof content.data).toBe("string");
|
||||
expect(typeof content.data).toBe("object");
|
||||
}));
|
||||
|
||||
test("save a locked note", () =>
|
||||
@@ -105,13 +105,12 @@ test("save a locked note", () =>
|
||||
await db.vault.create("password");
|
||||
await db.vault.add(id);
|
||||
|
||||
const note = db.notes.note(id).data;
|
||||
const note = await db.notes.note(id);
|
||||
await db.vault.save(note);
|
||||
|
||||
const content = await db.content.raw(note.contentId);
|
||||
const content = await db.content.get(note.contentId);
|
||||
|
||||
expect(content.data.cipher).toBeTypeOf("string");
|
||||
expect(() => JSON.parse()).toThrow();
|
||||
}));
|
||||
|
||||
test("save an edited locked note", () =>
|
||||
@@ -119,13 +118,13 @@ test("save an edited locked note", () =>
|
||||
await db.vault.create("password");
|
||||
await db.vault.add(id);
|
||||
|
||||
const note = db.notes.note(id).data;
|
||||
const note = await db.notes.note(id);
|
||||
await db.vault.save({
|
||||
...note,
|
||||
content: { type: "tiptap", data: "<p>hello world</p>" }
|
||||
});
|
||||
|
||||
const content = await db.content.raw(note.contentId);
|
||||
const content = await db.content.get(note.contentId);
|
||||
|
||||
expect(content.data.cipher).toBeTypeOf("string");
|
||||
expect(() => JSON.parse(content.data.cipher)).toThrow();
|
||||
|
||||
6
packages/core/package-lock.json
generated
6
packages/core/package-lock.json
generated
@@ -10,6 +10,7 @@
|
||||
"hasInstallScript": true,
|
||||
"license": "GPL-3.0-or-later",
|
||||
"dependencies": {
|
||||
"@leeoniya/ufuzzy": "^1.0.10",
|
||||
"@microsoft/signalr": "^8.0.0",
|
||||
"@notesnook/logger": "file:../logger",
|
||||
"@readme/data-urls": "^3.0.0",
|
||||
@@ -1884,6 +1885,11 @@
|
||||
"@jridgewell/sourcemap-codec": "^1.4.14"
|
||||
}
|
||||
},
|
||||
"node_modules/@leeoniya/ufuzzy": {
|
||||
"version": "1.0.14",
|
||||
"resolved": "https://registry.npmjs.org/@leeoniya/ufuzzy/-/ufuzzy-1.0.14.tgz",
|
||||
"integrity": "sha512-/xF4baYuCQMo+L/fMSUrZnibcu0BquEGnbxfVPiZhs/NbJeKj4c/UmFpQzW9Us0w45ui/yYW3vyaqawhNYsTzA=="
|
||||
},
|
||||
"node_modules/@microsoft/signalr": {
|
||||
"version": "8.0.0",
|
||||
"license": "MIT",
|
||||
|
||||
@@ -47,6 +47,7 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@microsoft/signalr": "^8.0.0",
|
||||
"@leeoniya/ufuzzy": "^1.0.10",
|
||||
"@notesnook/logger": "file:../logger",
|
||||
"@readme/data-urls": "^3.0.0",
|
||||
"@streetwriters/showdown": "^3.0.5-alpha",
|
||||
|
||||
@@ -22,9 +22,9 @@ import { Crypto, CryptoAccessor } from "../database/crypto";
|
||||
import { FileStorage, FileStorageAccessor } from "../database/fs";
|
||||
import { Notebooks } from "../collections/notebooks";
|
||||
import Trash from "../collections/trash";
|
||||
import { Tags } from "../collections/tags";
|
||||
import { Colors } from "../collections/colors";
|
||||
import Sync, { SyncOptions } from "./sync";
|
||||
import { Tags } from "../collections/tags";
|
||||
import { Colors, } from "../collections/colors";
|
||||
import Vault from "./vault";
|
||||
import Lookup from "./lookup";
|
||||
import { Content } from "../collections/content";
|
||||
@@ -62,13 +62,14 @@ import { Attachment } from "../types";
|
||||
import { Settings } from "../collections/settings";
|
||||
import { DatabaseAccessor, DatabaseSchema, createDatabase } from "../database";
|
||||
import { Kysely, SqliteDriver, Transaction } from "kysely";
|
||||
import BetterSQLite3 from "better-sqlite3";
|
||||
import { CachedCollection } from "../database/cached-collection";
|
||||
|
||||
type EventSourceConstructor = new (
|
||||
uri: string,
|
||||
init: EventSourceInit & { headers?: Record<string, string> }
|
||||
) => EventSource;
|
||||
type Options = {
|
||||
sqlite: SqliteDriver;
|
||||
storage: IStorage;
|
||||
eventsource?: EventSourceConstructor;
|
||||
fs: IFileStorage;
|
||||
@@ -126,18 +127,20 @@ class Database {
|
||||
};
|
||||
|
||||
private _transaction?: Transaction<DatabaseSchema>;
|
||||
private transactionMutex = new Mutex();
|
||||
transaction = (
|
||||
executor: (tr: Transaction<DatabaseSchema>) => void | Promise<void>
|
||||
) => {
|
||||
if (this._transaction) return executor(this._transaction);
|
||||
return this.sql()
|
||||
.transaction()
|
||||
.execute(async (tr) => {
|
||||
this._transaction = tr;
|
||||
await executor(tr);
|
||||
this._transaction = undefined;
|
||||
})
|
||||
.finally(() => (this._transaction = undefined));
|
||||
return this.transactionMutex.runExclusive(() =>
|
||||
this.sql()
|
||||
.transaction()
|
||||
.execute(async (tr) => {
|
||||
this._transaction = tr;
|
||||
await executor(tr);
|
||||
this._transaction = undefined;
|
||||
})
|
||||
.finally(() => (this._transaction = undefined))
|
||||
);
|
||||
};
|
||||
|
||||
private options?: Options;
|
||||
@@ -174,6 +177,23 @@ class Database {
|
||||
relations = new Relations(this);
|
||||
notes = new Notes(this);
|
||||
|
||||
/**
|
||||
* @deprecated only kept here for migration purposes
|
||||
*/
|
||||
legacyTags = new CachedCollection(this.storage, "tags", this.eventManager);
|
||||
/**
|
||||
* @deprecated only kept here for migration purposes
|
||||
*/
|
||||
legacyColors = new CachedCollection(
|
||||
this.storage,
|
||||
"colors",
|
||||
this.eventManager
|
||||
);
|
||||
/**
|
||||
* @deprecated only kept here for migration purposes
|
||||
*/
|
||||
legacyNotes = new CachedCollection(this.storage, "notes", this.eventManager);
|
||||
|
||||
// constructor() {
|
||||
// this.sseMutex = new Mutex();
|
||||
// // this.lastHeartbeat = undefined; // { local: 0, server: 0 };
|
||||
@@ -209,9 +229,7 @@ class Database {
|
||||
this.disconnectSSE();
|
||||
});
|
||||
|
||||
this._sql = await createDatabase(
|
||||
new SqliteDriver({ database: BetterSQLite3("nn.db") })
|
||||
);
|
||||
if (this.options) this._sql = await createDatabase(this.options.sqlite);
|
||||
|
||||
await this._validate();
|
||||
|
||||
@@ -242,6 +260,11 @@ class Database {
|
||||
|
||||
await this.trash.init();
|
||||
|
||||
// legacy collections
|
||||
await this.legacyTags.init();
|
||||
await this.legacyColors.init();
|
||||
await this.legacyNotes.init();
|
||||
|
||||
// we must not wait on network requests that's why
|
||||
// no await
|
||||
this.monographs.refresh();
|
||||
|
||||
@@ -17,7 +17,7 @@ You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
import { filter, parse } from "liqe";
|
||||
import uFuzzy from "@leeoniya/ufuzzy";
|
||||
import Database from ".";
|
||||
import {
|
||||
Attachment,
|
||||
@@ -26,31 +26,41 @@ import {
|
||||
Notebook,
|
||||
Reminder,
|
||||
Tag,
|
||||
TrashItem,
|
||||
isDeleted
|
||||
TrashItem
|
||||
} from "../types";
|
||||
import { isUnencryptedContent } from "../collections/content";
|
||||
import { isFalse } from "../database";
|
||||
import { sql } from "kysely";
|
||||
|
||||
export default class Lookup {
|
||||
constructor(private readonly db: Database) {}
|
||||
|
||||
async notes(notes: Note[], query: string) {
|
||||
const contents = await this.db.content.multi(
|
||||
notes.map((note) => note.contentId || "")
|
||||
);
|
||||
|
||||
return search(notes, query, (note) => {
|
||||
let text = note.title;
|
||||
const noteContent = note.contentId ? contents[note.contentId] : "";
|
||||
if (
|
||||
!note.locked &&
|
||||
noteContent &&
|
||||
!isDeleted(noteContent) &&
|
||||
isUnencryptedContent(noteContent)
|
||||
async notes(
|
||||
query: string,
|
||||
ids?: string[]
|
||||
): Promise<Note & { rank: number }[]> {
|
||||
return (await this.db
|
||||
.sql()
|
||||
.with("matching", (eb) =>
|
||||
eb
|
||||
.selectFrom("content_fts")
|
||||
.where("data", "match", query)
|
||||
.select(["noteId as id", "rank"])
|
||||
.unionAll(
|
||||
eb
|
||||
.selectFrom("notes_fts")
|
||||
.where("title", "match", query)
|
||||
// add 10 weight to title
|
||||
.select(["id", sql.raw<number>(`rank * 10`).as("rank")])
|
||||
)
|
||||
)
|
||||
text += noteContent.data;
|
||||
return text;
|
||||
});
|
||||
.selectFrom("notes")
|
||||
.$if(!!ids && ids.length > 0, (eb) => eb.where("id", "in", ids!))
|
||||
.where(isFalse("notes.deleted"))
|
||||
.where(isFalse("notes.dateDeleted"))
|
||||
.innerJoin("matching", (eb) => eb.onRef("notes.id", "==", "matching.id"))
|
||||
.orderBy("matching.rank")
|
||||
.selectAll()
|
||||
.execute()) as unknown as Note & { rank: number }[];
|
||||
}
|
||||
|
||||
notebooks(array: Notebook[], query: string) {
|
||||
@@ -70,11 +80,7 @@ export default class Lookup {
|
||||
}
|
||||
|
||||
attachments(array: Attachment[], query: string) {
|
||||
return search(
|
||||
array,
|
||||
query,
|
||||
(n) => `${n.metadata.filename} ${n.metadata.type} ${n.metadata.hash}`
|
||||
);
|
||||
return search(array, query, (n) => `${n.filename} ${n.mimeType} ${n.hash}`);
|
||||
}
|
||||
|
||||
private byTitle<T extends { title: string }>(array: T[], query: string) {
|
||||
@@ -82,14 +88,16 @@ export default class Lookup {
|
||||
}
|
||||
}
|
||||
|
||||
const uf = new uFuzzy();
|
||||
function search<T>(items: T[], query: string, selector: (item: T) => string) {
|
||||
try {
|
||||
return filter(
|
||||
parse(`text:"${query.toLowerCase()}"`),
|
||||
items.map((item) => {
|
||||
return { item, text: selector(item).toLowerCase() };
|
||||
})
|
||||
).map((v) => v.item);
|
||||
const [_idxs, _info, order] = uf.search(items.map(selector), query, true);
|
||||
if (!order) return [];
|
||||
const filtered: T[] = [];
|
||||
for (const i of order) {
|
||||
filtered.push(items[i]);
|
||||
}
|
||||
return filtered;
|
||||
} catch (e) {
|
||||
return [];
|
||||
}
|
||||
|
||||
@@ -26,7 +26,7 @@ import {
|
||||
import Collector from "../collector";
|
||||
import { test, expect } from "vitest";
|
||||
|
||||
test.only("newly created note should get included in collector", () =>
|
||||
test("newly created note should get included in collector", () =>
|
||||
databaseTest().then(async (db) => {
|
||||
await loginFakeUser(db);
|
||||
const collector = new Collector(db);
|
||||
@@ -42,7 +42,7 @@ test.only("newly created note should get included in collector", () =>
|
||||
|
||||
expect(items).toHaveLength(2);
|
||||
expect(items[0].type).toBe("content");
|
||||
expect(items[0].items[0].id).toBe(db.notes.note(noteId).data.contentId);
|
||||
expect(items[0].items[0].id).toBe((await db.notes.note(noteId)).contentId);
|
||||
expect(items[1].items[0].id).toBe(noteId);
|
||||
expect(items[1].type).toBe("note");
|
||||
}));
|
||||
|
||||
@@ -21,12 +21,14 @@ import { Cipher, SerializedKey } from "@notesnook/crypto";
|
||||
import Database from "..";
|
||||
import { CURRENT_DATABASE_VERSION, EV, EVENTS } from "../../common";
|
||||
import { logger } from "../../logger";
|
||||
import { SYNC_COLLECTIONS_MAP, SyncItem, SyncTransferItem } from "./types";
|
||||
import {
|
||||
SyncItem,
|
||||
SyncTransferItem,
|
||||
SYNC_COLLECTIONS_MAP,
|
||||
SYNC_ITEM_TYPES
|
||||
} from "./types";
|
||||
import { Item, MaybeDeletedItem } from "../../types";
|
||||
|
||||
const ASYNC_COLLECTIONS_MAP = {
|
||||
content: "content"
|
||||
} as const;
|
||||
class Collector {
|
||||
logger = logger.scope("SyncCollector");
|
||||
constructor(private readonly db: Database) {}
|
||||
@@ -42,58 +44,22 @@ class Collector {
|
||||
throw new Error("User encryption key not generated. Please relogin.");
|
||||
}
|
||||
|
||||
const attachments = await this.prepareChunk(
|
||||
this.db.attachments.syncable,
|
||||
lastSyncedTimestamp,
|
||||
isForceSync,
|
||||
key
|
||||
);
|
||||
if (attachments) yield { items: attachments, type: "attachment" };
|
||||
|
||||
for (const itemType in ASYNC_COLLECTIONS_MAP) {
|
||||
const collectionKey =
|
||||
ASYNC_COLLECTIONS_MAP[itemType as keyof typeof ASYNC_COLLECTIONS_MAP];
|
||||
for (const itemType of SYNC_ITEM_TYPES) {
|
||||
const collectionKey = SYNC_COLLECTIONS_MAP[itemType];
|
||||
const collection = this.db[collectionKey].collection;
|
||||
for await (const chunk of collection.iterate(chunkSize)) {
|
||||
const items = await this.prepareChunk(
|
||||
chunk.map((item) => item[1]),
|
||||
lastSyncedTimestamp,
|
||||
isForceSync,
|
||||
key
|
||||
);
|
||||
for await (const chunk of collection.unsynced(
|
||||
isForceSync ? 0 : lastSyncedTimestamp,
|
||||
chunkSize
|
||||
)) {
|
||||
const items = await this.prepareChunk(chunk, key);
|
||||
if (!items) continue;
|
||||
yield { items, type: itemType as keyof typeof ASYNC_COLLECTIONS_MAP };
|
||||
yield { items, type: itemType };
|
||||
}
|
||||
}
|
||||
|
||||
// for (const itemType in SYNC_COLLECTIONS_MAP) {
|
||||
// const collectionKey =
|
||||
// SYNC_COLLECTIONS_MAP[itemType as keyof typeof SYNC_COLLECTIONS_MAP];
|
||||
// const collection = this.db[collectionKey].collection;
|
||||
// for (const chunk of collection.iterateSync(chunkSize)) {
|
||||
// const items = await this.prepareChunk(
|
||||
// chunk,
|
||||
// lastSyncedTimestamp,
|
||||
// isForceSync,
|
||||
// key
|
||||
// );
|
||||
// if (!items) continue;
|
||||
// yield { items, type: itemType as keyof typeof SYNC_COLLECTIONS_MAP };
|
||||
// }
|
||||
// }
|
||||
}
|
||||
|
||||
async prepareChunk(
|
||||
chunk: MaybeDeletedItem<Item>[],
|
||||
lastSyncedTimestamp: number,
|
||||
isForceSync: boolean,
|
||||
key: SerializedKey
|
||||
) {
|
||||
const { ids, items } = filterSyncableItems(
|
||||
chunk,
|
||||
lastSyncedTimestamp,
|
||||
isForceSync
|
||||
);
|
||||
async prepareChunk(chunk: MaybeDeletedItem<Item>[], key: SerializedKey) {
|
||||
const { ids, items } = filterSyncableItems(chunk);
|
||||
if (!ids.length) return;
|
||||
const ciphers = await this.db.storage().encryptMulti(key, items);
|
||||
return toPushItem(ids, ciphers);
|
||||
@@ -114,39 +80,22 @@ function toPushItem(ids: string[], ciphers: Cipher<"base64">[]) {
|
||||
return items;
|
||||
}
|
||||
|
||||
function filterSyncableItems(
|
||||
items: MaybeDeletedItem<Item>[],
|
||||
lastSyncedTimestamp: number,
|
||||
isForceSync = false
|
||||
): { items: string[]; ids: string[] } {
|
||||
function filterSyncableItems(items: MaybeDeletedItem<Item>[]): {
|
||||
items: string[];
|
||||
ids: string[];
|
||||
} {
|
||||
if (!items || !items.length) return { items: [], ids: [] };
|
||||
|
||||
const ids = [];
|
||||
const syncableItems = [];
|
||||
for (const item of items) {
|
||||
if (!item) continue;
|
||||
// const isSyncable = !item.synced || isForceSync;
|
||||
|
||||
const isSyncable = !item.synced || isForceSync;
|
||||
const isUnsynced = item.dateModified > lastSyncedTimestamp || isForceSync;
|
||||
|
||||
// synced is a local only property
|
||||
// synced is a local only property. we don't want to sync it.
|
||||
delete item.synced;
|
||||
|
||||
if (isUnsynced && isSyncable) {
|
||||
ids.push(item.id);
|
||||
syncableItems.push(
|
||||
JSON.stringify(
|
||||
"localOnly" in item && item.localOnly
|
||||
? {
|
||||
id: item.id,
|
||||
deleted: true,
|
||||
dateModified: item.dateModified,
|
||||
deleteReason: "localOnly"
|
||||
}
|
||||
: item
|
||||
)
|
||||
);
|
||||
}
|
||||
ids.push(item.id);
|
||||
syncableItems.push(JSON.stringify(item));
|
||||
}
|
||||
return { items: syncableItems, ids };
|
||||
}
|
||||
|
||||
@@ -37,18 +37,9 @@ import { Mutex } from "async-mutex";
|
||||
import Database from "..";
|
||||
import { migrateItem } from "../../migrations";
|
||||
import { SerializedKey } from "@notesnook/crypto";
|
||||
import {
|
||||
ItemMap,
|
||||
MaybeDeletedItem,
|
||||
Note,
|
||||
Notebook,
|
||||
TrashOrItem
|
||||
} from "../../types";
|
||||
import {
|
||||
MERGE_COLLECTIONS_MAP,
|
||||
SyncableItemType,
|
||||
SyncTransferItem
|
||||
} from "./types";
|
||||
import { Item, MaybeDeletedItem } from "../../types";
|
||||
import { SYNC_COLLECTIONS_MAP, SyncTransferItem } from "./types";
|
||||
import { DownloadableFile } from "../../database/fs";
|
||||
|
||||
export type SyncOptions = {
|
||||
type: "full" | "fetch" | "send";
|
||||
@@ -347,14 +338,13 @@ class Sync {
|
||||
* @private
|
||||
*/
|
||||
async uploadAttachments() {
|
||||
const attachments = this.db.attachments.pending;
|
||||
const attachments = await this.db.attachments.pending.items();
|
||||
this.logger.info("Uploading attachments...", { total: attachments.length });
|
||||
|
||||
await this.db.fs().queueUploads(
|
||||
attachments.map((a) => ({
|
||||
filename: a.metadata.hash,
|
||||
chunkSize: a.chunkSize,
|
||||
metadata: a.metadata
|
||||
attachments.map<DownloadableFile>((a) => ({
|
||||
filename: a.hash,
|
||||
chunkSize: a.chunkSize
|
||||
})),
|
||||
"sync-uploads"
|
||||
);
|
||||
@@ -389,36 +379,29 @@ class Sync {
|
||||
)
|
||||
);
|
||||
|
||||
let items: (
|
||||
| MaybeDeletedItem<
|
||||
ItemMap[SyncableItemType] | TrashOrItem<Note> | TrashOrItem<Notebook>
|
||||
>
|
||||
| undefined
|
||||
)[] = [];
|
||||
const collectionType = SYNC_COLLECTIONS_MAP[itemType];
|
||||
const collection = this.db[collectionType].collection;
|
||||
const localItems = await collection.items(chunk.items.map((i) => i.id));
|
||||
let items: (MaybeDeletedItem<Item> | undefined)[] = [];
|
||||
if (itemType === "content") {
|
||||
const localItems = await this.db.content.multi(
|
||||
chunk.items.map((i) => i.id)
|
||||
);
|
||||
items = await Promise.all(
|
||||
deserialized.map((item) =>
|
||||
this.merger.mergeContent(item, localItems[item.id], dbLastSynced)
|
||||
)
|
||||
);
|
||||
} else {
|
||||
items = this.merger.isSyncCollection(itemType)
|
||||
? deserialized.map((item) =>
|
||||
this.merger.mergeItemSync(item, itemType, dbLastSynced)
|
||||
)
|
||||
: await Promise.all(
|
||||
deserialized.map((item) =>
|
||||
this.merger.mergeItem(item, itemType, dbLastSynced)
|
||||
items =
|
||||
itemType === "attachment"
|
||||
? await Promise.all(
|
||||
deserialized.map((item) =>
|
||||
this.merger.mergeItemAsync(item, localItems[item.id], itemType)
|
||||
)
|
||||
)
|
||||
);
|
||||
: deserialized.map((item) =>
|
||||
this.merger.mergeItemSync(item, localItems[item.id], itemType)
|
||||
);
|
||||
}
|
||||
|
||||
const collectionType = MERGE_COLLECTIONS_MAP[itemType];
|
||||
await this.db[collectionType].collection.setItems(items as any);
|
||||
|
||||
if (
|
||||
notify &&
|
||||
(itemType === "note" || itemType === "content") &&
|
||||
@@ -428,6 +411,8 @@ class Sync {
|
||||
this.db.eventManager.publish(EVENTS.syncItemMerged, item)
|
||||
);
|
||||
}
|
||||
|
||||
await collection.put(items as any);
|
||||
}
|
||||
|
||||
private async pushItem(item: SyncTransferItem, newLastSynced: number) {
|
||||
|
||||
@@ -20,26 +20,15 @@ along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
import { logger } from "../../logger";
|
||||
import { isHTMLEqual } from "../../utils/html-diff";
|
||||
import Database from "..";
|
||||
import { SYNC_COLLECTIONS_MAP } from "./types";
|
||||
import {
|
||||
Attachment,
|
||||
ContentItem,
|
||||
Item,
|
||||
ItemMap,
|
||||
MaybeDeletedItem,
|
||||
Note,
|
||||
Notebook,
|
||||
TrashOrItem,
|
||||
isDeleted
|
||||
} from "../../types";
|
||||
import { ContentItem, Item, MaybeDeletedItem, isDeleted } from "../../types";
|
||||
|
||||
class Merger {
|
||||
logger = logger.scope("Merger");
|
||||
constructor(private readonly db: Database) {}
|
||||
|
||||
isSyncCollection(type: string): type is keyof typeof SYNC_COLLECTIONS_MAP {
|
||||
return type in SYNC_COLLECTIONS_MAP;
|
||||
}
|
||||
// isSyncCollection(type: string): type is keyof typeof SYNC_COLLECTIONS_MAP {
|
||||
// return type in SYNC_COLLECTIONS_MAP;
|
||||
// }
|
||||
|
||||
isConflicted(
|
||||
localItem: MaybeDeletedItem<Item>,
|
||||
@@ -79,12 +68,18 @@ class Merger {
|
||||
}
|
||||
}
|
||||
|
||||
mergeItemSync<TType extends keyof typeof SYNC_COLLECTIONS_MAP>(
|
||||
remoteItem: MaybeDeletedItem<
|
||||
ItemMap[TType] | TrashOrItem<Note> | TrashOrItem<Notebook>
|
||||
>,
|
||||
type: TType,
|
||||
_lastSynced: number
|
||||
mergeItemSync(
|
||||
remoteItem: MaybeDeletedItem<Item>,
|
||||
localItem: MaybeDeletedItem<Item> | undefined,
|
||||
type:
|
||||
| "shortcut"
|
||||
| "reminder"
|
||||
| "tag"
|
||||
| "color"
|
||||
| "note"
|
||||
| "relation"
|
||||
| "notebook"
|
||||
| "settingitem"
|
||||
) {
|
||||
switch (type) {
|
||||
case "shortcut":
|
||||
@@ -95,9 +90,6 @@ class Merger {
|
||||
case "relation":
|
||||
case "notebook":
|
||||
case "settingitem": {
|
||||
const localItem = this.db[SYNC_COLLECTIONS_MAP[type]].collection.getRaw(
|
||||
remoteItem.id
|
||||
);
|
||||
if (!localItem || remoteItem.dateModified > localItem.dateModified) {
|
||||
return remoteItem;
|
||||
}
|
||||
@@ -107,8 +99,8 @@ class Merger {
|
||||
}
|
||||
|
||||
async mergeContent(
|
||||
remoteItem: MaybeDeletedItem<ContentItem>,
|
||||
localItem: MaybeDeletedItem<ContentItem>,
|
||||
remoteItem: MaybeDeletedItem<Item>,
|
||||
localItem: MaybeDeletedItem<Item> | undefined,
|
||||
lastSynced: number
|
||||
) {
|
||||
if (localItem && "localOnly" in localItem && localItem.localOnly) return;
|
||||
@@ -120,63 +112,55 @@ class Merger {
|
||||
if (!localItem || conflicted === "merge") {
|
||||
return remoteItem;
|
||||
} else if (conflicted === "conflict") {
|
||||
if (isDeleted(localItem) || isDeleted(remoteItem)) {
|
||||
if (remoteItem.dateModified > localItem.dateModified) return remoteItem;
|
||||
return;
|
||||
}
|
||||
|
||||
const note = this.db.notes.collection.get(localItem.noteId);
|
||||
if (!note) return;
|
||||
|
||||
// if hashes are equal do nothing
|
||||
if (
|
||||
!note.locked &&
|
||||
(!remoteItem ||
|
||||
!remoteItem ||
|
||||
!localItem.data ||
|
||||
!remoteItem.data ||
|
||||
isHTMLEqual(localItem.data, remoteItem.data))
|
||||
)
|
||||
return;
|
||||
|
||||
if (note.locked) {
|
||||
// if note is locked or content is deleted we keep the most recent version.
|
||||
isDeleted(localItem) ||
|
||||
isDeleted(remoteItem) ||
|
||||
remoteItem.type !== "tiptap" ||
|
||||
localItem.type !== "tiptap" ||
|
||||
localItem.locked ||
|
||||
remoteItem.locked ||
|
||||
!localItem.data ||
|
||||
!remoteItem.data ||
|
||||
isHTMLEqual(localItem.data, remoteItem.data)
|
||||
) {
|
||||
if (remoteItem.dateModified > localItem.dateModified) return remoteItem;
|
||||
} else {
|
||||
// otherwise we trigger the conflicts
|
||||
await this.db.notes.add({
|
||||
id: localItem.noteId,
|
||||
conflicted: true
|
||||
});
|
||||
await this.db.storage().write("hasConflicts", true);
|
||||
return {
|
||||
...localItem,
|
||||
conflicted: remoteItem
|
||||
};
|
||||
return;
|
||||
}
|
||||
|
||||
// otherwise we trigger the conflicts
|
||||
await this.db.notes.add({
|
||||
id: localItem.noteId,
|
||||
conflicted: true
|
||||
});
|
||||
return {
|
||||
...localItem,
|
||||
conflicted: remoteItem
|
||||
} as ContentItem;
|
||||
}
|
||||
}
|
||||
|
||||
async mergeItem(
|
||||
remoteItem: MaybeDeletedItem<Attachment>,
|
||||
type: "settings" | "attachment",
|
||||
_lastSynced: number
|
||||
async mergeItemAsync(
|
||||
remoteItem: MaybeDeletedItem<Item>,
|
||||
localItem: MaybeDeletedItem<Item> | undefined,
|
||||
type: "attachment"
|
||||
) {
|
||||
switch (type) {
|
||||
case "attachment": {
|
||||
if (isDeleted(remoteItem)) return remoteItem;
|
||||
|
||||
if (remoteItem.type !== "attachment") return;
|
||||
|
||||
const localAttachment = this.db.attachments.attachment(
|
||||
remoteItem.metadata.hash
|
||||
);
|
||||
if (!localItem) return remoteItem;
|
||||
if (
|
||||
localAttachment &&
|
||||
localAttachment.dateUploaded !== remoteItem.dateUploaded
|
||||
isDeleted(localItem) ||
|
||||
isDeleted(remoteItem) ||
|
||||
remoteItem.type !== "attachment" ||
|
||||
localItem.type !== "attachment"
|
||||
) {
|
||||
if (remoteItem.dateModified > localItem.dateModified)
|
||||
return remoteItem;
|
||||
return;
|
||||
}
|
||||
|
||||
if (localItem.dateUploaded !== remoteItem.dateUploaded) {
|
||||
const isRemoved = await this.db.attachments.remove(
|
||||
localAttachment.metadata.hash,
|
||||
localItem.hash,
|
||||
true
|
||||
);
|
||||
if (!isRemoved)
|
||||
|
||||
@@ -37,22 +37,46 @@ export type SyncItem = {
|
||||
v: number;
|
||||
} & Cipher<"base64">;
|
||||
|
||||
// export const SYNC_COLLECTIONS_MAP = {
|
||||
// note: "notes",
|
||||
// notebook: "notebooks",
|
||||
// shortcut: "shortcuts",
|
||||
// reminder: "reminders",
|
||||
// relation: "relations",
|
||||
// tag: "tags",
|
||||
// color: "colors",
|
||||
// settingitem: "settings"
|
||||
// } as const;
|
||||
|
||||
// export const ASYNC_COLLECTIONS_MAP = {
|
||||
// content: "content"
|
||||
// } as const;
|
||||
|
||||
export const SYNC_COLLECTIONS_MAP = {
|
||||
note: "notes",
|
||||
settingitem: "settings",
|
||||
attachment: "attachments",
|
||||
content: "content",
|
||||
notebook: "notebooks",
|
||||
shortcut: "shortcuts",
|
||||
reminder: "reminders",
|
||||
relation: "relations",
|
||||
tag: "tags",
|
||||
color: "colors",
|
||||
settingitem: "settings"
|
||||
note: "notes"
|
||||
} as const;
|
||||
|
||||
export const MERGE_COLLECTIONS_MAP = {
|
||||
...SYNC_COLLECTIONS_MAP,
|
||||
attachment: "attachments",
|
||||
content: "content"
|
||||
} as const;
|
||||
export const SYNC_ITEM_TYPES = [
|
||||
"settingitem",
|
||||
"attachment",
|
||||
"content",
|
||||
"notebook",
|
||||
"shortcut",
|
||||
"reminder",
|
||||
"relation",
|
||||
"tag",
|
||||
"color",
|
||||
"note"
|
||||
] as const;
|
||||
|
||||
export type SyncTransferItem = {
|
||||
items: SyncItem[];
|
||||
|
||||
@@ -22,7 +22,7 @@ import Database from ".";
|
||||
import { CHECK_IDS, EV, EVENTS, checkIsUserPremium } from "../common";
|
||||
import { tinyToTiptap } from "../migrations";
|
||||
import { isCipher } from "../database/crypto";
|
||||
import { EncryptedContentItem, Note, isDeleted } from "../types";
|
||||
import { EncryptedContentItem, Note } from "../types";
|
||||
import {
|
||||
EMPTY_CONTENT,
|
||||
isEncryptedContent,
|
||||
@@ -102,14 +102,10 @@ export default class Vault {
|
||||
async changePassword(oldPassword: string, newPassword: string) {
|
||||
if (await this.unlock(oldPassword)) {
|
||||
const contentItems = [];
|
||||
for (const note of this.db.notes.locked) {
|
||||
for await (const note of this.db.notes.locked) {
|
||||
if (!note.contentId) continue;
|
||||
const encryptedContent = await this.db.content.raw(note.contentId);
|
||||
if (
|
||||
!encryptedContent ||
|
||||
isDeleted(encryptedContent) ||
|
||||
!isEncryptedContent(encryptedContent)
|
||||
)
|
||||
const encryptedContent = await this.db.content.get(note.contentId);
|
||||
if (!encryptedContent || !isEncryptedContent(encryptedContent))
|
||||
continue;
|
||||
|
||||
try {
|
||||
@@ -143,8 +139,7 @@ export default class Vault {
|
||||
|
||||
async clear(password: string) {
|
||||
if (await this.unlock(password)) {
|
||||
await this.db.notes.init();
|
||||
for (const note of this.db.notes.locked) {
|
||||
for await (const note of this.db.notes.locked) {
|
||||
await this.unlockNote(note, password, true);
|
||||
}
|
||||
}
|
||||
@@ -152,10 +147,8 @@ export default class Vault {
|
||||
|
||||
async delete(deleteAllLockedNotes = false) {
|
||||
if (deleteAllLockedNotes) {
|
||||
await this.db.notes.init();
|
||||
await this.db.notes.remove(
|
||||
...this.db.notes.locked.map((note) => note.id)
|
||||
);
|
||||
const lockedIds = await this.db.notes.locked.ids();
|
||||
await this.db.notes.remove(...lockedIds);
|
||||
}
|
||||
await this.db.storage().remove("vaultKey");
|
||||
this.password = undefined;
|
||||
@@ -175,9 +168,9 @@ export default class Vault {
|
||||
* Permanently unlocks (remove from vault) a note
|
||||
*/
|
||||
async remove(noteId: string, password: string) {
|
||||
const note = this.db.notes.note(noteId);
|
||||
const note = await this.db.notes.note(noteId);
|
||||
if (!note) return;
|
||||
await this.unlockNote(note.data, password, true);
|
||||
await this.unlockNote(note, password, true);
|
||||
|
||||
if (!(await this.exists())) await this.create(password);
|
||||
}
|
||||
@@ -186,10 +179,10 @@ export default class Vault {
|
||||
* Temporarily unlock (open) a note
|
||||
*/
|
||||
async open(noteId: string, password: string) {
|
||||
const note = this.db.notes.note(noteId);
|
||||
const note = await this.db.notes.note(noteId);
|
||||
if (!note) return;
|
||||
|
||||
const unlockedNote = await this.unlockNote(note.data, password, false);
|
||||
const unlockedNote = await this.unlockNote(note, password, false);
|
||||
this.password = password;
|
||||
if (!(await this.exists())) await this.create(password);
|
||||
return unlockedNote;
|
||||
@@ -291,7 +284,7 @@ export default class Vault {
|
||||
const { id, content, sessionId, title } = item;
|
||||
let { type, data } = content || {};
|
||||
|
||||
const note = this.db.notes.note(id);
|
||||
const note = await this.db.notes.note(id);
|
||||
if (!note) return;
|
||||
|
||||
const contentId = note.contentId;
|
||||
@@ -299,12 +292,8 @@ export default class Vault {
|
||||
|
||||
// Case: when note is being newly locked
|
||||
if (!note.locked && (!data || !type) && !!contentId) {
|
||||
const rawContent = await this.db.content.raw(contentId);
|
||||
if (
|
||||
!rawContent ||
|
||||
isDeleted(rawContent) ||
|
||||
!isUnencryptedContent(rawContent)
|
||||
)
|
||||
const rawContent = await this.db.content.get(contentId);
|
||||
if (!rawContent || !isUnencryptedContent(rawContent))
|
||||
return await this.db.notes.add({
|
||||
id,
|
||||
locked: true
|
||||
@@ -332,9 +321,9 @@ export default class Vault {
|
||||
locked: true,
|
||||
headline: "",
|
||||
title: title || note.title,
|
||||
favorite: note.data.favorite,
|
||||
localOnly: note.data.localOnly,
|
||||
readonly: note.data.readonly,
|
||||
favorite: note.favorite,
|
||||
localOnly: note.localOnly,
|
||||
readonly: note.readonly,
|
||||
dateEdited: Date.now()
|
||||
});
|
||||
}
|
||||
@@ -342,13 +331,8 @@ export default class Vault {
|
||||
private async unlockNote(note: Note, password: string, perm = false) {
|
||||
if (!note.contentId) return;
|
||||
|
||||
const encryptedContent = await this.db.content.raw(note.contentId);
|
||||
if (
|
||||
!encryptedContent ||
|
||||
isDeleted(encryptedContent) ||
|
||||
!isEncryptedContent(encryptedContent)
|
||||
)
|
||||
return;
|
||||
const encryptedContent = await this.db.content.get(note.contentId);
|
||||
if (!encryptedContent || !isEncryptedContent(encryptedContent)) return;
|
||||
const content = await this.decryptContent(encryptedContent, password);
|
||||
|
||||
if (perm) {
|
||||
|
||||
@@ -33,7 +33,6 @@ import { Output } from "../interfaces";
|
||||
import { Attachment } from "../types";
|
||||
import Database from "../api";
|
||||
import { SQLCollection } from "../database/sql-collection";
|
||||
import { isCipher } from "../database/crypto";
|
||||
|
||||
export class Attachments implements ICollection {
|
||||
name = "attachments";
|
||||
@@ -113,12 +112,12 @@ export class Attachments implements ICollection {
|
||||
const id = oldAttachment?.id || getId();
|
||||
|
||||
const encryptedKey = item.key
|
||||
? JSON.stringify(await this.encryptKey(item.key))
|
||||
: oldAttachment?.encryptionKey;
|
||||
? await this.encryptKey(item.key)
|
||||
: oldAttachment?.key;
|
||||
const attachment = {
|
||||
...oldAttachment,
|
||||
...item,
|
||||
encryptionKey: encryptedKey
|
||||
key: encryptedKey
|
||||
};
|
||||
|
||||
const {
|
||||
@@ -131,7 +130,7 @@ export class Attachments implements ICollection {
|
||||
mimeType,
|
||||
salt,
|
||||
chunkSize,
|
||||
encryptionKey
|
||||
key
|
||||
} = attachment;
|
||||
|
||||
if (
|
||||
@@ -144,7 +143,7 @@ export class Attachments implements ICollection {
|
||||
// !mimeType ||
|
||||
!salt ||
|
||||
!chunkSize ||
|
||||
!encryptionKey
|
||||
!key
|
||||
) {
|
||||
console.error(
|
||||
"Attachment is invalid because all properties are required:",
|
||||
@@ -161,7 +160,7 @@ export class Attachments implements ICollection {
|
||||
salt,
|
||||
size,
|
||||
alg,
|
||||
encryptionKey,
|
||||
key,
|
||||
chunkSize,
|
||||
|
||||
filename:
|
||||
@@ -188,9 +187,7 @@ export class Attachments implements ICollection {
|
||||
return await this.db.crypto().generateRandomKey();
|
||||
}
|
||||
|
||||
async decryptKey(keyJSON: string): Promise<SerializedKey | null> {
|
||||
const key = JSON.parse(keyJSON);
|
||||
if (!isCipher(key)) return null;
|
||||
async decryptKey(key: Cipher<"base64">): Promise<SerializedKey | null> {
|
||||
const encryptionKey = await this._getEncryptionKey();
|
||||
const plainData = await this.db.storage().decrypt(encryptionKey, key);
|
||||
if (!plainData) return null;
|
||||
@@ -263,7 +260,7 @@ export class Attachments implements ICollection {
|
||||
const attachment = await this.attachment(hash);
|
||||
if (!attachment) return;
|
||||
|
||||
const key = await this.decryptKey(attachment.encryptionKey);
|
||||
const key = await this.decryptKey(attachment.key);
|
||||
if (!key) return;
|
||||
const data = await this.db.fs().readEncrypted(attachment.hash, key, {
|
||||
chunkSize: attachment.chunkSize,
|
||||
@@ -361,27 +358,25 @@ export class Attachments implements ICollection {
|
||||
|
||||
async cleanup() {
|
||||
const now = dayjs().unix();
|
||||
for (const attachment of this.deleted) {
|
||||
if (
|
||||
!attachment.metadata ||
|
||||
dayjs(attachment.dateDeleted).add(7, "days").unix() < now
|
||||
)
|
||||
continue;
|
||||
const ids: string[] = [];
|
||||
for await (const attachment of this.deleted) {
|
||||
if (dayjs(attachment.dateDeleted).add(7, "days").unix() < now) continue;
|
||||
|
||||
const isDeleted = await this.db.fs().deleteFile(attachment.metadata.hash);
|
||||
const isDeleted = await this.db.fs().deleteFile(attachment.hash);
|
||||
if (!isDeleted) continue;
|
||||
|
||||
await this.collection.softDelete(attachment.id);
|
||||
ids.push(attachment.id);
|
||||
}
|
||||
await this.collection.softDelete(ids);
|
||||
}
|
||||
|
||||
// get pending() {
|
||||
// return this.all.filter(
|
||||
// (attachment) =>
|
||||
// (!attachment.dateUploaded || attachment.dateUploaded <= 0) &&
|
||||
// this.db.relations.to(attachment, "note").length > 0
|
||||
// );
|
||||
// }
|
||||
get pending() {
|
||||
return this.collection.createFilter<Attachment>((qb) =>
|
||||
qb.where((eb) =>
|
||||
eb.or([eb("dateUploaded", "is", null), eb("dateUploaded", "<=", 0)])
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
// get uploaded() {
|
||||
// return this.all.filter((attachment) => !!attachment.dateUploaded);
|
||||
@@ -395,9 +390,11 @@ export class Attachments implements ICollection {
|
||||
// );
|
||||
// }
|
||||
|
||||
// get deleted() {
|
||||
// return this.all.filter((attachment) => !!attachment.dateDeleted);
|
||||
// }
|
||||
get deleted() {
|
||||
return this.collection.createFilter<Attachment>((qb) =>
|
||||
qb.where("dateDeleted", "is not", null)
|
||||
);
|
||||
}
|
||||
|
||||
// get images() {
|
||||
// return this.all.filter((attachment) => isImage(attachment.metadata.type));
|
||||
|
||||
@@ -85,9 +85,11 @@ export class Colors implements ICollection {
|
||||
// return this.collection.raw();
|
||||
// }
|
||||
|
||||
// get all(): Color[] {
|
||||
// return this.collection.items();
|
||||
// }
|
||||
get all() {
|
||||
return this.collection.createFilter<Color>((qb) =>
|
||||
qb.where("deleted", "is", null)
|
||||
);
|
||||
}
|
||||
|
||||
async remove(...ids: string[]) {
|
||||
await this.db.transaction(async () => {
|
||||
|
||||
90
packages/core/src/collections/conflicts.ts
Normal file
90
packages/core/src/collections/conflicts.ts
Normal file
@@ -0,0 +1,90 @@
|
||||
/*
|
||||
This file is part of the Notesnook project (https://notesnook.com/)
|
||||
|
||||
Copyright (C) 2023 Streetwriters (Private) Limited
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
import { getId } from "../utils/id";
|
||||
import { Tag } from "../types";
|
||||
import Database from "../api";
|
||||
import { ICollection } from "./collection";
|
||||
import { SQLCollection } from "../database/sql-collection";
|
||||
|
||||
export class Tags implements ICollection {
|
||||
name = "tags";
|
||||
readonly collection: SQLCollection<"tags", Tag>;
|
||||
constructor(private readonly db: Database) {
|
||||
this.collection = new SQLCollection(db.sql, "tags", db.eventManager);
|
||||
}
|
||||
|
||||
init() {
|
||||
return this.collection.init();
|
||||
}
|
||||
|
||||
tag(id: string) {
|
||||
return this.collection.get(id);
|
||||
}
|
||||
|
||||
// find(idOrTitle: string) {
|
||||
// return this.all.find(
|
||||
// (tag) => tag.title === idOrTitle || tag.id === idOrTitle
|
||||
// );
|
||||
// }
|
||||
|
||||
async add(item: Partial<Tag>) {
|
||||
if (item.remote)
|
||||
throw new Error("Please use db.tags.merge to merge remote tags.");
|
||||
|
||||
const id = item.id || getId(item.dateCreated);
|
||||
const oldTag = await this.tag(id);
|
||||
|
||||
item.title = item.title ? Tags.sanitize(item.title) : item.title;
|
||||
if (!item.title && !oldTag?.title) throw new Error("Title is required.");
|
||||
|
||||
await this.collection.upsert({
|
||||
id,
|
||||
dateCreated: item.dateCreated || oldTag?.dateCreated || Date.now(),
|
||||
dateModified: item.dateModified || oldTag?.dateModified || Date.now(),
|
||||
title: item.title || oldTag?.title || "",
|
||||
type: "tag",
|
||||
remote: false
|
||||
});
|
||||
return id;
|
||||
}
|
||||
|
||||
// get raw() {
|
||||
// return this.collection.raw();
|
||||
// }
|
||||
|
||||
// get all() {
|
||||
// return this.collection.items();
|
||||
// }
|
||||
|
||||
async remove(...ids: string[]) {
|
||||
await this.db.transaction(async () => {
|
||||
await this.db.relations.unlinkOfType("tag", ids);
|
||||
await this.collection.softDelete(ids);
|
||||
});
|
||||
}
|
||||
|
||||
exists(id: string) {
|
||||
return this.collection.exists(id);
|
||||
}
|
||||
|
||||
static sanitize(title: string) {
|
||||
return title.replace(/^\s+|\s+$/gm, "");
|
||||
}
|
||||
}
|
||||
@@ -42,7 +42,8 @@ export const EMPTY_CONTENT = (noteId: string): UnencryptedContentItem => ({
|
||||
id: getId(),
|
||||
localOnly: true,
|
||||
type: "tiptap",
|
||||
data: "<p></p>"
|
||||
data: "<p></p>",
|
||||
locked: false
|
||||
});
|
||||
|
||||
export class Content implements ICollection {
|
||||
@@ -71,59 +72,65 @@ export class Content implements ICollection {
|
||||
"Please use db.content.merge for merging remote content."
|
||||
);
|
||||
|
||||
const oldContent = content.id ? await this.raw(content.id) : undefined;
|
||||
if (content.id && oldContent) {
|
||||
content = {
|
||||
...oldContent,
|
||||
...content
|
||||
};
|
||||
}
|
||||
if (!content.noteId) return;
|
||||
const id = content.id || getId();
|
||||
const oldContent = content.id ? await this.get(content.id) : undefined;
|
||||
const noteId = oldContent?.noteId || content.noteId;
|
||||
if (!noteId) throw new Error("No noteId found to link the content to.");
|
||||
|
||||
const encryptedData = isCipher(content.data)
|
||||
? content.data
|
||||
: oldContent && isCipher(oldContent.data)
|
||||
? oldContent.data
|
||||
: null;
|
||||
|
||||
const unencryptedData =
|
||||
typeof content.data === "string"
|
||||
? content.data
|
||||
: oldContent && typeof oldContent.data === "string"
|
||||
? oldContent.data
|
||||
: "<p></p>";
|
||||
|
||||
const contentItem: ContentItem = {
|
||||
noteId: content.noteId,
|
||||
type: "tiptap",
|
||||
noteId,
|
||||
id,
|
||||
type: content.type || "tiptap",
|
||||
data: content.data || "<p></p>",
|
||||
dateEdited: content.dateEdited || Date.now(),
|
||||
dateCreated: content.dateCreated || Date.now(),
|
||||
dateModified: content.dateModified || Date.now(),
|
||||
localOnly: !!content.localOnly,
|
||||
conflicted: content.conflicted,
|
||||
dateResolved: content.dateResolved
|
||||
|
||||
dateEdited: content.dateEdited || oldContent?.dateEdited || Date.now(),
|
||||
dateCreated: content.dateCreated || oldContent?.dateCreated || Date.now(),
|
||||
dateModified: Date.now(),
|
||||
localOnly: content.localOnly || !!oldContent?.localOnly,
|
||||
|
||||
conflicted: content.conflicted || oldContent?.conflicted,
|
||||
dateResolved: content.dateResolved || oldContent?.dateResolved,
|
||||
|
||||
...(encryptedData
|
||||
? { locked: true, data: encryptedData }
|
||||
: { locked: false, data: unencryptedData })
|
||||
};
|
||||
|
||||
await this.collection.upsert(
|
||||
isUnencryptedContent(contentItem)
|
||||
? await this.extractAttachments(contentItem)
|
||||
: contentItem
|
||||
contentItem.locked
|
||||
? contentItem
|
||||
: await this.extractAttachments(contentItem)
|
||||
);
|
||||
|
||||
if (content.sessionId) {
|
||||
await this.db.noteHistory?.add(
|
||||
contentItem.noteId,
|
||||
content.sessionId,
|
||||
isCipher(contentItem.data),
|
||||
{
|
||||
data: contentItem.data,
|
||||
type: contentItem.type
|
||||
}
|
||||
);
|
||||
}
|
||||
if (content.sessionId)
|
||||
await this.db.noteHistory.add(content.sessionId, contentItem);
|
||||
|
||||
return id;
|
||||
}
|
||||
|
||||
async get(id: string) {
|
||||
const content = await this.raw(id);
|
||||
const content = await this.collection.get(id);
|
||||
if (!content || isDeleted(content)) return;
|
||||
return content;
|
||||
}
|
||||
|
||||
async raw(id: string) {
|
||||
const content = await this.collection.get(id);
|
||||
if (!content) return;
|
||||
return content;
|
||||
}
|
||||
// async raw(id: string) {
|
||||
// const content = await this.collection.get(id);
|
||||
// if (!content) return;
|
||||
// return content;
|
||||
// }
|
||||
|
||||
remove(...ids: string[]) {
|
||||
return this.collection.softDelete(ids);
|
||||
@@ -147,6 +154,17 @@ export class Content implements ICollection {
|
||||
.execute();
|
||||
}
|
||||
|
||||
async updateByNoteId(partial: Partial<ContentItem>, ...ids: string[]) {
|
||||
await this.db
|
||||
.sql()
|
||||
.updateTable("content")
|
||||
.where("noteId", "in", ids)
|
||||
.set({
|
||||
...partial,
|
||||
dateModified: Date.now()
|
||||
})
|
||||
.execute();
|
||||
}
|
||||
// multi(ids: string[]) {
|
||||
// return this.collection.getItems(ids);
|
||||
// }
|
||||
@@ -228,9 +246,8 @@ export class Content implements ICollection {
|
||||
}
|
||||
|
||||
async removeAttachments(id: string, hashes: string[]) {
|
||||
const contentItem = await this.raw(id);
|
||||
if (!contentItem || isDeleted(contentItem) || isCipher(contentItem.data))
|
||||
return;
|
||||
const contentItem = await this.get(id);
|
||||
if (!contentItem || isCipher(contentItem.data)) return;
|
||||
const content = getContentFromData(contentItem.type, contentItem.data);
|
||||
if (!content) return;
|
||||
contentItem.data = content.removeAttachments(hashes);
|
||||
@@ -310,11 +327,11 @@ export class Content implements ICollection {
|
||||
export function isUnencryptedContent(
|
||||
content: ContentItem
|
||||
): content is UnencryptedContentItem {
|
||||
return !isCipher(content.data);
|
||||
return content.locked === false;
|
||||
}
|
||||
|
||||
export function isEncryptedContent(
|
||||
content: ContentItem
|
||||
): content is EncryptedContentItem {
|
||||
return isCipher(content.data);
|
||||
return content.locked === true;
|
||||
}
|
||||
|
||||
@@ -20,16 +20,16 @@ along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
import Database from "../api";
|
||||
import { isCipher } from "../database/crypto";
|
||||
import { SQLCollection } from "../database/sql-collection";
|
||||
import { HistorySession, isDeleted } from "../types";
|
||||
import { ContentItem, HistorySession, isDeleted } from "../types";
|
||||
import { makeSessionContentId } from "../utils/id";
|
||||
import { ICollection } from "./collection";
|
||||
import { SessionContent, NoteContent } from "./session-content";
|
||||
import { SessionContent } from "./session-content";
|
||||
|
||||
export class NoteHistory implements ICollection {
|
||||
name = "notehistory";
|
||||
versionsLimit = 100;
|
||||
sessionContent = new SessionContent(this.db);
|
||||
private readonly collection: SQLCollection<"notehistory", HistorySession>;
|
||||
readonly collection: SQLCollection<"notehistory", HistorySession>;
|
||||
constructor(private readonly db: Database) {
|
||||
this.collection = new SQLCollection(db.sql, "notehistory", db.eventManager);
|
||||
}
|
||||
@@ -39,25 +39,29 @@ export class NoteHistory implements ICollection {
|
||||
await this.sessionContent.init();
|
||||
}
|
||||
|
||||
async get(noteId: string) {
|
||||
async get(noteId: string, order: "asc" | "desc" = "desc") {
|
||||
if (!noteId) return [];
|
||||
|
||||
const indices = this.collection.indexer.indices;
|
||||
const sessionIds = indices.filter((id) => id.startsWith(noteId));
|
||||
if (sessionIds.length === 0) return [];
|
||||
const history = await this.getSessions(sessionIds);
|
||||
// const indices = this.collection.indexer.indices;
|
||||
// const sessionIds = indices.filter((id) => id.startsWith(noteId));
|
||||
// if (sessionIds.length === 0) return [];
|
||||
// const history = await this.getSessions(sessionIds);
|
||||
|
||||
return history.sort(function (a, b) {
|
||||
return b.dateModified - a.dateModified;
|
||||
});
|
||||
// return history.sort(function (a, b) {
|
||||
// return b.dateModified - a.dateModified;
|
||||
// });
|
||||
const history = await this.db
|
||||
.sql()
|
||||
.selectFrom("notehistory")
|
||||
.where("noteId", "==", noteId)
|
||||
.orderBy(`dateModified ${order}`)
|
||||
.selectAll()
|
||||
.execute();
|
||||
return history as HistorySession[];
|
||||
}
|
||||
|
||||
async add(
|
||||
noteId: string,
|
||||
sessionId: string,
|
||||
locked: boolean,
|
||||
content: NoteContent<boolean>
|
||||
) {
|
||||
async add(sessionId: string, content: ContentItem) {
|
||||
const { noteId, locked } = content;
|
||||
sessionId = `${noteId}_${sessionId}`;
|
||||
const oldSession = await this.collection.get(sessionId);
|
||||
|
||||
@@ -82,13 +86,9 @@ export class NoteHistory implements ICollection {
|
||||
}
|
||||
|
||||
private async cleanup(noteId: string, limit = this.versionsLimit) {
|
||||
const history = await this.get(noteId);
|
||||
const history = await this.get(noteId, "asc");
|
||||
if (history.length === 0 || history.length < limit) return;
|
||||
history.sort(function (a, b) {
|
||||
return a.dateModified - b.dateModified;
|
||||
});
|
||||
const deleteCount = history.length - limit;
|
||||
|
||||
for (let i = 0; i < deleteCount; i++) {
|
||||
const session = history[i];
|
||||
await this._remove(session);
|
||||
@@ -108,14 +108,31 @@ export class NoteHistory implements ICollection {
|
||||
}
|
||||
|
||||
async clearSessions(...noteIds: string[]) {
|
||||
const history = await this.get(noteId);
|
||||
for (const item of history) {
|
||||
await this._remove(item);
|
||||
}
|
||||
await this.db.transaction(async () => {
|
||||
const deletedIds = await this.db
|
||||
.sql()
|
||||
.deleteFrom("notehistory")
|
||||
.where("noteId", "in", noteIds)
|
||||
.returning("sessionContentId as sessionContentId")
|
||||
.execute();
|
||||
await this.db
|
||||
.sql()
|
||||
.deleteFrom("sessioncontent")
|
||||
.where(
|
||||
"id",
|
||||
"in",
|
||||
deletedIds.reduce((arr, item) => {
|
||||
if (item.sessionContentId && !arr.includes(item.sessionContentId))
|
||||
arr.push(item.sessionContentId);
|
||||
return arr;
|
||||
}, [] as string[])
|
||||
)
|
||||
.execute();
|
||||
});
|
||||
}
|
||||
|
||||
private async _remove(session: HistorySession) {
|
||||
await this.collection.delete(session.id);
|
||||
await this.collection.delete([session.id]);
|
||||
await this.sessionContent.remove(session.sessionContentId);
|
||||
}
|
||||
|
||||
|
||||
@@ -77,15 +77,20 @@ export class Notebooks implements ICollection {
|
||||
// return this.collection.raw();
|
||||
// }
|
||||
|
||||
// get all() {
|
||||
// return this.collection.items((note) =>
|
||||
// isTrashItem(note) ? undefined : note
|
||||
// ) as Notebook[];
|
||||
// }
|
||||
get all() {
|
||||
return this.collection.createFilter<Notebook>((qb) =>
|
||||
qb.where("dateDeleted", "is", null).where("deleted", "is", null)
|
||||
);
|
||||
}
|
||||
|
||||
// get pinned() {
|
||||
// return this.all.filter((item) => item.pinned === true);
|
||||
// }
|
||||
get pinned() {
|
||||
return this.collection.createFilter<Notebook>((qb) =>
|
||||
qb
|
||||
.where("dateDeleted", "is", null)
|
||||
.where("deleted", "is", null)
|
||||
.where("pinned", "==", true)
|
||||
);
|
||||
}
|
||||
|
||||
// get trashed() {
|
||||
// return this.raw.filter((item) =>
|
||||
@@ -93,12 +98,8 @@ export class Notebooks implements ICollection {
|
||||
// ) as BaseTrashItem<Notebook>[];
|
||||
// }
|
||||
|
||||
async pin(...ids: string[]) {
|
||||
await this.collection.update(ids, { pinned: true });
|
||||
}
|
||||
|
||||
async unpin(...ids: string[]) {
|
||||
await this.collection.update(ids, { pinned: false });
|
||||
async pin(state: boolean, ...ids: string[]) {
|
||||
await this.collection.update(ids, { pinned: state });
|
||||
}
|
||||
|
||||
async totalNotes(id: string) {
|
||||
@@ -114,7 +115,7 @@ export class Notebooks implements ICollection {
|
||||
.where("toType", "==", "notebook")
|
||||
.where("fromType", "==", "notebook")
|
||||
.whereRef("fromId", "==", "subNotebooks.id")
|
||||
.where("toId", "not in", this.db.trash.cache)
|
||||
.where("toId", "not in", this.db.trash.cache.notebooks)
|
||||
.$narrowType<{ id: string }>()
|
||||
)
|
||||
)
|
||||
@@ -124,7 +125,7 @@ export class Notebooks implements ICollection {
|
||||
.where("fromId", "in", (eb) =>
|
||||
eb.selectFrom("subNotebooks").select("subNotebooks.id")
|
||||
)
|
||||
.where("toId", "not in", this.db.trash.cache)
|
||||
.where("toId", "not in", this.db.trash.cache.notes)
|
||||
.select((eb) => eb.fn.count<number>("id").as("totalNotes"))
|
||||
.executeTakeFirst();
|
||||
|
||||
@@ -142,11 +143,11 @@ export class Notebooks implements ICollection {
|
||||
return this.collection.exists(id);
|
||||
}
|
||||
|
||||
async remove(...ids: string[]) {
|
||||
async moveToTrash(...ids: string[]) {
|
||||
await this.db.trash.add("notebook", ids);
|
||||
}
|
||||
|
||||
async delete(...ids: string[]) {
|
||||
async remove(...ids: string[]) {
|
||||
await this.db.transaction(async () => {
|
||||
await this.db.relations.unlinkOfType("notebook", ids);
|
||||
await this.collection.softDelete(ids);
|
||||
|
||||
@@ -147,11 +147,11 @@ export class Notes implements ICollection {
|
||||
// return this.collection.raw();
|
||||
// }
|
||||
|
||||
// get all() {
|
||||
// return this.collection.items((note) =>
|
||||
// isTrashItem(note) ? undefined : note
|
||||
// ) as Note[];
|
||||
// }
|
||||
get all() {
|
||||
return this.collection.createFilter<Note>((qb) =>
|
||||
qb.where("dateDeleted", "is", null).where("deleted", "is", null)
|
||||
);
|
||||
}
|
||||
|
||||
// isTrashed(id: string) {
|
||||
// return this.raw.find((item) => item.id === id && isTrashItem(item));
|
||||
@@ -163,29 +163,42 @@ export class Notes implements ICollection {
|
||||
// ) as BaseTrashItem<Note>[];
|
||||
// }
|
||||
|
||||
// get pinned() {
|
||||
// return this.all.filter((item) => item.pinned === true);
|
||||
// }
|
||||
get pinned() {
|
||||
return this.collection.createFilter<Note>((qb) =>
|
||||
qb
|
||||
.where("dateDeleted", "is", null)
|
||||
.where("deleted", "is", null)
|
||||
.where("pinned", "==", true)
|
||||
);
|
||||
}
|
||||
|
||||
// get conflicted() {
|
||||
// return this.all.filter((item) => item.conflicted === true);
|
||||
// }
|
||||
|
||||
// get favorites() {
|
||||
// return this.all.filter((item) => item.favorite === true);
|
||||
// }
|
||||
get favorites() {
|
||||
return this.collection.createFilter<Note>((qb) =>
|
||||
qb
|
||||
.where("dateDeleted", "is", null)
|
||||
.where("deleted", "is", null)
|
||||
.where("favorite", "==", true)
|
||||
);
|
||||
}
|
||||
|
||||
// get locked(): Note[] {
|
||||
// return this.all.filter(
|
||||
// (item) => !isTrashItem(item) && item.locked === true
|
||||
// ) as Note[];
|
||||
// }
|
||||
get locked() {
|
||||
return this.collection.createFilter<Note>((qb) =>
|
||||
qb
|
||||
.where("dateDeleted", "is", null)
|
||||
.where("deleted", "is", null)
|
||||
.where("locked", "==", true)
|
||||
);
|
||||
}
|
||||
|
||||
exists(id: string) {
|
||||
return this.collection.exists(id);
|
||||
}
|
||||
|
||||
delete(...ids: string[]) {
|
||||
moveToTrash(...ids: string[]) {
|
||||
return this._delete(true, ...ids);
|
||||
}
|
||||
|
||||
@@ -193,6 +206,22 @@ export class Notes implements ICollection {
|
||||
return this._delete(false, ...ids);
|
||||
}
|
||||
|
||||
pin(state: boolean, ...ids: string[]) {
|
||||
return this.collection.update(ids, { pinned: state });
|
||||
}
|
||||
favorite(state: boolean, ...ids: string[]) {
|
||||
return this.collection.update(ids, { favorite: state });
|
||||
}
|
||||
readonly(state: boolean, ...ids: string[]) {
|
||||
return this.collection.update(ids, { readonly: state });
|
||||
}
|
||||
async localOnly(state: boolean, ...ids: string[]) {
|
||||
await this.db.transaction(async () => {
|
||||
await this.collection.update(ids, { localOnly: state });
|
||||
await this.db.content.updateByNoteId({ localOnly: state }, ...ids);
|
||||
});
|
||||
}
|
||||
|
||||
async export(id: string, options: ExportOptions) {
|
||||
const { format, rawContent } = options;
|
||||
if (format !== "txt" && !(await checkIsUserPremium(CHECK_IDS.noteExport)))
|
||||
@@ -203,13 +232,9 @@ export class Notes implements ICollection {
|
||||
|
||||
if (!options.contentItem) {
|
||||
const rawContent = note.contentId
|
||||
? await this.db.content.raw(note.contentId)
|
||||
? await this.db.content.get(note.contentId)
|
||||
: undefined;
|
||||
if (
|
||||
rawContent &&
|
||||
(isDeleted(rawContent) || !isUnencryptedContent(rawContent))
|
||||
)
|
||||
return false;
|
||||
if (rawContent && !isUnencryptedContent(rawContent)) return false;
|
||||
options.contentItem = rawContent || EMPTY_CONTENT(note.id);
|
||||
}
|
||||
|
||||
@@ -246,7 +271,7 @@ export class Notes implements ICollection {
|
||||
if (!note) continue;
|
||||
|
||||
const content = note.contentId
|
||||
? await this.db.content.raw(note.contentId)
|
||||
? await this.db.content.get(note.contentId)
|
||||
: undefined;
|
||||
if (content && (isDeleted(content) || !isUnencryptedContent(content)))
|
||||
throw new Error("Cannot duplicate a locked or deleted note.");
|
||||
|
||||
@@ -59,26 +59,14 @@ export class Relations implements ICollection {
|
||||
reference: ItemReference,
|
||||
type: TType
|
||||
) {
|
||||
return new RelationsArray(
|
||||
this.db.sql,
|
||||
this.db.trash.cache,
|
||||
reference,
|
||||
type,
|
||||
"from"
|
||||
);
|
||||
return new RelationsArray(this.db, reference, type, "from");
|
||||
}
|
||||
|
||||
to<TType extends keyof RelatableTable>(
|
||||
reference: ItemReference,
|
||||
type: TType
|
||||
) {
|
||||
return new RelationsArray(
|
||||
this.db.sql,
|
||||
this.db.trash.cache,
|
||||
reference,
|
||||
type,
|
||||
"to"
|
||||
);
|
||||
return new RelationsArray(this.db, reference, type, "to");
|
||||
}
|
||||
|
||||
// get raw() {
|
||||
@@ -161,15 +149,15 @@ class RelationsArray<TType extends keyof RelatableTable> {
|
||||
private table: ValueOf<RelatableTable> = TABLE_MAP[this.type];
|
||||
|
||||
constructor(
|
||||
private readonly sql: DatabaseAccessor,
|
||||
private readonly trashIds: string[],
|
||||
private readonly db: Database,
|
||||
private readonly reference: ItemReference,
|
||||
private readonly type: TType,
|
||||
private readonly direction: "from" | "to"
|
||||
) {}
|
||||
|
||||
async resolve(limit?: number): Promise<ItemMap[TType][]> {
|
||||
const items = await this.sql()
|
||||
const items = await this.db
|
||||
.sql()
|
||||
.selectFrom(this.table)
|
||||
.where("id", "in", (b) =>
|
||||
b
|
||||
@@ -189,7 +177,8 @@ class RelationsArray<TType extends keyof RelatableTable> {
|
||||
}
|
||||
|
||||
async unlink() {
|
||||
await this.sql()
|
||||
await this.db
|
||||
.sql()
|
||||
.replaceInto("relations")
|
||||
.columns(["id", "dateModified", "deleted"])
|
||||
.expression((eb) =>
|
||||
@@ -207,7 +196,8 @@ class RelationsArray<TType extends keyof RelatableTable> {
|
||||
}
|
||||
|
||||
async get() {
|
||||
const ids = await this.sql()
|
||||
const ids = await this.db
|
||||
.sql()
|
||||
.selectFrom("relations")
|
||||
.$call(this.buildRelationsQuery())
|
||||
.execute();
|
||||
@@ -215,7 +205,8 @@ class RelationsArray<TType extends keyof RelatableTable> {
|
||||
}
|
||||
|
||||
async count() {
|
||||
const result = await this.sql()
|
||||
const result = await this.db
|
||||
.sql()
|
||||
.selectFrom("relations")
|
||||
.$call(this.buildRelationsQuery())
|
||||
.clearSelect()
|
||||
@@ -226,7 +217,8 @@ class RelationsArray<TType extends keyof RelatableTable> {
|
||||
}
|
||||
|
||||
async has(id: string) {
|
||||
const result = await this.sql()
|
||||
const result = await this.db
|
||||
.sql()
|
||||
.selectFrom("relations")
|
||||
.$call(this.buildRelationsQuery())
|
||||
.clearSelect()
|
||||
@@ -252,9 +244,13 @@ class RelationsArray<TType extends keyof RelatableTable> {
|
||||
.where("toType", "==", this.reference.type)
|
||||
.where("toId", "==", this.reference.id)
|
||||
.$if(
|
||||
(this.type === "note" || this.type === "notebook") &&
|
||||
this.trashIds.length > 0,
|
||||
(b) => b.where("fromId", "not in", this.trashIds)
|
||||
this.type === "note" && this.db.trash.cache.notes.length > 0,
|
||||
(b) => b.where("fromId", "not in", this.db.trash.cache.notes)
|
||||
)
|
||||
.$if(
|
||||
this.type === "notebook" &&
|
||||
this.db.trash.cache.notebooks.length > 0,
|
||||
(b) => b.where("fromId", "not in", this.db.trash.cache.notebooks)
|
||||
)
|
||||
.select("relations.fromId as id")
|
||||
.$narrowType<{ id: string }>();
|
||||
@@ -264,9 +260,13 @@ class RelationsArray<TType extends keyof RelatableTable> {
|
||||
.where("fromType", "==", this.reference.type)
|
||||
.where("fromId", "==", this.reference.id)
|
||||
.$if(
|
||||
(this.type === "note" || this.type === "notebook") &&
|
||||
this.trashIds.length > 0,
|
||||
(b) => b.where("toId", "not in", this.trashIds)
|
||||
this.type === "note" && this.db.trash.cache.notes.length > 0,
|
||||
(b) => b.where("toId", "not in", this.db.trash.cache.notes)
|
||||
)
|
||||
.$if(
|
||||
this.type === "notebook" &&
|
||||
this.db.trash.cache.notebooks.length > 0,
|
||||
(b) => b.where("toId", "not in", this.db.trash.cache.notebooks)
|
||||
)
|
||||
.select("relations.toId as id")
|
||||
.$narrowType<{ id: string }>();
|
||||
|
||||
@@ -33,10 +33,7 @@ export type NoteContent<TLocked extends boolean> = {
|
||||
|
||||
export class SessionContent implements ICollection {
|
||||
name = "sessioncontent";
|
||||
private readonly collection: SQLCollection<
|
||||
"sessioncontent",
|
||||
SessionContentItem
|
||||
>;
|
||||
readonly collection: SQLCollection<"sessioncontent", SessionContentItem>;
|
||||
constructor(private readonly db: Database) {
|
||||
this.collection = new SQLCollection(
|
||||
db.sql,
|
||||
@@ -102,7 +99,7 @@ export class SessionContent implements ICollection {
|
||||
}
|
||||
|
||||
async remove(sessionContentId: string) {
|
||||
await this.collection.delete(sessionContentId);
|
||||
await this.collection.delete([sessionContentId]);
|
||||
}
|
||||
|
||||
// async all() {
|
||||
|
||||
@@ -59,20 +59,19 @@ export class Shortcuts implements ICollection {
|
||||
...shortcut
|
||||
};
|
||||
|
||||
if (!shortcut.item)
|
||||
if (!shortcut.itemId || !shortcut.itemType)
|
||||
throw new Error("Cannot create a shortcut without an item.");
|
||||
|
||||
const id = shortcut.id || shortcut.item.id;
|
||||
const id = shortcut.id || shortcut.itemId;
|
||||
|
||||
await this.collection.upsert({
|
||||
id,
|
||||
type: "shortcut",
|
||||
itemId: shortcut.itemId,
|
||||
itemType: shortcut.itemType,
|
||||
// item: shortcut.item,
|
||||
dateCreated: shortcut.dateCreated || Date.now(),
|
||||
dateModified: shortcut.dateModified || Date.now(),
|
||||
sortIndex: await this.collection.count()
|
||||
sortIndex: -1 // await this.collection.count()
|
||||
});
|
||||
return id;
|
||||
}
|
||||
@@ -81,9 +80,11 @@ export class Shortcuts implements ICollection {
|
||||
// return this.collection.raw();
|
||||
// }
|
||||
|
||||
// get all() {
|
||||
// return this.collection.items();
|
||||
// }
|
||||
get all() {
|
||||
return this.collection.createFilter<Shortcut>((qb) =>
|
||||
qb.where("deleted", "is", null)
|
||||
);
|
||||
}
|
||||
|
||||
async get() {
|
||||
// return this.all.reduce((prev, shortcut) => {
|
||||
@@ -106,7 +107,7 @@ export class Shortcuts implements ICollection {
|
||||
// }, [] as (Notebook | Topic | Tag)[]);
|
||||
}
|
||||
|
||||
exists(id: string) {
|
||||
async exists(id: string) {
|
||||
return this.collection.exists(id);
|
||||
}
|
||||
|
||||
|
||||
@@ -69,9 +69,11 @@ export class Tags implements ICollection {
|
||||
// return this.collection.raw();
|
||||
// }
|
||||
|
||||
// get all() {
|
||||
// return this.collection.items();
|
||||
// }
|
||||
get all() {
|
||||
return this.collection.createFilter<Tag>((qb) =>
|
||||
qb.where("deleted", "is", null)
|
||||
);
|
||||
}
|
||||
|
||||
async remove(...ids: string[]) {
|
||||
await this.db.transaction(async () => {
|
||||
|
||||
@@ -19,10 +19,19 @@ along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import dayjs from "dayjs";
|
||||
import Database from "../api";
|
||||
import { deleteItems } from "../utils/array";
|
||||
import { FilteredSelector } from "../database/sql-collection";
|
||||
import { TrashItem } from "../types";
|
||||
|
||||
export default class Trash {
|
||||
collections = ["notes", "notebooks"] as const;
|
||||
cache: string[] = [];
|
||||
cache: {
|
||||
notes: string[];
|
||||
notebooks: string[];
|
||||
} = {
|
||||
notebooks: [],
|
||||
notes: []
|
||||
};
|
||||
constructor(private readonly db: Database) {}
|
||||
|
||||
async init() {
|
||||
@@ -34,19 +43,19 @@ export default class Trash {
|
||||
.selectFrom("notes")
|
||||
.where("type", "==", "trash")
|
||||
.select("id")
|
||||
.as("id"),
|
||||
.as("noteId"),
|
||||
eb
|
||||
.selectFrom("notebooks")
|
||||
.where("type", "==", "trash")
|
||||
.select("id")
|
||||
.as("id")
|
||||
.as("notebookId")
|
||||
])
|
||||
.execute();
|
||||
|
||||
this.cache = result.reduce((ids, item) => {
|
||||
if (item.id) ids.push(item.id);
|
||||
return ids;
|
||||
}, [] as string[]);
|
||||
for (const { noteId, notebookId } of result) {
|
||||
if (noteId) this.cache.notes.push(noteId);
|
||||
else if (notebookId) this.cache.notebooks.push(notebookId);
|
||||
}
|
||||
}
|
||||
|
||||
async cleanup() {
|
||||
@@ -79,8 +88,8 @@ export default class Trash {
|
||||
},
|
||||
{ noteIds: [] as string[], notebookIds: [] as string[] }
|
||||
);
|
||||
await this.delete("note", noteIds);
|
||||
await this.delete("note", notebookIds);
|
||||
|
||||
await this._delete(noteIds, notebookIds);
|
||||
}
|
||||
|
||||
async add(type: "note" | "notebook", ids: string[]) {
|
||||
@@ -90,50 +99,85 @@ export default class Trash {
|
||||
itemType: "note",
|
||||
dateDeleted: Date.now()
|
||||
});
|
||||
this.cache.notes.push(...ids);
|
||||
} else if (type === "notebook") {
|
||||
await this.db.notebooks.collection.update(ids, {
|
||||
type: "trash",
|
||||
itemType: "notebook",
|
||||
dateDeleted: Date.now()
|
||||
});
|
||||
this.cache.notebooks.push(...ids);
|
||||
}
|
||||
this.cache.push(...ids);
|
||||
}
|
||||
|
||||
async delete(type: "note" | "notebook", ids: string[]) {
|
||||
if (type === "note") {
|
||||
await this.db.content.removeByNoteId(...ids);
|
||||
await this.db.noteHistory.clearSessions(...ids);
|
||||
await this.db.notes.delete(...ids);
|
||||
} else if (type === "notebook") {
|
||||
await this.db.relations.unlinkOfType("notebook", ids);
|
||||
await this.db.notebooks.delete(...ids);
|
||||
async delete(...items: { id: string; type: "note" | "notebook" }[]) {
|
||||
if (items.length <= 0) return;
|
||||
|
||||
const noteIds = [];
|
||||
const notebookIds = [];
|
||||
for (const item of items) {
|
||||
if (item.type === "note") {
|
||||
noteIds.push(item.id);
|
||||
this.cache.notes.splice(this.cache.notes.indexOf(item.id), 1);
|
||||
} else if (item.type === "notebook") {
|
||||
notebookIds.push(item.id);
|
||||
this.cache.notebooks.splice(this.cache.notebooks.indexOf(item.id), 1);
|
||||
}
|
||||
}
|
||||
ids.forEach((id) => this.cache.splice(this.cache.indexOf(id), 1));
|
||||
|
||||
await this._delete(noteIds, notebookIds);
|
||||
}
|
||||
|
||||
async restore(type: "note" | "notebook", ids: string[]) {
|
||||
if (type === "note") {
|
||||
await this.db.notes.collection.update(ids, {
|
||||
private async _delete(noteIds: string[], notebookIds: string[]) {
|
||||
if (noteIds.length > 0) {
|
||||
await this.db.content.removeByNoteId(...noteIds);
|
||||
await this.db.noteHistory.clearSessions(...noteIds);
|
||||
await this.db.notes.remove(...noteIds);
|
||||
deleteItems(this.cache.notes, ...noteIds);
|
||||
}
|
||||
|
||||
if (notebookIds.length > 0) {
|
||||
await this.db.relations.unlinkOfType("notebook", notebookIds);
|
||||
await this.db.notebooks.remove(...notebookIds);
|
||||
deleteItems(this.cache.notebooks, ...notebookIds);
|
||||
}
|
||||
}
|
||||
|
||||
async restore(...items: { id: string; type: "note" | "notebook" }[]) {
|
||||
if (items.length <= 0) return;
|
||||
|
||||
const noteIds = [];
|
||||
const notebookIds = [];
|
||||
for (const item of items) {
|
||||
if (item.type === "note") {
|
||||
noteIds.push(item.id);
|
||||
this.cache.notes.splice(this.cache.notes.indexOf(item.id), 1);
|
||||
} else if (item.type === "notebook") {
|
||||
notebookIds.push(item.id);
|
||||
this.cache.notebooks.splice(this.cache.notebooks.indexOf(item.id), 1);
|
||||
}
|
||||
}
|
||||
|
||||
if (noteIds.length > 0) {
|
||||
await this.db.notes.collection.update(noteIds, {
|
||||
type: "note",
|
||||
dateDeleted: null,
|
||||
itemType: null
|
||||
});
|
||||
} else {
|
||||
await this.db.notebooks.collection.update(ids, {
|
||||
}
|
||||
|
||||
if (notebookIds.length > 0) {
|
||||
await this.db.notebooks.collection.update(notebookIds, {
|
||||
type: "notebook",
|
||||
dateDeleted: null,
|
||||
itemType: null
|
||||
});
|
||||
}
|
||||
ids.forEach((id) => this.cache.splice(this.cache.indexOf(id), 1));
|
||||
}
|
||||
|
||||
async clear() {
|
||||
// for (const item of this.all) {
|
||||
// await this.delete(item.id);
|
||||
// }
|
||||
this.cache = [];
|
||||
await this._delete(this.cache.notes, this.cache.notebooks);
|
||||
this.cache = { notebooks: [], notes: [] };
|
||||
}
|
||||
|
||||
// synced(id: string) {
|
||||
@@ -144,11 +188,31 @@ export default class Trash {
|
||||
// } else return true;
|
||||
// }
|
||||
|
||||
async all() {
|
||||
const trashedNotes = await this.db
|
||||
.sql()
|
||||
.selectFrom("notes")
|
||||
.where("type", "==", "trash")
|
||||
.where("id", "in", this.cache.notes)
|
||||
.selectAll()
|
||||
.execute();
|
||||
|
||||
const trashedNotebooks = await this.db
|
||||
.sql()
|
||||
.selectFrom("notebooks")
|
||||
.where("type", "==", "trash")
|
||||
.where("id", "in", this.cache.notebooks)
|
||||
.selectAll()
|
||||
.execute();
|
||||
|
||||
return [...trashedNotes, ...trashedNotebooks] as TrashItem[];
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {string} id
|
||||
*/
|
||||
exists(id: string) {
|
||||
return this.cache.includes(id);
|
||||
return this.cache.notebooks.includes(id) || this.cache.notes.includes(id);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -21,20 +21,11 @@ import SparkMD5 from "spark-md5";
|
||||
import { CURRENT_DATABASE_VERSION } from "../common.js";
|
||||
import Migrator from "./migrator.js";
|
||||
import Database from "../api/index.js";
|
||||
import {
|
||||
Item,
|
||||
MaybeDeletedItem,
|
||||
Note,
|
||||
Notebook,
|
||||
ValueOf,
|
||||
isDeleted
|
||||
} from "../types.js";
|
||||
import { Cipher } from "@notesnook/crypto";
|
||||
import { Item, MaybeDeletedItem, Note, Notebook, isDeleted } from "../types.js";
|
||||
import { Cipher, SerializedKey } from "@notesnook/crypto";
|
||||
import { isCipher } from "./crypto.js";
|
||||
import { toChunks } from "../utils/array";
|
||||
import { migrateItem } from "../migrations";
|
||||
import Indexer from "./indexer";
|
||||
import { set } from "../utils/set.js";
|
||||
import { DatabaseCollection } from "./index.js";
|
||||
|
||||
type BackupDataItem = MaybeDeletedItem<Item> | string[];
|
||||
type BackupPlatform = "web" | "mobile" | "node";
|
||||
@@ -74,6 +65,15 @@ type EncryptedBackupFile = BaseBackupFile & {
|
||||
type BackupFile = UnencryptedBackupFile | EncryptedBackupFile;
|
||||
type LegacyBackupFile = LegacyUnencryptedBackupFile | LegacyEncryptedBackupFile;
|
||||
|
||||
type BackupState = {
|
||||
buffer: string[];
|
||||
bufferLength: number;
|
||||
chunkIndex: number;
|
||||
key?: SerializedKey;
|
||||
encrypt: boolean;
|
||||
type: BackupPlatform;
|
||||
};
|
||||
|
||||
function isEncryptedBackup(
|
||||
backup: LegacyBackupFile | BackupFile
|
||||
): backup is EncryptedBackupFile | LegacyEncryptedBackupFile {
|
||||
@@ -86,6 +86,7 @@ function isLegacyBackupFile(
|
||||
return backup.version <= 5.8;
|
||||
}
|
||||
|
||||
const MAX_CHUNK_SIZE = 10 * 1024 * 1024;
|
||||
const COLORS = [
|
||||
"red",
|
||||
"orange",
|
||||
@@ -133,11 +134,12 @@ const itemTypeToCollectionKey = {
|
||||
relation: "relations",
|
||||
reminder: "reminders",
|
||||
sessioncontent: "sessioncontent",
|
||||
session: "notehistory",
|
||||
session: "noteHistory",
|
||||
notehistory: "notehistory",
|
||||
content: "content",
|
||||
shortcut: "shortcuts",
|
||||
settingitem: "settingsv2",
|
||||
settingitem: "settings",
|
||||
settings: "settings",
|
||||
|
||||
// to make ts happy
|
||||
topic: "topics"
|
||||
@@ -170,81 +172,85 @@ export default class Backup {
|
||||
data: ""
|
||||
};
|
||||
|
||||
const keys = await this.db.storage().getAllKeys();
|
||||
const chunks = toChunks(keys, 20);
|
||||
let buffer: string[] = [];
|
||||
let bufferLength = 0;
|
||||
const MAX_CHUNK_SIZE = 10 * 1024 * 1024;
|
||||
let chunkIndex = 0;
|
||||
const backupState: BackupState = {
|
||||
buffer: [] as string[],
|
||||
bufferLength: 0,
|
||||
chunkIndex: 0,
|
||||
key,
|
||||
encrypt,
|
||||
type
|
||||
};
|
||||
|
||||
while (chunks.length > 0) {
|
||||
const chunk = chunks.pop();
|
||||
if (!chunk) break;
|
||||
yield* this.backupCollection(this.db.notes.collection, backupState);
|
||||
yield* this.backupCollection(this.db.notebooks.collection, backupState);
|
||||
yield* this.backupCollection(this.db.content.collection, backupState);
|
||||
yield* this.backupCollection(this.db.noteHistory.collection, backupState);
|
||||
yield* this.backupCollection(
|
||||
this.db.noteHistory.sessionContent.collection,
|
||||
backupState
|
||||
);
|
||||
yield* this.backupCollection(this.db.colors.collection, backupState);
|
||||
yield* this.backupCollection(this.db.tags.collection, backupState);
|
||||
yield* this.backupCollection(this.db.settings.collection, backupState);
|
||||
yield* this.backupCollection(this.db.shortcuts.collection, backupState);
|
||||
yield* this.backupCollection(this.db.reminders.collection, backupState);
|
||||
yield* this.backupCollection(this.db.relations.collection, backupState);
|
||||
yield* this.backupCollection(this.db.attachments.collection, backupState);
|
||||
|
||||
const items = await this.db.storage().readMulti(chunk);
|
||||
items.forEach(([id, item]) => {
|
||||
const isDeleted =
|
||||
item &&
|
||||
typeof item === "object" &&
|
||||
"deleted" in item &&
|
||||
!("type" in item);
|
||||
|
||||
if (
|
||||
!item ||
|
||||
invalidKeys.includes(id) ||
|
||||
isDeleted ||
|
||||
id.startsWith("_uk_")
|
||||
)
|
||||
return;
|
||||
|
||||
const data = JSON.stringify(item);
|
||||
buffer.push(data);
|
||||
bufferLength += data.length;
|
||||
});
|
||||
|
||||
if (bufferLength >= MAX_CHUNK_SIZE || chunks.length === 0) {
|
||||
let itemsJSON = `[${buffer.join(",")}]`;
|
||||
|
||||
buffer = [];
|
||||
bufferLength = 0;
|
||||
|
||||
itemsJSON = await this.db.compressor().compress(itemsJSON);
|
||||
|
||||
const hash = SparkMD5.hash(itemsJSON);
|
||||
|
||||
if (encrypt && key)
|
||||
itemsJSON = JSON.stringify(
|
||||
await this.db.storage().encrypt(key, itemsJSON)
|
||||
);
|
||||
else itemsJSON = JSON.stringify(itemsJSON);
|
||||
|
||||
yield {
|
||||
path: `${chunkIndex++}-${encrypt ? "encrypted" : "plain"}-${hash}`,
|
||||
data: `{
|
||||
"version": ${CURRENT_DATABASE_VERSION},
|
||||
"type": "${type}",
|
||||
"date": ${Date.now()},
|
||||
"data": ${itemsJSON},
|
||||
"hash": "${hash}",
|
||||
"hash_type": "md5",
|
||||
"compressed": true,
|
||||
"encrypted": ${encrypt ? "true" : "false"}
|
||||
}`
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
if (bufferLength > 0 || buffer.length > 0)
|
||||
throw new Error("Buffer not empty.");
|
||||
if (backupState.buffer.length > 0) yield* this.bufferToFile(backupState);
|
||||
|
||||
await this.updateBackupTime();
|
||||
}
|
||||
|
||||
async import(
|
||||
backup: LegacyBackupFile | BackupFile,
|
||||
password?: string,
|
||||
encryptionKey?: string
|
||||
private async *backupCollection<T, B extends boolean>(
|
||||
collection: DatabaseCollection<T, B>,
|
||||
state: BackupState
|
||||
) {
|
||||
for await (const item of collection.stream()) {
|
||||
const data = JSON.stringify(item);
|
||||
state.buffer.push(data);
|
||||
state.bufferLength += data.length;
|
||||
|
||||
if (state.bufferLength >= MAX_CHUNK_SIZE) {
|
||||
yield* this.bufferToFile(state);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private async *bufferToFile(state: BackupState) {
|
||||
let itemsJSON = `[${state.buffer.join(",")}]`;
|
||||
|
||||
state.buffer = [];
|
||||
state.bufferLength = 0;
|
||||
|
||||
itemsJSON = await this.db.compressor().compress(itemsJSON);
|
||||
|
||||
const hash = SparkMD5.hash(itemsJSON);
|
||||
|
||||
if (state.encrypt && state.key)
|
||||
itemsJSON = JSON.stringify(
|
||||
await this.db.storage().encrypt(state.key, itemsJSON)
|
||||
);
|
||||
else itemsJSON = JSON.stringify(itemsJSON);
|
||||
|
||||
yield {
|
||||
path: `${state.chunkIndex++}-${
|
||||
state.encrypt ? "encrypted" : "plain"
|
||||
}-${hash}`,
|
||||
data: `{
|
||||
"version": ${CURRENT_DATABASE_VERSION},
|
||||
"type": "${state.type}",
|
||||
"date": ${Date.now()},
|
||||
"data": ${itemsJSON},
|
||||
"hash": "${hash}",
|
||||
"hash_type": "md5",
|
||||
"compressed": true,
|
||||
"encrypted": ${state.encrypt ? "true" : "false"}
|
||||
}`
|
||||
};
|
||||
}
|
||||
|
||||
async import(backup: LegacyBackupFile | BackupFile, password?: string, encryptionKey?: string) {
|
||||
if (!this.validate(backup)) throw new Error("Invalid backup.");
|
||||
|
||||
backup = this.migrateBackup(backup);
|
||||
@@ -325,105 +331,102 @@ export default class Backup {
|
||||
}
|
||||
|
||||
private async migrateData(data: BackupDataItem[], version: number) {
|
||||
const toAdd: Partial<
|
||||
Record<
|
||||
ValueOf<typeof itemTypeToCollectionKey>,
|
||||
[string, MaybeDeletedItem<Item>][]
|
||||
>
|
||||
> = {};
|
||||
for (let item of data) {
|
||||
// we do not want to restore deleted items
|
||||
if (
|
||||
!item ||
|
||||
typeof item !== "object" ||
|
||||
Array.isArray(item) ||
|
||||
isDeleted(item)
|
||||
)
|
||||
continue;
|
||||
// in v5.6 of the database, we did not set note history session's type
|
||||
if ("sessionContentId" in item && item.type !== "session")
|
||||
(item as any).type = "notehistory";
|
||||
await this.db.transaction(async () => {
|
||||
for (let item of data) {
|
||||
// we do not want to restore deleted items
|
||||
if (
|
||||
!item ||
|
||||
typeof item !== "object" ||
|
||||
Array.isArray(item) ||
|
||||
isDeleted(item)
|
||||
)
|
||||
continue;
|
||||
// in v5.6 of the database, we did not set note history session's type
|
||||
if ("sessionContentId" in item && item.type !== "session")
|
||||
(item as any).type = "notehistory";
|
||||
|
||||
await migrateItem(
|
||||
item,
|
||||
version,
|
||||
CURRENT_DATABASE_VERSION,
|
||||
item.type,
|
||||
this.db,
|
||||
"backup"
|
||||
);
|
||||
// since items in trash can have their own set of migrations,
|
||||
// we have to run the migration again to account for that.
|
||||
if (item.type === "trash" && item.itemType)
|
||||
await migrateItem(
|
||||
item as unknown as Note | Notebook,
|
||||
item,
|
||||
version,
|
||||
CURRENT_DATABASE_VERSION,
|
||||
item.itemType,
|
||||
item.type,
|
||||
this.db,
|
||||
"backup"
|
||||
);
|
||||
// since items in trash can have their own set of migrations,
|
||||
// we have to run the migration again to account for that.
|
||||
if (item.type === "trash" && item.itemType)
|
||||
await migrateItem(
|
||||
item as unknown as Note | Notebook,
|
||||
version,
|
||||
CURRENT_DATABASE_VERSION,
|
||||
item.itemType,
|
||||
this.db,
|
||||
"backup"
|
||||
);
|
||||
|
||||
if (item.type === "attachment" && item.metadata && item.metadata.hash) {
|
||||
const attachment = this.db.attachments.attachment(item.metadata.hash);
|
||||
if (attachment) {
|
||||
const isNewGeneric =
|
||||
item.metadata.type === "application/octet-stream";
|
||||
const isOldGeneric =
|
||||
attachment.metadata.type === "application/octet-stream";
|
||||
item = {
|
||||
...attachment,
|
||||
metadata: {
|
||||
...attachment.metadata,
|
||||
type:
|
||||
// we keep whichever mime type is more specific
|
||||
isNewGeneric && !isOldGeneric
|
||||
? attachment.metadata.type
|
||||
: item.metadata.type,
|
||||
filename:
|
||||
// we keep the filename based on which item's mime type we kept
|
||||
isNewGeneric && !isOldGeneric
|
||||
? attachment.metadata.filename
|
||||
: item.metadata.filename
|
||||
},
|
||||
noteIds: set.union(attachment.noteIds, item.noteIds)
|
||||
};
|
||||
} else {
|
||||
item.dateUploaded = undefined;
|
||||
item.failed = undefined;
|
||||
}
|
||||
}
|
||||
|
||||
// items should sync immediately after getting restored
|
||||
item.dateModified = Date.now();
|
||||
item.synced = false;
|
||||
|
||||
if (item.type === "settings")
|
||||
await this.db.storage().write("settings", item);
|
||||
else {
|
||||
const itemType =
|
||||
// colors are naively of type "tag" instead of "color" so we have to fix that.
|
||||
item.type === "tag" && COLORS.includes(item.title.toLowerCase())
|
||||
? "color"
|
||||
: "itemType" in item
|
||||
: "itemType" in item && item.itemType
|
||||
? item.itemType
|
||||
: item.type;
|
||||
const collectionKey = itemTypeToCollectionKey[itemType];
|
||||
if (collectionKey) {
|
||||
toAdd[collectionKey] = toAdd[collectionKey] || [];
|
||||
toAdd[collectionKey]?.push([item.id, item]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (const collectionKey in toAdd) {
|
||||
const items =
|
||||
toAdd[collectionKey as ValueOf<typeof itemTypeToCollectionKey>];
|
||||
if (!items) continue;
|
||||
const indexer = new Indexer(this.db.storage, collectionKey);
|
||||
await indexer.init();
|
||||
await indexer.writeMulti(items);
|
||||
}
|
||||
if (!itemType || itemType === "topic" || itemType === "settings")
|
||||
continue;
|
||||
|
||||
if (item.type === "attachment" && (item.hash || item.metadata?.hash)) {
|
||||
const attachment = await this.db.attachments.attachment(
|
||||
item.metadata?.hash || item.hash
|
||||
);
|
||||
if (attachment) {
|
||||
const isNewGeneric =
|
||||
item.metadata?.type === "application/octet-stream" ||
|
||||
item.mimeType === "application/octet-stream";
|
||||
const isOldGeneric =
|
||||
attachment.mimeType === "application/octet-stream";
|
||||
item = {
|
||||
...attachment,
|
||||
mimeType:
|
||||
// we keep whichever mime type is more specific
|
||||
isNewGeneric && !isOldGeneric
|
||||
? attachment.mimeType
|
||||
: item.metadata?.type || item.mimeType,
|
||||
filename:
|
||||
// we keep the filename based on which item's mime type we kept
|
||||
isNewGeneric && !isOldGeneric
|
||||
? attachment.filename
|
||||
: item.metadata?.filename || item.filename
|
||||
};
|
||||
for (const noteId of item.noteIds || []) {
|
||||
await this.db.relations.add(
|
||||
{
|
||||
id: noteId,
|
||||
type: "note"
|
||||
},
|
||||
attachment
|
||||
);
|
||||
}
|
||||
} else {
|
||||
delete item.dateUploaded;
|
||||
delete item.failed;
|
||||
}
|
||||
}
|
||||
|
||||
const collectionKey = itemTypeToCollectionKey[itemType];
|
||||
const collection =
|
||||
collectionKey === "sessioncontent"
|
||||
? this.db.noteHistory.sessionContent.collection
|
||||
: this.db[collectionKey].collection;
|
||||
|
||||
// items should sync immediately after getting restored
|
||||
item.dateModified = Date.now();
|
||||
item.synced = false;
|
||||
|
||||
await collection.upsert(item as any);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private validate(backup: LegacyBackupFile | BackupFile) {
|
||||
|
||||
@@ -35,13 +35,18 @@ import {
|
||||
ValueNode,
|
||||
PrimitiveValueListNode,
|
||||
Transaction,
|
||||
ColumnType
|
||||
ColumnType,
|
||||
ExpressionBuilder,
|
||||
ReferenceExpression
|
||||
} from "kysely";
|
||||
import {
|
||||
Attachment,
|
||||
Color,
|
||||
ContentItem,
|
||||
GroupOptions,
|
||||
HistorySession,
|
||||
ItemType,
|
||||
MaybeDeletedItem,
|
||||
Note,
|
||||
Notebook,
|
||||
Relation,
|
||||
@@ -67,8 +72,10 @@ export type SQLiteItem<T> = {
|
||||
[P in keyof T]?: T[P] | null;
|
||||
} & { id: string };
|
||||
|
||||
export type SQLiteItemWithRowID<T> = SQLiteItem<T> & { rowid: number };
|
||||
|
||||
export interface DatabaseSchema {
|
||||
notes: SQLiteItem<TrashOrItem<Note>>; //| SQLiteItem<BaseTrashItem<Note>>;
|
||||
notes: SQLiteItem<TrashOrItem<Note>>;
|
||||
content: SQLiteItem<ContentItem>;
|
||||
relations: SQLiteItem<Relation>;
|
||||
notebooks: SQLiteItem<TrashOrItem<Notebook>>;
|
||||
@@ -82,36 +89,77 @@ export interface DatabaseSchema {
|
||||
shortcuts: SQLiteItem<Shortcut>;
|
||||
}
|
||||
|
||||
export type DatabaseSchemaWithFTS = DatabaseSchema & {
|
||||
notes_fts: SQLiteItemWithRowID<{
|
||||
notes_fts: string;
|
||||
title: string;
|
||||
rank: number;
|
||||
}>;
|
||||
content_fts: SQLiteItemWithRowID<{
|
||||
content_fts: string;
|
||||
data: string;
|
||||
rank: number;
|
||||
noteId: string;
|
||||
}>;
|
||||
};
|
||||
|
||||
type AsyncOrSyncResult<Async extends boolean, Response> = Async extends true
|
||||
? Promise<Response>
|
||||
: Response;
|
||||
|
||||
export interface DatabaseCollection<T, Async extends boolean> {
|
||||
export interface DatabaseCollection<T, IsAsync extends boolean> {
|
||||
clear(): Promise<void>;
|
||||
init(): Promise<void>;
|
||||
upsert(item: T): Promise<void>;
|
||||
softDelete(ids: string[]): Promise<void>;
|
||||
delete(ids: string[]): Promise<void>;
|
||||
exists(id: string): AsyncOrSyncResult<Async, boolean>;
|
||||
count(): AsyncOrSyncResult<Async, number>;
|
||||
get(id: string): AsyncOrSyncResult<Async, T | undefined>;
|
||||
exists(id: string): AsyncOrSyncResult<IsAsync, boolean>;
|
||||
count(): AsyncOrSyncResult<IsAsync, number>;
|
||||
get(id: string): AsyncOrSyncResult<IsAsync, T | undefined>;
|
||||
put(items: (T | undefined)[]): Promise<void>;
|
||||
update(ids: string[], partial: Partial<T>): Promise<void>;
|
||||
ids(options: GroupOptions): AsyncOrSyncResult<IsAsync, string[]>;
|
||||
items(
|
||||
ids: string[],
|
||||
sortOptions?: GroupOptions
|
||||
): AsyncOrSyncResult<
|
||||
IsAsync,
|
||||
Record<string, MaybeDeletedItem<T> | undefined>
|
||||
>;
|
||||
unsynced(
|
||||
after: number,
|
||||
chunkSize: number
|
||||
): IsAsync extends true
|
||||
? AsyncIterableIterator<MaybeDeletedItem<T>[]>
|
||||
: IterableIterator<MaybeDeletedItem<T>[]>;
|
||||
stream(): IsAsync extends true
|
||||
? AsyncIterableIterator<T>
|
||||
: IterableIterator<T>;
|
||||
}
|
||||
|
||||
export type DatabaseAccessor = () =>
|
||||
| Kysely<DatabaseSchema>
|
||||
| Transaction<DatabaseSchema>;
|
||||
|
||||
type FilterBooleanProperties<T> = keyof {
|
||||
[K in keyof T as T[K] extends boolean | undefined | null ? K : never]: T[K];
|
||||
type FilterBooleanProperties<T, Type> = keyof {
|
||||
[K in keyof T as T[K] extends Type ? K : never]: T[K];
|
||||
};
|
||||
|
||||
type BooleanFields = ValueOf<{
|
||||
[D in keyof DatabaseSchema]: FilterBooleanProperties<DatabaseSchema[D]>;
|
||||
[D in keyof DatabaseSchema]: FilterBooleanProperties<
|
||||
DatabaseSchema[D],
|
||||
boolean | undefined | null
|
||||
>;
|
||||
}>;
|
||||
|
||||
const BooleanProperties: BooleanFields[] = [
|
||||
// type ObjectFields = ValueOf<{
|
||||
// [D in keyof DatabaseSchema]: FilterBooleanProperties<
|
||||
// DatabaseSchema[D],
|
||||
// object | undefined | null
|
||||
// >;
|
||||
// }>;
|
||||
|
||||
const BooleanProperties: Set<BooleanFields> = new Set([
|
||||
"compressed",
|
||||
"conflicted",
|
||||
"deleted",
|
||||
@@ -124,7 +172,31 @@ const BooleanProperties: BooleanFields[] = [
|
||||
"readonly",
|
||||
"remote",
|
||||
"synced"
|
||||
];
|
||||
]);
|
||||
|
||||
const DataMappers: Partial<Record<ItemType, (row: any) => void>> = {
|
||||
reminder: (row) => {
|
||||
if (row.selectedDays) row.selectedDays = JSON.parse(row.selectedDays);
|
||||
},
|
||||
settingitem: (row) => {
|
||||
if (
|
||||
row.value &&
|
||||
(row.key.startsWith("groupOptions") ||
|
||||
row.key.startsWith("toolbarConfig"))
|
||||
)
|
||||
row.value = JSON.parse(row.value);
|
||||
},
|
||||
tiptap: (row) => {
|
||||
if (row.conflicted) row.conflicted = JSON.parse(row.conflicted);
|
||||
if (row.locked && row.data) row.data = JSON.parse(row.data);
|
||||
},
|
||||
sessioncontent: (row) => {
|
||||
if (row.locked && row.data) row.data = JSON.parse(row.data);
|
||||
},
|
||||
attachment: (row) => {
|
||||
if (row.key) row.key = JSON.parse(row.key);
|
||||
}
|
||||
};
|
||||
|
||||
export async function createDatabase(driver: Driver) {
|
||||
const db = new Kysely<DatabaseSchema>({
|
||||
@@ -150,6 +222,13 @@ export async function createDatabase(driver: Driver) {
|
||||
return db;
|
||||
}
|
||||
|
||||
export function isFalse<TB extends keyof DatabaseSchema>(
|
||||
column: ReferenceExpression<DatabaseSchema, TB>
|
||||
) {
|
||||
return (eb: ExpressionBuilder<DatabaseSchema, TB>) =>
|
||||
eb.or([eb(column, "is", eb.lit(null)), eb(column, "==", eb.lit(0))]);
|
||||
}
|
||||
|
||||
export class SqliteBooleanPlugin implements KyselyPlugin {
|
||||
readonly #transformer = new SqliteBooleanTransformer();
|
||||
|
||||
@@ -161,9 +240,15 @@ export class SqliteBooleanPlugin implements KyselyPlugin {
|
||||
args: PluginTransformResultArgs
|
||||
): Promise<QueryResult<UnknownRow>> {
|
||||
for (const row of args.result.rows) {
|
||||
for (const key of BooleanProperties) {
|
||||
const value = row[key];
|
||||
row[key] = value === 1 ? true : false;
|
||||
for (const key in row) {
|
||||
if (BooleanProperties.has(key as BooleanFields)) {
|
||||
row[key] = row[key] === 1 ? true : false;
|
||||
}
|
||||
}
|
||||
|
||||
const mapper = DataMappers[row.type as ItemType];
|
||||
if (row.type && mapper) {
|
||||
mapper(row);
|
||||
}
|
||||
}
|
||||
return Promise.resolve(args.result);
|
||||
@@ -174,7 +259,7 @@ class SqliteBooleanTransformer extends OperationNodeTransformer {
|
||||
transformValue(node: ValueNode): ValueNode {
|
||||
return {
|
||||
...super.transformValue(node),
|
||||
value: typeof node.value === "boolean" ? (node.value ? 1 : 0) : node.value
|
||||
value: this.serialize(node.value)
|
||||
};
|
||||
}
|
||||
|
||||
@@ -183,9 +268,17 @@ class SqliteBooleanTransformer extends OperationNodeTransformer {
|
||||
): PrimitiveValueListNode {
|
||||
return {
|
||||
...super.transformPrimitiveValueList(node),
|
||||
values: node.values.map((value) =>
|
||||
typeof value === "boolean" ? (value ? 1 : 0) : value
|
||||
)
|
||||
values: node.values.map((value) => this.serialize(value))
|
||||
};
|
||||
}
|
||||
|
||||
private serialize(value: unknown) {
|
||||
return typeof value === "boolean"
|
||||
? value
|
||||
? 1
|
||||
: 0
|
||||
: typeof value === "object" && value !== null
|
||||
? JSON.stringify(value)
|
||||
: value;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -17,7 +17,13 @@ You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
import { CreateTableBuilder, Migration, MigrationProvider, sql } from "kysely";
|
||||
import {
|
||||
CreateTableBuilder,
|
||||
Kysely,
|
||||
Migration,
|
||||
MigrationProvider,
|
||||
sql
|
||||
} from "kysely";
|
||||
|
||||
export class NNMigrationProvider implements MigrationProvider {
|
||||
async getMigrations(): Promise<Record<string, Migration>> {
|
||||
@@ -26,8 +32,9 @@ export class NNMigrationProvider implements MigrationProvider {
|
||||
async up(db) {
|
||||
await db.schema
|
||||
.createTable("notes")
|
||||
.modifyEnd(sql`without rowid`)
|
||||
// .modifyEnd(sql`without rowid`)
|
||||
.$call(addBaseColumns)
|
||||
.$call(addTrashColumns)
|
||||
.addColumn("title", "text")
|
||||
.addColumn("headline", "text")
|
||||
.addColumn("contentId", "text")
|
||||
@@ -38,34 +45,56 @@ export class NNMigrationProvider implements MigrationProvider {
|
||||
.addColumn("conflicted", "boolean")
|
||||
.addColumn("readonly", "boolean")
|
||||
.addColumn("dateEdited", "integer")
|
||||
.addColumn("dateDeleted", "integer")
|
||||
.addColumn("itemType", "text")
|
||||
.addForeignKeyConstraint(
|
||||
"note_has_content",
|
||||
["contentId"],
|
||||
"content",
|
||||
["id"],
|
||||
(b) => b.onDelete("restrict").onUpdate("restrict")
|
||||
)
|
||||
.execute();
|
||||
await createFTS5Table(db, "notes", ["title"]);
|
||||
|
||||
await db.schema
|
||||
.createTable("content")
|
||||
.modifyEnd(sql`without rowid`)
|
||||
// .modifyEnd(sql`without rowid`)
|
||||
.$call(addBaseColumns)
|
||||
.addColumn("noteId", "text")
|
||||
.addColumn("data", "text")
|
||||
.addColumn("locked", "boolean")
|
||||
.addColumn("localOnly", "boolean")
|
||||
.addColumn("conflicted", "text")
|
||||
.addColumn("sessionId", "text")
|
||||
.addColumn("dateEdited", "integer")
|
||||
.addColumn("dateResolved", "integer")
|
||||
.execute();
|
||||
await createFTS5Table(
|
||||
db,
|
||||
"content",
|
||||
["data"],
|
||||
["noteId"],
|
||||
["(new.locked is null or new.locked == 0)"]
|
||||
);
|
||||
|
||||
await db.schema
|
||||
.createTable("notehistory")
|
||||
.modifyEnd(sql`without rowid`)
|
||||
.$call(addBaseColumns)
|
||||
.addColumn("noteId", "text")
|
||||
.addColumn("sessionContentId", "text")
|
||||
.addColumn("localOnly", "boolean")
|
||||
.addColumn("locked", "boolean")
|
||||
.execute();
|
||||
|
||||
await db.schema
|
||||
.createTable("sessioncontent")
|
||||
.modifyEnd(sql`without rowid`)
|
||||
.$call(addBaseColumns)
|
||||
.addColumn("data", "text")
|
||||
.addColumn("contentType", "text")
|
||||
.addColumn("locked", "boolean")
|
||||
.addColumn("compressed", "boolean")
|
||||
.addColumn("localOnly", "boolean")
|
||||
.execute();
|
||||
|
||||
await db.schema
|
||||
.createTable("notebooks")
|
||||
.modifyEnd(sql`without rowid`)
|
||||
.$call(addBaseColumns)
|
||||
.$call(addTrashColumns)
|
||||
.addColumn("title", "text")
|
||||
.addColumn("description", "text")
|
||||
.addColumn("dateEdited", "text")
|
||||
@@ -116,7 +145,7 @@ export class NNMigrationProvider implements MigrationProvider {
|
||||
.addColumn("date", "integer")
|
||||
.addColumn("mode", "text")
|
||||
.addColumn("recurringMode", "text")
|
||||
.addColumn("selectedDays", "blob")
|
||||
.addColumn("selectedDays", "text")
|
||||
.addColumn("localOnly", "boolean")
|
||||
.addColumn("disabled", "boolean")
|
||||
.addColumn("snoozeUntil", "integer")
|
||||
@@ -141,6 +170,20 @@ export class NNMigrationProvider implements MigrationProvider {
|
||||
.addColumn("failed", "text")
|
||||
.execute();
|
||||
|
||||
await db.schema
|
||||
.createTable("settings")
|
||||
.modifyEnd(sql`without rowid`)
|
||||
.$call(addBaseColumns)
|
||||
.addColumn("key", "text")
|
||||
.addColumn("value", "text")
|
||||
.execute();
|
||||
|
||||
await db.schema
|
||||
.createIndex("notehistory_noteid")
|
||||
.on("notehistory")
|
||||
.column("noteId")
|
||||
.execute();
|
||||
|
||||
await db.schema
|
||||
.createIndex("relation_from_general")
|
||||
.on("relations")
|
||||
@@ -216,3 +259,63 @@ const addBaseColumns = <T extends string, C extends string = never>(
|
||||
.addColumn("synced", "boolean")
|
||||
.addColumn("deleted", "boolean");
|
||||
};
|
||||
|
||||
const addTrashColumns = <T extends string, C extends string = never>(
|
||||
builder: CreateTableBuilder<T, C>
|
||||
) => {
|
||||
return builder
|
||||
|
||||
.addColumn("dateDeleted", "integer")
|
||||
.addColumn("itemType", "text");
|
||||
};
|
||||
|
||||
async function createFTS5Table(
|
||||
db: Kysely<any>,
|
||||
table: string,
|
||||
indexedColumns: string[],
|
||||
unindexedColumns: string[] = [],
|
||||
insertConditions: string[] = []
|
||||
) {
|
||||
const ref = sql.raw(table);
|
||||
const ref_fts = sql.raw(table + "_fts");
|
||||
const ref_ai = sql.raw(table + "_ai");
|
||||
const ref_ad = sql.raw(table + "_ad");
|
||||
const ref_au = sql.raw(table + "_au");
|
||||
|
||||
const indexed_cols = sql.raw(indexedColumns.join(", "));
|
||||
const unindexed_cols =
|
||||
unindexedColumns.length > 0
|
||||
? sql.raw(unindexedColumns.join(" UNINDEXED,") + " UNINDEXED,")
|
||||
: sql.raw("");
|
||||
const new_indexed_cols = sql.raw(indexedColumns.join(", new."));
|
||||
const old_indexed_cols = sql.raw(indexedColumns.join(", old."));
|
||||
|
||||
await sql`CREATE VIRTUAL TABLE ${ref_fts} USING fts5(
|
||||
id UNINDEXED, ${unindexed_cols} ${indexed_cols}, content='${sql.raw(table)}'
|
||||
)`.execute(db);
|
||||
|
||||
insertConditions = [
|
||||
"(new.deleted is null or new.deleted == 0)",
|
||||
...insertConditions
|
||||
];
|
||||
await sql`CREATE TRIGGER ${ref_ai} AFTER INSERT ON ${ref} WHEN ${sql.raw(
|
||||
insertConditions.join(" AND ")
|
||||
)}
|
||||
BEGIN
|
||||
INSERT INTO ${ref_fts}(rowid, id, ${indexed_cols}) VALUES (new.rowid, new.id, new.${new_indexed_cols});
|
||||
END;`.execute(db);
|
||||
|
||||
await sql`CREATE TRIGGER ${ref_ad} AFTER DELETE ON ${ref}
|
||||
BEGIN
|
||||
INSERT INTO ${ref_fts} (${ref_fts}, rowid, id, ${indexed_cols})
|
||||
VALUES ('delete', old.rowid, old.id, old.${old_indexed_cols});
|
||||
END;`.execute(db);
|
||||
|
||||
await sql`CREATE TRIGGER ${ref_au} AFTER UPDATE ON ${ref}
|
||||
BEGIN
|
||||
INSERT INTO ${ref_fts} (${ref_fts}, rowid, id, ${indexed_cols})
|
||||
VALUES ('delete', old.rowid, old.id, old.${old_indexed_cols});
|
||||
INSERT INTO ${ref_fts} (rowid, id, ${indexed_cols})
|
||||
VALUES (new.rowid, new.id, new.${new_indexed_cols});
|
||||
END;`.execute(db);
|
||||
}
|
||||
|
||||
@@ -17,7 +17,7 @@ You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
import { MaybeDeletedItem, isDeleted } from "../types";
|
||||
import { GroupOptions, MaybeDeletedItem, isDeleted } from "../types";
|
||||
import EventManager from "../utils/event-manager";
|
||||
import { DatabaseAccessor, DatabaseCollection, DatabaseSchema } from ".";
|
||||
import { SQLCollection } from "./sql-collection";
|
||||
@@ -110,6 +110,44 @@ export class SQLCachedCollection<
|
||||
}
|
||||
}
|
||||
|
||||
ids(_options: GroupOptions): string[] {
|
||||
return Array.from(this.cache.keys());
|
||||
}
|
||||
|
||||
items(
|
||||
ids: string[],
|
||||
_sortOptions?: GroupOptions
|
||||
): Record<string, MaybeDeletedItem<T> | undefined> {
|
||||
const items: Record<string, MaybeDeletedItem<T> | undefined> = {};
|
||||
for (const id of ids) {
|
||||
items[id] = this.cache.get(id);
|
||||
}
|
||||
return items;
|
||||
}
|
||||
|
||||
*unsynced(
|
||||
after: number,
|
||||
chunkSize: number
|
||||
): IterableIterator<MaybeDeletedItem<T>[]> {
|
||||
let chunk: MaybeDeletedItem<T>[] = [];
|
||||
for (const [_key, value] of this.cache) {
|
||||
if (value.dateModified && value.dateModified > after) {
|
||||
chunk.push(value);
|
||||
if (chunk.length === chunkSize) {
|
||||
yield chunk;
|
||||
chunk = [];
|
||||
}
|
||||
}
|
||||
}
|
||||
if (chunk.length > 0) yield chunk;
|
||||
}
|
||||
|
||||
*stream(): IterableIterator<T> {
|
||||
for (const [_key, value] of this.cache) {
|
||||
if (!value.deleted) yield value as T;
|
||||
}
|
||||
}
|
||||
|
||||
// has(id: string) {
|
||||
// return this.cache.has(id);
|
||||
// }
|
||||
|
||||
@@ -18,14 +18,16 @@ along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
import { EVENTS } from "../common";
|
||||
import { isDeleted } from "../types";
|
||||
import { GroupOptions, MaybeDeletedItem, isDeleted } from "../types";
|
||||
import EventManager from "../utils/event-manager";
|
||||
import {
|
||||
DatabaseAccessor,
|
||||
DatabaseCollection,
|
||||
DatabaseSchema,
|
||||
SQLiteItem
|
||||
SQLiteItem,
|
||||
isFalse
|
||||
} from ".";
|
||||
import { ExpressionOrFactory, SelectQueryBuilder, SqlBool } from "kysely";
|
||||
|
||||
export class SQLCollection<
|
||||
TCollectionType extends keyof DatabaseSchema,
|
||||
@@ -91,6 +93,7 @@ export class SQLCollection<
|
||||
.selectFrom<keyof DatabaseSchema>(this.type)
|
||||
.select((a) => a.fn.count<number>("id").as("count"))
|
||||
.where("id", "==", id)
|
||||
.where("deleted", "is", null)
|
||||
.limit(1)
|
||||
.executeTakeFirst()) || {};
|
||||
|
||||
@@ -145,4 +148,173 @@ export class SQLCollection<
|
||||
})
|
||||
.execute();
|
||||
}
|
||||
|
||||
async ids(sortOptions: GroupOptions): Promise<string[]> {
|
||||
const ids = await this.db()
|
||||
.selectFrom<keyof DatabaseSchema>(this.type)
|
||||
.select("id")
|
||||
.where("deleted", "is", null)
|
||||
.$if(this.type === "notes" || this.type === "notebooks", (eb) =>
|
||||
eb.where("dateDeleted", "is", null)
|
||||
)
|
||||
.orderBy(sortOptions.sortBy, sortOptions.sortDirection)
|
||||
.execute();
|
||||
return ids.map((id) => id.id);
|
||||
}
|
||||
|
||||
async items(
|
||||
ids: string[],
|
||||
sortOptions?: GroupOptions
|
||||
): Promise<Record<string, MaybeDeletedItem<T> | undefined>> {
|
||||
const results = await this.db()
|
||||
.selectFrom<keyof DatabaseSchema>(this.type)
|
||||
.selectAll()
|
||||
.where("id", "in", ids)
|
||||
.$if(!!sortOptions, (eb) =>
|
||||
eb.orderBy(sortOptions!.sortBy, sortOptions!.sortDirection)
|
||||
)
|
||||
.execute();
|
||||
const items: Record<string, MaybeDeletedItem<T>> = {};
|
||||
for (const item of results) {
|
||||
items[item.id] = item as MaybeDeletedItem<T>;
|
||||
}
|
||||
return items;
|
||||
}
|
||||
|
||||
async *unsynced(
|
||||
after: number,
|
||||
chunkSize: number
|
||||
): AsyncIterableIterator<MaybeDeletedItem<T>[]> {
|
||||
let index = 0;
|
||||
while (true) {
|
||||
const rows = await this.db()
|
||||
.selectFrom<keyof DatabaseSchema>(this.type)
|
||||
.selectAll()
|
||||
.orderBy("dateModified", "asc")
|
||||
.$if(after > 0, (eb) =>
|
||||
eb
|
||||
.where("dateModified", ">", after)
|
||||
.where((eb) =>
|
||||
eb.or([eb("synced", "is", null), eb("synced", "==", false)])
|
||||
)
|
||||
)
|
||||
.$if(this.type === "attachments", (eb) =>
|
||||
eb.where("dateUploaded", ">", 0)
|
||||
)
|
||||
.offset(index)
|
||||
.limit(chunkSize)
|
||||
.execute();
|
||||
if (rows.length === 0) break;
|
||||
index += chunkSize;
|
||||
yield rows as MaybeDeletedItem<T>[];
|
||||
}
|
||||
}
|
||||
|
||||
async *stream(): AsyncIterableIterator<T> {
|
||||
let index = 0;
|
||||
const chunkSize = 50;
|
||||
while (true) {
|
||||
const rows = await this.db()
|
||||
.selectFrom<keyof DatabaseSchema>(this.type)
|
||||
.where(isFalse("deleted"))
|
||||
.orderBy("dateModified desc")
|
||||
.selectAll()
|
||||
.offset(index)
|
||||
.limit(chunkSize)
|
||||
.execute();
|
||||
if (rows.length === 0) break;
|
||||
index += chunkSize;
|
||||
for (const row of rows) {
|
||||
yield row as T;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
createFilter<T>(
|
||||
selector: (
|
||||
qb: SelectQueryBuilder<DatabaseSchema, keyof DatabaseSchema, unknown>
|
||||
) => SelectQueryBuilder<DatabaseSchema, keyof DatabaseSchema, unknown>,
|
||||
batchSize = 50
|
||||
) {
|
||||
return new FilteredSelector<T>(
|
||||
this.db().selectFrom<keyof DatabaseSchema>(this.type).$call(selector),
|
||||
batchSize
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export class FilteredSelector<T> {
|
||||
constructor(
|
||||
readonly filter: SelectQueryBuilder<
|
||||
DatabaseSchema,
|
||||
keyof DatabaseSchema,
|
||||
unknown
|
||||
>,
|
||||
readonly batchSize: number
|
||||
) {}
|
||||
|
||||
async ids() {
|
||||
return (await this.filter.select("id").execute()).map((i) => i.id);
|
||||
}
|
||||
|
||||
async items(ids?: string[]) {
|
||||
return (await this.filter
|
||||
.$if(!!ids && ids.length > 0, (eb) => eb.where("id", "in", ids!))
|
||||
.selectAll()
|
||||
.execute()) as T[];
|
||||
}
|
||||
|
||||
async has(id: string) {
|
||||
const { count } =
|
||||
(await this.filter
|
||||
.where("id", "==", id)
|
||||
.limit(1)
|
||||
.select((a) => a.fn.count<number>("id").as("count"))
|
||||
.executeTakeFirst()) || {};
|
||||
return count !== undefined && count > 0;
|
||||
}
|
||||
|
||||
async count() {
|
||||
const { count } =
|
||||
(await this.filter
|
||||
.select((a) => a.fn.count<number>("id").as("count"))
|
||||
.executeTakeFirst()) || {};
|
||||
return count || 0;
|
||||
}
|
||||
|
||||
async find(
|
||||
filter: ExpressionOrFactory<DatabaseSchema, keyof DatabaseSchema, SqlBool>
|
||||
) {
|
||||
const item = await this.filter
|
||||
.where(filter)
|
||||
.limit(1)
|
||||
.selectAll()
|
||||
.executeTakeFirst();
|
||||
return item as T | undefined;
|
||||
}
|
||||
|
||||
async *map<TReturnType>(
|
||||
fn: (item: T) => TReturnType
|
||||
): AsyncIterableIterator<TReturnType> {
|
||||
for await (const item of this) {
|
||||
yield fn(item);
|
||||
}
|
||||
}
|
||||
|
||||
async *[Symbol.asyncIterator]() {
|
||||
let index = 0;
|
||||
while (true) {
|
||||
const rows = await this.filter
|
||||
.selectAll()
|
||||
.orderBy("dateModified asc")
|
||||
.offset(index)
|
||||
.limit(this.batchSize)
|
||||
.execute();
|
||||
if (rows.length === 0) break;
|
||||
index += this.batchSize;
|
||||
for (const row of rows) {
|
||||
yield row as T;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -131,11 +131,8 @@ const migrations: Migration[] = [
|
||||
for (const pin of item.pins) {
|
||||
if (!pin.data) continue;
|
||||
await db.shortcuts.add({
|
||||
item: {
|
||||
type: pin.type,
|
||||
id: pin.data.id,
|
||||
notebookId: pin.data.notebookId
|
||||
}
|
||||
itemId: pin.data.id,
|
||||
itemType: pin.type === "topic" ? "notebook" : pin.type
|
||||
});
|
||||
}
|
||||
delete item.pins;
|
||||
@@ -156,7 +153,7 @@ const migrations: Migration[] = [
|
||||
return oldType !== item.type;
|
||||
},
|
||||
shortcut: (item) => {
|
||||
if (item.id === item.item.id) return false;
|
||||
if (!item.item || item.id === item.item.id) return false;
|
||||
item.id = item.item.id;
|
||||
return true;
|
||||
},
|
||||
@@ -192,12 +189,12 @@ const migrations: Migration[] = [
|
||||
const alias = db.legacySettings.getAlias(item.id);
|
||||
if (
|
||||
!alias &&
|
||||
(db.tags.all.find(
|
||||
(t) => item.title === t.title && t.id !== oldTagId
|
||||
) ||
|
||||
db.colors.all.find(
|
||||
(t) => item.title === t.title && t.id !== oldTagId
|
||||
))
|
||||
(db.legacyTags
|
||||
.items()
|
||||
.find((t) => item.title === t.title && t.id !== oldTagId) ||
|
||||
db.legacyColors
|
||||
.items()
|
||||
.find((t) => item.title === t.title && t.id !== oldTagId))
|
||||
)
|
||||
return false;
|
||||
|
||||
@@ -218,10 +215,10 @@ const migrations: Migration[] = [
|
||||
note: async (item, db) => {
|
||||
for (const tag of item.tags || []) {
|
||||
const oldTagId = makeId(tag);
|
||||
const oldTag = db.tags.tag(oldTagId);
|
||||
const oldTag = db.legacyTags.get(oldTagId);
|
||||
const alias = db.legacySettings.getAlias(oldTagId);
|
||||
const newTag = db.tags.all.find(
|
||||
(t) => [alias, tag].includes(t.title) && t.id !== oldTagId
|
||||
const newTag = await db.tags.all.find((eb) =>
|
||||
eb.or([eb("title", "in", [alias, tag])])
|
||||
);
|
||||
|
||||
const newTagId =
|
||||
@@ -234,16 +231,17 @@ const migrations: Migration[] = [
|
||||
}));
|
||||
if (!newTagId) continue;
|
||||
await db.relations.add({ type: "tag", id: newTagId }, item);
|
||||
await db.tags.delete(oldTagId);
|
||||
await db.legacyTags.delete(oldTagId);
|
||||
}
|
||||
|
||||
if (item.color) {
|
||||
const oldColorId = makeId(item.color);
|
||||
const oldColor = db.colors.color(oldColorId);
|
||||
const oldColor = db.legacyColors.get(oldColorId);
|
||||
const alias = db.legacySettings.getAlias(oldColorId);
|
||||
const newColor = db.colors.all.find(
|
||||
(t) => [alias, item.color].includes(t.title) && t.id !== oldColorId
|
||||
const newColor = await db.colors.all.find((eb) =>
|
||||
eb.or([eb("title", "in", [alias, item.color])])
|
||||
);
|
||||
|
||||
const newColorId =
|
||||
newColor?.id ||
|
||||
(await db.colors.add({
|
||||
@@ -255,7 +253,7 @@ const migrations: Migration[] = [
|
||||
}));
|
||||
if (newColorId) {
|
||||
await db.relations.add({ type: "color", id: newColorId }, item);
|
||||
await db.colors.delete(oldColorId);
|
||||
await db.legacyColors.delete(oldColorId);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -279,6 +277,18 @@ const migrations: Migration[] = [
|
||||
{ type: "note", id: noteId }
|
||||
);
|
||||
}
|
||||
|
||||
if (item.metadata) {
|
||||
item.hash = item.metadata.hash;
|
||||
item.mimeType = item.metadata.type;
|
||||
item.hashType = item.metadata.hashType;
|
||||
item.filename = item.metadata.filename;
|
||||
}
|
||||
|
||||
if (item.length) item.size = item.length;
|
||||
|
||||
delete item.length;
|
||||
delete item.metadata;
|
||||
delete item.noteIds;
|
||||
return true;
|
||||
},
|
||||
@@ -353,7 +363,12 @@ const migrations: Migration[] = [
|
||||
delete item.to;
|
||||
delete item.from;
|
||||
return true;
|
||||
}
|
||||
},
|
||||
tiptap: (item) => {
|
||||
item.locked = isCipher(item.data);
|
||||
return true;
|
||||
},
|
||||
all: () => true
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -394,7 +409,10 @@ export async function migrateItem<TItemType extends MigrationItemType>(
|
||||
|
||||
const itemMigrator = migration.items[type];
|
||||
if (!itemMigrator) continue;
|
||||
if (await itemMigrator(item, database, migrationType)) count++;
|
||||
if (await itemMigrator(item, database, migrationType)) {
|
||||
if (item.type && item.type !== type) type = item.type as TItemType;
|
||||
count++;
|
||||
}
|
||||
}
|
||||
|
||||
return count > 0;
|
||||
|
||||
@@ -224,12 +224,9 @@ export interface Attachment extends BaseItem<"attachment"> {
|
||||
hash: string;
|
||||
hashType: string;
|
||||
mimeType: string;
|
||||
encryptionKey: string;
|
||||
|
||||
/**
|
||||
* @deprecated only kept here for migration purposes
|
||||
*/
|
||||
key?: Cipher<"base64">;
|
||||
key: Cipher<"base64">;
|
||||
|
||||
/**
|
||||
* @deprecated only kept here for migration purposes
|
||||
*/
|
||||
@@ -335,24 +332,27 @@ export interface Reminder extends BaseItem<"reminder"> {
|
||||
}
|
||||
|
||||
export type ContentType = "tiptap" | "tiny";
|
||||
export interface ContentItem extends BaseItem<ContentType> {
|
||||
export interface BaseContentItem extends BaseItem<ContentType> {
|
||||
noteId: string;
|
||||
data: string | Cipher<"base64">;
|
||||
dateEdited: number;
|
||||
localOnly: boolean;
|
||||
conflicted?: ContentItem;
|
||||
dateResolved?: number;
|
||||
sessionId?: string;
|
||||
conflicted?: UnencryptedContentItem;
|
||||
}
|
||||
|
||||
export type UnencryptedContentItem = Omit<ContentItem, "data"> & {
|
||||
export type UnencryptedContentItem = BaseContentItem & {
|
||||
data: string;
|
||||
locked: false;
|
||||
};
|
||||
|
||||
export type EncryptedContentItem = Omit<ContentItem, "data"> & {
|
||||
export type EncryptedContentItem = BaseContentItem & {
|
||||
data: Cipher<"base64">;
|
||||
locked: true;
|
||||
};
|
||||
|
||||
export type ContentItem = EncryptedContentItem | UnencryptedContentItem;
|
||||
|
||||
export interface HistorySession extends BaseItem<"session"> {
|
||||
sessionContentId: string;
|
||||
noteId: string;
|
||||
|
||||
@@ -72,3 +72,20 @@ export function* chunkedIterate<T>(array: T[], chunkSize: number) {
|
||||
yield chunk;
|
||||
}
|
||||
}
|
||||
|
||||
export async function* chunkify<T>(
|
||||
iterator: AsyncIterableIterator<T> | IterableIterator<T>,
|
||||
chunkSize: number
|
||||
) {
|
||||
let chunk: T[] = [];
|
||||
for await (const item of iterator) {
|
||||
chunk.push(item);
|
||||
if (chunk.length === chunkSize) {
|
||||
yield chunk;
|
||||
chunk = [];
|
||||
}
|
||||
}
|
||||
if (chunk.length > 0) {
|
||||
yield chunk;
|
||||
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user