Mirror of https://github.com/streetwriters/notesnook.git (synced 2025-12-23 15:09:33 +01:00)
core: changes according to client needs
@@ -28,7 +28,7 @@ import { EventSourcePolyfill as EventSource } from "event-source-polyfill";
import { randomBytes } from "../../src/utils/random";
import { GroupOptions, Note, Notebook } from "../../src/types";
import { NoteContent } from "../../src/collections/session-content";
import { SqliteDriver } from "kysely";
import { SqliteDialect } from "kysely";
import BetterSQLite3 from "better-sqlite3";

const TEST_NOTEBOOK: Partial<Notebook> = {
@@ -48,7 +48,7 @@ function databaseTest() {
eventsource: EventSource,
fs: FS,
compressor: Compressor,
sqlite: new SqliteDriver({ database: BetterSQLite3(":memory:") })
dialect: new SqliteDialect({ database: BetterSQLite3(":memory:") })
});
return db.init().then(() => db);
}

@@ -1,7 +1,7 @@
{
"name": "@notesnook/core",
"version": "7.4.1",
"main": "dist/api/index.js",
"main": "dist/index.js",
"license": "GPL-3.0-or-later",
"repository": {
"type": "git",

@@ -60,8 +60,13 @@ import {
import TokenManager from "./token-manager";
import { Attachment } from "../types";
import { Settings } from "../collections/settings";
import { DatabaseAccessor, DatabaseSchema, createDatabase } from "../database";
import { Kysely, SqliteDriver, Transaction } from "kysely";
import {
DatabaseAccessor,
DatabaseSchema,
SQLiteOptions,
createDatabase
} from "../database";
import { Kysely, Transaction } from "kysely";
import { CachedCollection } from "../database/cached-collection";

type EventSourceConstructor = new (
@@ -69,11 +74,12 @@ type EventSourceConstructor = new (
init: EventSourceInit & { headers?: Record<string, string> }
) => EventSource;
type Options = {
sqlite: SqliteDriver;
sqliteOptions: SQLiteOptions;
storage: IStorage;
eventsource?: EventSourceConstructor;
fs: IFileStorage;
compressor: ICompressor;
batchSize: number;
};
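For clients updating to this shape, a minimal sketch of the new options object (the storage, file-storage, and compressor instances below are placeholders, not part of this diff):

import { SqliteDialect } from "kysely";
import BetterSQLite3 from "better-sqlite3";

// Hypothetical client setup: a Kysely Dialect is now passed instead of a Driver.
const options: Options = {
  sqliteOptions: {
    dialect: new SqliteDialect({ database: BetterSQLite3(":memory:") }),
    journalMode: "WAL",
    synchronous: "normal"
  },
  storage: myStorage, // IStorage implementation (placeholder)
  fs: myFileStorage, // IFileStorage implementation (placeholder)
  compressor: myCompressor, // ICompressor implementation (placeholder)
  batchSize: 500
};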

// const DIFFERENCE_THRESHOLD = 20 * 1000;
@@ -131,6 +137,7 @@ class Database {
transaction = (
executor: (tr: Transaction<DatabaseSchema>) => void | Promise<void>
) => {
console.time("transaction");
return this.transactionMutex.runExclusive(() =>
this.sql()
.transaction()
@@ -139,11 +146,14 @@ class Database {
await executor(tr);
this._transaction = undefined;
})
.finally(() => (this._transaction = undefined))
.finally(() => {
console.timeEnd("transaction");
this._transaction = undefined;
})
);
};

private options?: Options;
options?: Options;
EventSource?: EventSourceConstructor;
eventSource?: EventSource | null;

@@ -160,7 +170,6 @@ class Database {
vault = new Vault(this);
lookup = new Lookup(this);
backup = new Backup(this);
legacySettings = new LegacySettings(this);
settings = new Settings(this);
migrations = new Migrations(this);
monographs = new Monographs(this);
@@ -193,7 +202,10 @@ class Database {
* @deprecated only kept here for migration purposes
*/
legacyNotes = new CachedCollection(this.storage, "notes", this.eventManager);

/**
* @deprecated only kept here for migration purposes
*/
legacySettings = new LegacySettings(this);
// constructor() {
// this.sseMutex = new Mutex();
// // this.lastHeartbeat = undefined; // { local: 0, server: 0 };
@@ -229,7 +241,8 @@ class Database {
this.disconnectSSE();
});

if (this.options) this._sql = await createDatabase(this.options.sqlite);
if (this.options)
this._sql = await createDatabase(this.options.sqliteOptions);

await this._validate();


@@ -20,9 +20,10 @@ along with this program. If not, see <http://www.gnu.org/licenses/>.
import http from "../utils/http";
import Constants from "../utils/constants";
import Database from ".";
import { isDeleted } from "../types";
import { Note, isDeleted } from "../types";
import { isUnencryptedContent } from "../collections/content";
import { Cipher } from "@notesnook/crypto";
import { isFalse } from "../database";

type BaseMonograph = {
id: string;
@@ -163,9 +164,15 @@ export class Monographs {
this.monographs.splice(this.monographs.indexOf(noteId), 1);
}

async all() {
if (!this.monographs.length) return [];
return await this.db.notes.all.items(this.monographs);
get all() {
return this.db.notes.collection.createFilter<Note>(
(qb) =>
qb
.where(isFalse("dateDeleted"))
.where(isFalse("deleted"))
.where("id", "in", this.monographs),
this.db.options?.batchSize
);
}
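Since `all` is now a getter that returns a filtered selector instead of an async method, callers resolve items explicitly. A hedged sketch of the likely call sites (the FilteredSelector API is introduced later in this commit):

// Before: const notes = await db.monographs.all();
// After:
const monographedNotes = await db.monographs.all.items();
const monographIds = await db.monographs.all.ids({
  groupBy: "none",
  sortBy: "dateEdited",
  sortDirection: "desc"
});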

get(monographId: string) {

@@ -380,7 +380,7 @@ class Sync {

const collectionType = SYNC_COLLECTIONS_MAP[itemType];
const collection = this.db[collectionType].collection;
const localItems = await collection.items(chunk.items.map((i) => i.id));
const localItems = await collection.records(chunk.items.map((i) => i.id));
let items: (MaybeDeletedItem<Item> | undefined)[] = [];
if (itemType === "content") {
items = await Promise.all(

@@ -365,8 +365,9 @@ export class Attachments implements ICollection {
}

get pending() {
return this.collection.createFilter<Attachment>((qb) =>
qb.where(isFalse("dateUploaded"))
return this.collection.createFilter<Attachment>(
(qb) => qb.where(isFalse("dateUploaded")),
this.db.options?.batchSize
);
}

@@ -383,8 +384,9 @@ export class Attachments implements ICollection {
// }

get deleted() {
return this.collection.createFilter<Attachment>((qb) =>
qb.where("dateDeleted", "is not", null)
return this.collection.createFilter<Attachment>(
(qb) => qb.where("dateDeleted", "is not", null),
this.db.options?.batchSize
);
}

@@ -412,8 +414,9 @@ export class Attachments implements ICollection {
// }

get all() {
return this.collection.createFilter<Attachment>((qb) =>
qb.where(isFalse("deleted"))
return this.collection.createFilter<Attachment>(
(qb) => qb.where(isFalse("deleted")),
this.db.options?.batchSize
);
}


@@ -87,8 +87,9 @@ export class Colors implements ICollection {
// }

get all() {
return this.collection.createFilter<Color>((qb) =>
qb.where(isFalse("deleted"))
return this.collection.createFilter<Color>(
(qb) => qb.where(isFalse("deleted")),
this.db.options?.batchSize
);
}


@@ -33,6 +33,7 @@ import {
import Database from "../api";
import { getOutputType } from "./attachments";
import { SQLCollection } from "../database/sql-collection";
import { NoteContent } from "./session-content";

export const EMPTY_CONTENT = (noteId: string): UnencryptedContentItem => ({
noteId,
@@ -73,49 +74,67 @@ export class Content implements ICollection {
);

const id = content.id || getId();
const oldContent = content.id ? await this.get(content.id) : undefined;
const noteId = oldContent?.noteId || content.noteId;
if (!noteId) throw new Error("No noteId found to link the content to.");

const encryptedData = isCipher(content.data)
? content.data
: oldContent && isCipher(oldContent.data)
? oldContent.data
: null;
if (!content.noteId)
throw new Error("No noteId found to link the content to.");
if (!content.type) throw new Error("Please specify content's type.");

const unencryptedData =
typeof content.data === "string"
? content.data
: oldContent && typeof oldContent.data === "string"
? oldContent.data
: "<p></p>";
const encryptedData = isCipher(content.data) ? content.data : undefined;
let unencryptedData =
typeof content.data === "string" ? content.data : undefined;

const contentItem: ContentItem = {
type: "tiptap",
noteId,
id,
if (unencryptedData)
unencryptedData = await this.extractAttachments({
type: content.type,
data: unencryptedData,
noteId: content.noteId
});

dateEdited: content.dateEdited || oldContent?.dateEdited || Date.now(),
dateCreated: content.dateCreated || oldContent?.dateCreated || Date.now(),
dateModified: Date.now(),
localOnly: content.localOnly || !!oldContent?.localOnly,
if (content.id && (await this.exists(content.id))) {
const contentData = encryptedData
? { locked: true as const, data: encryptedData }
: unencryptedData
? { locked: false as const, data: unencryptedData }
: undefined;

conflicted: content.conflicted || oldContent?.conflicted,
dateResolved: content.dateResolved || oldContent?.dateResolved,
await this.collection.update([content.id], {
dateEdited: content.dateEdited,
localOnly: content.localOnly,
conflicted: content.conflicted,
dateResolved: content.dateResolved,
...contentData
});

...(encryptedData
? { locked: true, data: encryptedData }
: { locked: false, data: unencryptedData })
};
if (content.sessionId && contentData)
await this.db.noteHistory.add(content.sessionId, {
noteId: content.noteId,
type: content.type,
...contentData
});
} else {
const contentItem: ContentItem = {
type: "tiptap",
noteId: content.noteId,
id,

await this.collection.upsert(
contentItem.locked
? contentItem
: await this.extractAttachments(contentItem)
);
dateEdited: content.dateEdited || Date.now(),
dateCreated: content.dateCreated || Date.now(),
dateModified: Date.now(),
localOnly: !!content.localOnly,

if (content.sessionId)
await this.db.noteHistory.add(content.sessionId, contentItem);
conflicted: content.conflicted,
dateResolved: content.dateResolved,

...(encryptedData
? { locked: true, data: encryptedData }
: { locked: false, data: unencryptedData || "<p></p>" })
};

await this.collection.upsert(contentItem);

if (content.sessionId)
await this.db.noteHistory.add(content.sessionId, contentItem);
}

return id;
}
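In short, add() now updates in place when the target content row already exists and builds a full ContentItem only for new rows. A hedged sketch of the two call paths as a caller might use them (ids and markup are illustrative):

// Creating content for a note (inserts a new row; the id is generated):
const contentId = await db.content.add({
  noteId,
  type: "tiptap",
  data: "<p>Hello</p>"
});

// Editing the same content later (updates the existing row by id):
await db.content.add({
  id: contentId,
  noteId,
  type: "tiptap",
  data: "<p>Hello world</p>",
  dateEdited: Date.now()
});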
@@ -254,18 +273,21 @@ export class Content implements ICollection {
await this.add(contentItem);
}

async extractAttachments(contentItem: UnencryptedContentItem) {
if (contentItem.localOnly) return contentItem;
async extractAttachments(
contentItem: NoteContent<false> & { noteId: string }
) {
// if (contentItem.localOnly) return contentItem;

const content = getContentFromData(contentItem.type, contentItem.data);
if (!content) return contentItem;
if (!content) return contentItem.data;
const { data, hashes } = await content.extractAttachments(
this.db.attachments.save
);

const noteAttachments = await this.db.relations
.from({ type: "note", id: contentItem.noteId }, "attachment")
.resolve();
.selector.filter.select(["id", "hash"])
.execute();

const toDelete = noteAttachments.filter((attachment) => {
return hashes.every((hash) => hash !== attachment.hash);
@@ -281,11 +303,12 @@ export class Content implements ICollection {
id: contentItem.noteId,
type: "note"
},
attachment
{ id: attachment.id, type: "attachment" }
);
}

for (const hash of toAdd) {
// TODO: only get id instead of the whole object
const attachment = await this.db.attachments.attachment(hash);
if (!attachment) continue;
await this.db.relations.add(
@@ -297,11 +320,11 @@ export class Content implements ICollection {
);
}

if (toAdd.length > 0) {
contentItem.dateModified = Date.now();
}
contentItem.data = data;
return contentItem;
// if (toAdd.length > 0) {
// contentItem.dateModified = Date.now();
// }
// contentItem.data = data;
return data;
}

// async cleanup() {

@@ -20,10 +20,10 @@ along with this program. If not, see <http://www.gnu.org/licenses/>.
import Database from "../api";
import { isCipher } from "../database/crypto";
import { SQLCollection } from "../database/sql-collection";
import { ContentItem, HistorySession, isDeleted } from "../types";
import { HistorySession, isDeleted } from "../types";
import { makeSessionContentId } from "../utils/id";
import { ICollection } from "./collection";
import { SessionContent } from "./session-content";
import { NoteContent, SessionContent } from "./session-content";

export class NoteHistory implements ICollection {
name = "notehistory";
@@ -60,39 +60,56 @@ export class NoteHistory implements ICollection {
return history as HistorySession[];
}

async add(sessionId: string, content: ContentItem) {
async add(
sessionId: string,
content: NoteContent<boolean> & { noteId: string; locked: boolean }
) {
const { noteId, locked } = content;
sessionId = `${noteId}_${sessionId}`;
const oldSession = await this.collection.get(sessionId);

if (oldSession && isDeleted(oldSession)) return;

const session: HistorySession = {
type: "session",
id: sessionId,
sessionContentId: makeSessionContentId(sessionId),
noteId,
dateCreated: oldSession ? oldSession.dateCreated : Date.now(),
dateModified: Date.now(),
localOnly: true,
locked
};

await this.collection.upsert(session);
if (await this.collection.exists(sessionId)) {
await this.collection.update([sessionId], { locked });
} else {
await this.collection.upsert({
type: "session",
id: sessionId,
sessionContentId: makeSessionContentId(sessionId),
noteId,
dateCreated: Date.now(),
dateModified: Date.now(),
localOnly: true,
locked
});
}
await this.sessionContent.add(sessionId, content, locked);
await this.cleanup(noteId);

return session;
return sessionId;
}
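The new signature accepts plain note content plus a locked flag instead of a full ContentItem, and returns the namespaced session id. A hedged usage sketch (ids and markup are illustrative):

const sessionId = await db.noteHistory.add("session-1", {
  noteId,
  type: "tiptap",
  data: "<p>previous revision</p>",
  locked: false
});
// sessionId is `${noteId}_session-1`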

private async cleanup(noteId: string, limit = this.versionsLimit) {
const history = await this.get(noteId, "asc");
if (history.length === 0 || history.length < limit) return;
const deleteCount = history.length - limit;
for (let i = 0; i < deleteCount; i++) {
const session = history[i];
const history = await this.db
.sql()
.selectFrom("notehistory")
.where("noteId", "==", noteId)
.orderBy(`dateModified asc`)
.select(["id", "sessionContentId"])
.offset(limit)
.limit(10)
.$narrowType<{ id: string; sessionContentId: string }>()
.execute();

for (const session of history) {
await this._remove(session);
}

// const history = await this.get(noteId, "asc");
// if (history.length === 0 || history.length < limit) return;
// const deleteCount = history.length - limit;
// for (let i = 0; i < deleteCount; i++) {
// const session = history[i];
// await this._remove(session);
// }
}

async content(sessionId: string) {
@@ -131,7 +148,7 @@ export class NoteHistory implements ICollection {
});
}

private async _remove(session: HistorySession) {
private async _remove(session: { id: string; sessionContentId: string }) {
await this.collection.delete([session.id]);
await this.sessionContent.remove(session.sessionContentId);
}

@@ -79,17 +79,20 @@ export class Notebooks implements ICollection {
// }

get all() {
return this.collection.createFilter<Notebook>((qb) =>
qb.where(isFalse("dateDeleted")).where(isFalse("deleted"))
return this.collection.createFilter<Notebook>(
(qb) => qb.where(isFalse("dateDeleted")).where(isFalse("deleted")),
this.db.options?.batchSize
);
}

get pinned() {
return this.collection.createFilter<Notebook>((qb) =>
qb
.where(isFalse("dateDeleted"))
.where(isFalse("deleted"))
.where("pinned", "==", true)
return this.collection.createFilter<Notebook>(
(qb) =>
qb
.where(isFalse("dateDeleted"))
.where(isFalse("deleted"))
.where("pinned", "==", true),
this.db.options?.batchSize
);
}


@@ -62,6 +62,7 @@ export class Notes implements ICollection {
throw new Error("Please use db.notes.merge to merge remote notes.");

const id = item.id || getId();

const oldNote = await this.note(id);

const note = {
@@ -74,59 +75,61 @@ export class Notes implements ICollection {
if (!oldNote && !item.content && !item.contentId && !item.title)
throw new Error("Note must have a title or content.");

if (item.content && item.content.data && item.content.type) {
const { type, data } = item.content;
await this.db.transaction(async () => {
if (item.content && item.content.data && item.content.type) {
const { type, data } = item.content;

const content = getContentFromData(type, data);
if (!content) throw new Error("Invalid content type.");
const content = getContentFromData(type, data);
if (!content) throw new Error("Invalid content type.");

note.contentId = await this.db.content.add({
noteId: id,
sessionId: note.sessionId,
id: note.contentId,
type,
data,
localOnly: !!note.localOnly
note.contentId = await this.db.content.add({
noteId: id,
sessionId: note.sessionId,
id: note.contentId,
type,
data,
localOnly: !!note.localOnly
});

note.headline = note.locked ? "" : getNoteHeadline(content);
if (oldNote) note.dateEdited = Date.now();
}

if (item.localOnly !== undefined) {
await this.db.content.add({
id: note.contentId,
localOnly: !!item.localOnly
});
}

const noteTitle = await this.getNoteTitle(note, oldNote, note.headline);
if (oldNote && oldNote.title !== noteTitle) note.dateEdited = Date.now();

await this.collection.upsert({
id,
contentId: note.contentId,
type: "note",

title: noteTitle,
headline: note.headline,

notebooks: note.notebooks || undefined,

pinned: !!note.pinned,
locked: !!note.locked,
favorite: !!note.favorite,
localOnly: !!note.localOnly,
conflicted: !!note.conflicted,
readonly: !!note.readonly,

dateCreated: note.dateCreated || Date.now(),
dateEdited:
item.dateEdited || note.dateEdited || note.dateCreated || Date.now(),
dateModified: note.dateModified || Date.now()
});

note.headline = note.locked ? "" : getNoteHeadline(content);
if (oldNote) note.dateEdited = Date.now();
}

if (item.localOnly !== undefined) {
await this.db.content.add({
id: note.contentId,
localOnly: !!item.localOnly
});
}

const noteTitle = await this.getNoteTitle(note, oldNote, note.headline);
if (oldNote && oldNote.title !== noteTitle) note.dateEdited = Date.now();

await this.collection.upsert({
id,
contentId: note.contentId,
type: "note",

title: noteTitle,
headline: note.headline,

notebooks: note.notebooks || undefined,

pinned: !!note.pinned,
locked: !!note.locked,
favorite: !!note.favorite,
localOnly: !!note.localOnly,
conflicted: !!note.conflicted,
readonly: !!note.readonly,

dateCreated: note.dateCreated || Date.now(),
dateEdited:
item.dateEdited || note.dateEdited || note.dateCreated || Date.now(),
dateModified: note.dateModified || Date.now()
if (!oldNote) this.totalNotes++;
});

if (!oldNote) this.totalNotes++;
return id;
}

@@ -149,8 +152,9 @@ export class Notes implements ICollection {
// }

get all() {
return this.collection.createFilter<Note>((qb) =>
qb.where(isFalse("dateDeleted")).where(isFalse("deleted"))
return this.collection.createFilter<Note>(
(qb) => qb.where(isFalse("dateDeleted")).where(isFalse("deleted")),
this.db.options?.batchSize
);
}

@@ -165,38 +169,46 @@ export class Notes implements ICollection {
// }

get pinned() {
return this.collection.createFilter<Note>((qb) =>
qb
.where(isFalse("dateDeleted"))
.where(isFalse("deleted"))
.where("pinned", "==", true)
return this.collection.createFilter<Note>(
(qb) =>
qb
.where(isFalse("dateDeleted"))
.where(isFalse("deleted"))
.where("pinned", "==", true),
this.db.options?.batchSize
);
}

get conflicted() {
return this.collection.createFilter<Note>((qb) =>
qb
.where(isFalse("dateDeleted"))
.where(isFalse("deleted"))
.where("conflicted", "==", true)
return this.collection.createFilter<Note>(
(qb) =>
qb
.where(isFalse("dateDeleted"))
.where(isFalse("deleted"))
.where("conflicted", "==", true),
this.db.options?.batchSize
);
}

get favorites() {
return this.collection.createFilter<Note>((qb) =>
qb
.where(isFalse("dateDeleted"))
.where(isFalse("deleted"))
.where("favorite", "==", true)
return this.collection.createFilter<Note>(
(qb) =>
qb
.where(isFalse("dateDeleted"))
.where(isFalse("deleted"))
.where("favorite", "==", true),
this.db.options?.batchSize
);
}

get locked() {
return this.collection.createFilter<Note>((qb) =>
qb
.where(isFalse("dateDeleted"))
.where(isFalse("deleted"))
.where("locked", "==", true)
return this.collection.createFilter<Note>(
(qb) =>
qb
.where(isFalse("dateDeleted"))
.where(isFalse("deleted"))
.where("locked", "==", true),
this.db.options?.batchSize
);
}

@@ -299,21 +311,19 @@ export class Notes implements ICollection {
dateCreated: undefined,
dateModified: undefined
});
if (!duplicateId) return;
if (!duplicateId) continue;

for (const notebook of await this.db.relations
for (const relation of await this.db.relations
.to(note, "notebook")
.get()) {
await this.db.relations.add(
{ type: "notebook", id: notebook },
{ type: "notebook", id: relation.fromId },
{
id: duplicateId,
type: "note"
}
);
}

return duplicateId;
}
}


@@ -19,18 +19,13 @@ along with this program. If not, see <http://www.gnu.org/licenses/>.

import { makeId } from "../utils/id";
import { ICollection } from "./collection";
import {
Relation,
ItemMap,
ItemReference,
ValueOf,
MaybeDeletedItem
} from "../types";
import { Relation, ItemMap, ItemReference, ValueOf, ItemType } from "../types";
import Database from "../api";
import { SQLCollection } from "../database/sql-collection";
import { DatabaseAccessor, DatabaseSchema, isFalse } from "../database";
import { FilteredSelector, SQLCollection } from "../database/sql-collection";
import { DatabaseSchema, isFalse } from "../database";
import { SelectQueryBuilder } from "kysely";

type ItemReferences = { type: ItemType; ids: string[] };
export class Relations implements ICollection {
name = "relations";
readonly collection: SQLCollection<"relations", Relation>;
@@ -39,6 +34,7 @@ export class Relations implements ICollection {
}

async init() {
await this.buildCache();
// return this.collection.init();
}

@@ -55,18 +51,71 @@ export class Relations implements ICollection {
});
}

from(
reference: ItemReference | ItemReferences,
types: (keyof RelatableTable)[]
): RelationsArray<keyof RelatableTable>;
from<TType extends keyof RelatableTable>(
reference: ItemReference,
reference: ItemReference | ItemReferences,
type: TType
): RelationsArray<TType>;
from<TType extends keyof RelatableTable = keyof RelatableTable>(
reference: ItemReference | ItemReferences,
type: TType | keyof RelatableTable[]
) {
return new RelationsArray(this.db, reference, type, "from");
return new RelationsArray(
this.db,
reference,
Array.isArray(type) ? type : [type],
"from"
);
}

to(
reference: ItemReference | ItemReferences,
types: (keyof RelatableTable)[]
): RelationsArray<keyof RelatableTable>;
to<TType extends keyof RelatableTable>(
reference: ItemReference,
reference: ItemReference | ItemReferences,
type: TType
): RelationsArray<TType>;
to<TType extends keyof RelatableTable = keyof RelatableTable>(
reference: ItemReference | ItemReferences,
type: TType | keyof RelatableTable[]
) {
return new RelationsArray(this.db, reference, type, "to");
return new RelationsArray(
this.db,
reference,
Array.isArray(type) ? type : [type],
"to"
);
}
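Both from() and to() now also accept a batched reference ({ type, ids }) and one or more relatable types. A hedged sketch of how the selector-based API is consumed, mirroring the call added in content.ts above (noteId/noteIds are placeholders):

// Single reference, single type: resolve full items.
const attachments = await db.relations
  .from({ type: "note", id: noteId }, "attachment")
  .resolve();

// Batched references with a custom projection via the new selector.
const hashes = await db.relations
  .from({ type: "note", ids: noteIds }, "attachment")
  .selector.filter.select(["id", "hash"])
  .execute();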

fromCache: Map<string, string[]> = new Map();
toCache: Map<string, string[]> = new Map();
async buildCache() {
console.time("cache build");
this.fromCache.clear();
this.toCache.clear();

console.time("query");
const relations = await this.db
.sql()
.selectFrom("relations")
.select(["toId", "fromId"])
.$narrowType<{ toId: string; fromId: string }>()
.execute();
console.timeEnd("query");
for (const { fromId, toId } of relations) {
const fromIds = this.fromCache.get(fromId) || [];
fromIds.push(toId);
this.fromCache.set(fromId, fromIds);

const toIds = this.toCache.get(toId) || [];
toIds.push(fromId);
this.toCache.set(toId, toIds);
}
console.timeEnd("cache build");
}

// get raw() {
@@ -146,32 +195,40 @@ const TABLE_MAP = {
type RelatableTable = typeof TABLE_MAP;

class RelationsArray<TType extends keyof RelatableTable> {
private table: ValueOf<RelatableTable> = TABLE_MAP[this.type];
private table: ValueOf<RelatableTable> = TABLE_MAP[this.types[0]];

constructor(
private readonly db: Database,
private readonly reference: ItemReference,
private readonly type: TType,
private readonly reference: ItemReference | ItemReferences,
private readonly types: TType[],
private readonly direction: "from" | "to"
) {}

async resolve(limit?: number): Promise<ItemMap[TType][]> {
const items = await this.db
.sql()
.selectFrom(this.table)
.where("id", "in", (b) =>
b
.selectFrom("relations")
.$call((eb) =>
this.buildRelationsQuery()(
eb as SelectQueryBuilder<DatabaseSchema, "relations", unknown>
get selector() {
return new FilteredSelector<ItemMap[TType]>(
this.table,
this.db
.sql()
.selectFrom<keyof DatabaseSchema>(this.table)
.where("id", "in", (b) =>
b
.selectFrom("relations")
.$call((eb) =>
this.buildRelationsQuery()(
eb as SelectQueryBuilder<DatabaseSchema, "relations", unknown>
)
)
)
)
)
// TODO: check if we need to index deleted field.
.where(isFalse("deleted")),
this.db.options?.batchSize
);
}

async resolve(limit?: number) {
const items = await this.selector.filter
.$if(limit !== undefined && limit > 0, (b) => b.limit(limit!))
.selectAll()
// TODO: check if we need to index deleted field.
.where(isFalse("deleted"))
.execute();
return items as unknown as ItemMap[TType][];
}
@@ -196,12 +253,20 @@ class RelationsArray<TType extends keyof RelatableTable> {
}

async get() {
const ids = await this.db
const relations = await this.db
.sql()
.selectFrom("relations")
.$call(this.buildRelationsQuery())
.clearSelect()
.select(["fromId", "toId", "fromType", "toType"])
.$narrowType<{
fromId: string;
toId: string;
fromType: keyof ItemMap;
toType: keyof ItemMap;
}>()
.execute();
return ids.map((i) => i.id);
return relations;
}

async count() {
@@ -216,13 +281,13 @@ class RelationsArray<TType extends keyof RelatableTable> {
return result.count;
}

async has(id: string) {
async has(...ids: string[]) {
const result = await this.db
.sql()
.selectFrom("relations")
.$call(this.buildRelationsQuery())
.clearSelect()
.where(this.direction === "from" ? "toId" : "fromId", "==", id)
.where(this.direction === "from" ? "toId" : "fromId", "in", ids)
.select((b) => b.fn.count<number>("id").as("count"))
.executeTakeFirst();
if (!result) return false;
@@ -240,15 +305,26 @@ class RelationsArray<TType extends keyof RelatableTable> {
) => {
if (this.direction === "to") {
return builder
.where("fromType", "==", this.type)
.where(
"fromType",
this.types.length > 1 ? "in" : "==",
this.types.length > 1 ? this.types : this.types[0]
)
.where("toType", "==", this.reference.type)
.where("toId", "==", this.reference.id)
.where(
"toId",
isItemReferences(this.reference) ? "in" : "==",
isItemReferences(this.reference)
? this.reference.ids
: this.reference.id
)
.$if(
this.type === "note" && this.db.trash.cache.notes.length > 0,
this.types.includes("note" as TType) &&
this.db.trash.cache.notes.length > 0,
(b) => b.where("fromId", "not in", this.db.trash.cache.notes)
)
.$if(
this.type === "notebook" &&
this.types.includes("notebook" as TType) &&
this.db.trash.cache.notebooks.length > 0,
(b) => b.where("fromId", "not in", this.db.trash.cache.notebooks)
)
@@ -256,15 +332,26 @@ class RelationsArray<TType extends keyof RelatableTable> {
.$narrowType<{ id: string }>();
} else {
return builder
.where("toType", "==", this.type)
.where(
"toType",
this.types.length > 1 ? "in" : "==",
this.types.length > 1 ? this.types : this.types[0]
)
.where("fromType", "==", this.reference.type)
.where("fromId", "==", this.reference.id)
.where(
"fromId",
isItemReferences(this.reference) ? "in" : "==",
isItemReferences(this.reference)
? this.reference.ids
: this.reference.id
)
.$if(
this.type === "note" && this.db.trash.cache.notes.length > 0,
this.types.includes("note" as TType) &&
this.db.trash.cache.notes.length > 0,
(b) => b.where("toId", "not in", this.db.trash.cache.notes)
)
.$if(
this.type === "notebook" &&
this.types.includes("notebook" as TType) &&
this.db.trash.cache.notebooks.length > 0,
(b) => b.where("toId", "not in", this.db.trash.cache.notebooks)
)
@@ -274,3 +361,9 @@ class RelationsArray<TType extends keyof RelatableTable> {
};
}
}

function isItemReferences(
ref: ItemReference | ItemReferences
): ref is ItemReferences {
return "ids" in ref;
}

@@ -28,6 +28,7 @@ import { ICollection } from "./collection";
import { Reminder } from "../types";
import Database from "../api";
import { SQLCollection } from "../database/sql-collection";
import { isFalse } from "../database";

dayjs.extend(isTomorrow);
dayjs.extend(isSameOrBefore);
@@ -84,9 +85,12 @@ export class Reminders implements ICollection {
// return this.collection.raw();
// }

// get all() {
// return this.collection.items();
// }
get all() {
return this.collection.createFilter<Reminder>(
(qb) => qb.where(isFalse("deleted")),
this.db.options?.batchSize
);
}

exists(itemId: string) {
return this.collection.exists(itemId);

@@ -52,17 +52,17 @@ export class SessionContent implements ICollection {
locked: TLocked
) {
if (!sessionId || !content) return;
const data =
locked || isCipher(content.data)
? content.data
: await this.db.compressor().compress(content.data);
// const data =
// locked || isCipher(content.data)
// ? content.data
// : await this.db.compressor().compress(content.data);

await this.collection.upsert({
type: "sessioncontent",
id: makeSessionContentId(sessionId),
data,
data: content.data,
contentType: content.type,
compressed: !locked,
compressed: false,
localOnly: true,
locked,
dateCreated: Date.now(),

@@ -18,17 +18,20 @@ along with this program. If not, see <http://www.gnu.org/licenses/>.
*/

import Database from "../api";
import { isFalse } from "../database";
import { SQLCollection } from "../database/sql-collection";
import { SQLCachedCollection } from "../database/sql-cached-collection";
import { Shortcut } from "../types";
import { ICollection } from "./collection";

const ALLOWED_SHORTCUT_TYPES = ["notebook", "topic", "tag"];
export class Shortcuts implements ICollection {
name = "shortcuts";
readonly collection: SQLCollection<"shortcuts", Shortcut>;
readonly collection: SQLCachedCollection<"shortcuts", Shortcut>;
constructor(private readonly db: Database) {
this.collection = new SQLCollection(db.sql, "shortcuts", db.eventManager);
this.collection = new SQLCachedCollection(
db.sql,
"shortcuts",
db.eventManager
);
}

init() {
@@ -82,33 +85,25 @@ export class Shortcuts implements ICollection {
// }

get all() {
return this.collection.createFilter<Shortcut>((qb) =>
qb.where(isFalse("deleted"))
);
return this.collection.items();
}

async get() {
// return this.all.reduce((prev, shortcut) => {
// const {
// item: { id }
// } = shortcut;
// let item: Notebook | Topic | Tag | null | undefined = null;
// switch (shortcut.item.type) {
// case "notebook": {
// const notebook = this.db.notebooks.notebook(id);
// item = notebook ? notebook.data : null;
// break;
// }
// case "tag":
// item = this.db.tags.tag(id);
// break;
// }
// if (item) prev.push(item);
// return prev;
// }, [] as (Notebook | Topic | Tag)[]);
async resolved() {
const tagIds: string[] = [];
const notebookIds: string[] = [];
for (const shortcut of this.all) {
if (shortcut.itemType === "notebook") notebookIds.push(shortcut.itemId);
else if (shortcut.itemType === "tag") tagIds.push(shortcut.itemId);
}
return [
...(notebookIds.length > 0
? await this.db.notebooks.all.items(notebookIds)
: []),
...(tagIds.length > 0 ? await this.db.tags.all.items(tagIds) : [])
];
}

async exists(id: string) {
exists(id: string) {
return this.collection.exists(id);
}


@@ -71,8 +71,9 @@ export class Tags implements ICollection {
// }

get all() {
return this.collection.createFilter<Tag>((qb) =>
qb.where(isFalse("deleted"))
return this.collection.createFilter<Tag>(
(qb) => qb.where(isFalse("deleted")),
this.db.options?.batchSize
);
}


@@ -206,7 +206,7 @@ export default class Backup {
collection: DatabaseCollection<T, B>,
state: BackupState
) {
for await (const item of collection.stream()) {
for await (const item of collection.stream() as any) {
const data = JSON.stringify(item);
state.buffer.push(data);
state.bufferLength += data.length;

@@ -29,6 +29,9 @@ import { StorageAccessor } from "../interfaces";
import EventManager from "../utils/event-manager";
import { chunkedIterate } from "../utils/array";

/**
* @deprecated only kept here for migration purposes
*/
export class CachedCollection<
TCollectionType extends CollectionType,
T extends ItemMap[Collections[TCollectionType]]

@@ -20,11 +20,7 @@ along with this program. If not, see <http://www.gnu.org/licenses/>.
import {
Migrator,
Kysely,
SqliteAdapter,
SqliteIntrospector,
SqliteQueryCompiler,
sql,
Driver,
KyselyPlugin,
PluginTransformQueryArgs,
PluginTransformResultArgs,
@@ -37,7 +33,8 @@ import {
Transaction,
ColumnType,
ExpressionBuilder,
ReferenceExpression
ReferenceExpression,
Dialect
} from "kysely";
import {
Attachment,
@@ -119,9 +116,8 @@ export interface DatabaseCollection<T, IsAsync extends boolean> {
put(items: (T | undefined)[]): Promise<void>;
update(ids: string[], partial: Partial<T>): Promise<void>;
ids(options: GroupOptions): AsyncOrSyncResult<IsAsync, string[]>;
items(
ids: string[],
sortOptions?: GroupOptions
records(
ids: string[]
): AsyncOrSyncResult<
IsAsync,
Record<string, MaybeDeletedItem<T> | undefined>
@@ -198,14 +194,17 @@ const DataMappers: Partial<Record<ItemType, (row: any) => void>> = {
}
};

export async function createDatabase(driver: Driver) {
export type SQLiteOptions = {
dialect: Dialect;
journalMode?: "WAL" | "MEMORY" | "OFF" | "PERSIST" | "TRUNCATE" | "DELETE";
synchronous?: "normal" | "extra" | "full" | "off";
lockingMode?: "normal" | "exclusive";
cacheSize?: number;
pageSize?: number;
};
export async function createDatabase(options: SQLiteOptions) {
const db = new Kysely<DatabaseSchema>({
dialect: {
createAdapter: () => new SqliteAdapter(),
createDriver: () => driver,
createIntrospector: (db) => new SqliteIntrospector(db),
createQueryCompiler: () => new SqliteQueryCompiler()
},
dialect: options.dialect,
plugins: [new SqliteBooleanPlugin()]
});

@@ -214,8 +213,28 @@ export async function createDatabase(driver: Driver) {
provider: new NNMigrationProvider()
});

await sql`PRAGMA journal_mode = WAL`.execute(db);
await sql`PRAGMA synchronous = normal`.execute(db);
await sql`PRAGMA journal_mode = ${sql.raw(
options.journalMode || "WAL"
)}`.execute(db);

await sql`PRAGMA synchronous = ${sql.raw(
options.synchronous || "normal"
)}`.execute(db);

if (options.pageSize)
await sql`PRAGMA page_size = ${sql.raw(
options.pageSize.toString()
)}`.execute(db);

if (options.cacheSize)
await sql`PRAGMA cache_size = ${sql.raw(
options.cacheSize.toString()
)}`.execute(db);

if (options.lockingMode)
await sql`PRAGMA locking_mode = ${sql.raw(options.lockingMode)}`.execute(
db
);

await migrator.migrateToLatest();
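For reference, a hedged sketch of the PRAGMA statements this emits for a typical options object (the values are examples; only the noted defaults come from the code):

const options: SQLiteOptions = {
  dialect, // any Kysely SQLite dialect (placeholder)
  journalMode: "WAL", // default when omitted
  synchronous: "normal", // default when omitted
  cacheSize: -16000, // negative values mean KiB in SQLite's PRAGMA cache_size
  lockingMode: "exclusive"
};
// Roughly equivalent to executing:
// PRAGMA journal_mode = WAL;
// PRAGMA synchronous = normal;
// PRAGMA cache_size = -16000;
// PRAGMA locking_mode = exclusive;
// (PRAGMA page_size is only issued when options.pageSize is set.)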

@@ -240,6 +259,8 @@ export class SqliteBooleanPlugin implements KyselyPlugin {
args: PluginTransformResultArgs
): Promise<QueryResult<UnknownRow>> {
for (const row of args.result.rows) {
if (typeof row !== "object") continue;

for (const key in row) {
if (BooleanProperties.has(key as BooleanFields)) {
row[key] = row[key] === 1 ? true : false;

@@ -281,7 +281,6 @@ async function createFTS5Table(
const ref_ai = sql.raw(table + "_ai");
const ref_ad = sql.raw(table + "_ad");
const ref_au = sql.raw(table + "_au");

const indexed_cols = sql.raw(indexedColumns.join(", "));
const unindexed_cols =
unindexedColumns.length > 0
@@ -289,11 +288,9 @@ async function createFTS5Table(
: sql.raw("");
const new_indexed_cols = sql.raw(indexedColumns.join(", new."));
const old_indexed_cols = sql.raw(indexedColumns.join(", old."));

await sql`CREATE VIRTUAL TABLE ${ref_fts} USING fts5(
id UNINDEXED, ${unindexed_cols} ${indexed_cols}, content='${sql.raw(table)}'
)`.execute(db);

insertConditions = [
"(new.deleted is null or new.deleted == 0)",
...insertConditions
@@ -304,13 +301,11 @@ async function createFTS5Table(
BEGIN
INSERT INTO ${ref_fts}(rowid, id, ${indexed_cols}) VALUES (new.rowid, new.id, new.${new_indexed_cols});
END;`.execute(db);

await sql`CREATE TRIGGER ${ref_ad} AFTER DELETE ON ${ref}
BEGIN
INSERT INTO ${ref_fts} (${ref_fts}, rowid, id, ${indexed_cols})
VALUES ('delete', old.rowid, old.id, old.${old_indexed_cols});
END;`.execute(db);

await sql`CREATE TRIGGER ${ref_au} AFTER UPDATE ON ${ref}
BEGIN
INSERT INTO ${ref_fts} (${ref_fts}, rowid, id, ${indexed_cols})

@@ -28,7 +28,7 @@ export class SQLCachedCollection<
> implements DatabaseCollection<T, false>
{
private collection: SQLCollection<TCollectionType, T>;
private cache = new Map<string, MaybeDeletedItem<T>>();
private cache = new Map<string, MaybeDeletedItem<T> | undefined>();
// private cachedItems?: T[];

constructor(
@@ -41,6 +41,8 @@ export class SQLCachedCollection<

async init() {
await this.collection.init();
const records = await this.collection.records([]);
this.cache = new Map(Object.entries(records));
// const data = await this.collection.indexer.readMulti(
// this.collection.indexer.indices
// );
@@ -114,10 +116,7 @@ export class SQLCachedCollection<
return Array.from(this.cache.keys());
}

items(
ids: string[],
_sortOptions?: GroupOptions
): Record<string, MaybeDeletedItem<T> | undefined> {
records(ids: string[]): Record<string, MaybeDeletedItem<T> | undefined> {
const items: Record<string, MaybeDeletedItem<T> | undefined> = {};
for (const id of ids) {
items[id] = this.cache.get(id);
@@ -125,13 +124,30 @@ export class SQLCachedCollection<
return items;
}

items(ids?: string[]): T[] {
const items: T[] = [];
if (ids) {
for (const id of ids) {
const item = this.cache.get(id);
if (!item || isDeleted(item)) continue;
items.push(item);
}
} else {
for (const [_key, value] of this.cache) {
if (!value || isDeleted(value)) continue;
items.push(value);
}
}
return items;
}

*unsynced(
after: number,
chunkSize: number
): IterableIterator<MaybeDeletedItem<T>[]> {
let chunk: MaybeDeletedItem<T>[] = [];
for (const [_key, value] of this.cache) {
if (value.dateModified && value.dateModified > after) {
if (value && value.dateModified && value.dateModified > after) {
chunk.push(value);
if (chunk.length === chunkSize) {
yield chunk;
@@ -144,7 +160,7 @@ export class SQLCachedCollection<

*stream(): IterableIterator<T> {
for (const [_key, value] of this.cache) {
if (!value.deleted) yield value as T;
if (value && !value.deleted) yield value as T;
}
}


@@ -27,7 +27,7 @@ import {
SQLiteItem,
isFalse
} from ".";
import { ExpressionOrFactory, SelectQueryBuilder, SqlBool } from "kysely";
import { ExpressionOrFactory, SelectQueryBuilder, SqlBool, sql } from "kysely";
import { VirtualizedGrouping } from "../utils/virtualized-grouping";
import { groupArray } from "../utils/grouping";

@@ -165,13 +165,13 @@ export class SQLCollection<
return ids.map((id) => id.id);
}

async items(
async records(
ids: string[]
): Promise<Record<string, MaybeDeletedItem<T> | undefined>> {
const results = await this.db()
.selectFrom<keyof DatabaseSchema>(this.type)
.selectAll()
.where("id", "in", ids)
.$if(ids.length > 0, (eb) => eb.where("id", "in", ids))
.execute();
const items: Record<string, MaybeDeletedItem<T>> = {};
for (const item of results) {
@@ -229,9 +229,10 @@ export class SQLCollection<
selector: (
qb: SelectQueryBuilder<DatabaseSchema, keyof DatabaseSchema, unknown>
) => SelectQueryBuilder<DatabaseSchema, keyof DatabaseSchema, unknown>,
batchSize = 50
batchSize?: number
) {
return new FilteredSelector<T>(
this.type,
this.db().selectFrom<keyof DatabaseSchema>(this.type).$call(selector),
batchSize
);
@@ -240,12 +241,13 @@ export class SQLCollection<

export class FilteredSelector<T extends Item> {
constructor(
readonly type: keyof DatabaseSchema,
readonly filter: SelectQueryBuilder<
DatabaseSchema,
keyof DatabaseSchema,
unknown
>,
readonly batchSize: number
readonly batchSize: number = 500
) {}

async ids(sortOptions?: GroupOptions) {
@@ -269,6 +271,15 @@ export class FilteredSelector<T extends Item> {
.execute()) as T[];
}

async records(ids?: string[], sortOptions?: GroupOptions) {
const results = await this.items(ids, sortOptions);
const items: Record<string, T> = {};
for (const item of results) {
items[item.id] = item as T;
}
return items;
}

async has(id: string) {
const { count } =
(await this.filter
@@ -307,7 +318,14 @@ export class FilteredSelector<T extends Item> {
}

async grouped(options: GroupOptions) {
const ids = await this.ids(options);
console.time("getting items");
const items = await this.filter
.$call(this.buildSortExpression(options))
.select(["id", options.sortBy, "type"])
.execute();
console.timeEnd("getting items");
console.log(items.length);
const ids = groupArray(items, options);
return new VirtualizedGrouping<T>(
ids,
this.batchSize,
@@ -321,8 +339,8 @@ export class FilteredSelector<T extends Item> {
items[item.id] = item as T;
}
return items;
},
(ids, items) => groupArray(ids, items, options)
}
//(ids, items) => groupArray(ids, items, options)
);
}

@@ -331,9 +349,20 @@ export class FilteredSelector<T extends Item> {
qb: SelectQueryBuilder<DatabaseSchema, keyof DatabaseSchema, T>
) => {
return qb
.orderBy("conflicted desc")
.orderBy("pinned desc")
.orderBy(options.sortBy, options.sortDirection);
.$if(this.type === "notes", (eb) => eb.orderBy("conflicted desc"))
.$if(this.type === "notes" || this.type === "notebooks", (eb) =>
eb.orderBy("pinned desc")
)
.$if(options.sortBy === "title", (eb) =>
eb.orderBy(
sql`${sql.raw(options.sortBy)} COLLATE NOCASE ${sql.raw(
options.sortDirection
)}`
)
)
.$if(options.sortBy !== "title", (eb) =>
eb.orderBy(options.sortBy, options.sortDirection)
);
};
}


packages/core/src/index.ts (new file, 21 lines)
@@ -0,0 +1,21 @@
/*
This file is part of the Notesnook project (https://notesnook.com/)

Copyright (C) 2023 Streetwriters (Private) Limited

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/

export * from "./types";
export { VirtualizedGrouping } from "./utils/virtualized-grouping";
@@ -39,6 +39,8 @@ export type GroupingKey =
| "reminders";

export type ValueOf<T> = T[keyof T];
export type Optional<T, K extends keyof T> = Pick<Partial<T>, K> & Omit<T, K>;
export type RequiredBy<T, K extends keyof T> = Partial<Omit<T, K>> & Pick<T, K>; // Pick<, K> & Omit<T, K>;

export type GroupHeader = {
type: "header";
@@ -443,6 +445,6 @@ export function isTrashItem(item: MaybeDeletedItem<Item>): item is TrashItem {
return !isDeleted(item) && item.type === "trash";
}

export function isGroupHeader(item: GroupHeader | Item): item is GroupHeader {
export function isGroupHeader(item: any): item is GroupHeader {
return item.type === "header";
}

@@ -17,7 +17,7 @@ You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/

import { expect, test, vi } from "vitest";
import { test, vi } from "vitest";
import { VirtualizedGrouping } from "../virtualized-grouping";

function item<T>(value: T) {
@@ -123,40 +123,3 @@ test("reloading ids should clear all cached batches", async (t) => {
t.expect(await grouping.item("1")).toStrictEqual(item("1"));
t.expect(mocked).toHaveBeenLastCalledWith(["1", "2", "3"]);
});

test("merge groups if last & first groups are the same (sequential)", async (t) => {
const mocked = createMock();
const grouping = new VirtualizedGrouping<string>(
["1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12"],
3,
mocked,
(ids) => [{ title: "Hello", id: ids[0] }]
);
expect((await grouping.item("1"))?.group?.title).toBe("Hello");
expect((await grouping.item("4"))?.group).toBeUndefined();
});

test("merge groups if last & first groups are the same (random)", async (t) => {
const mocked = createMock();
const grouping = new VirtualizedGrouping<string>(
["1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12"],
3,
mocked,
(ids) => [{ title: "Hello", id: ids[0] }]
);
expect((await grouping.item("1"))?.group?.title).toBe("Hello");
expect((await grouping.item("7"))?.group).toBeUndefined();
});

test("merge groups if last & first groups are the same (reverse)", async (t) => {
const mocked = createMock();
const grouping = new VirtualizedGrouping<string>(
["1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12"],
3,
mocked,
(ids) => [{ title: "Hello", id: ids[0] }]
);
expect((await grouping.item("7"))?.group?.title).toBe("Hello");
expect((await grouping.item("1"))?.group?.title).toBe("Hello");
expect((await grouping.item("7"))?.group).toBeUndefined();
});

@@ -18,15 +18,23 @@ along with this program. If not, see <http://www.gnu.org/licenses/>.
*/

import { isReminderActive } from "../collections/reminders";
import { GroupOptions, Item } from "../types";
import { GroupHeader, GroupOptions, ItemType } from "../types";
import { getWeekGroupFromTimestamp, MONTHS_FULL } from "./date";
import { VirtualizedGroupHeader } from "./virtualized-grouping";

type PartialGroupableItem = {
id: string;
type?: ItemType | null;
dateDeleted?: number | null;
title?: string | null;
filename?: string | null;
dateEdited?: number | null;
dateCreated?: number | null;
};
type EvaluateKeyFunction<T> = (item: T) => string;

export const getSortValue = <T extends Item>(
export const getSortValue = (
options: GroupOptions,
item: T
item: PartialGroupableItem
) => {
if (
options.sortBy === "dateDeleted" &&
@@ -43,18 +51,20 @@ export const getSortValue = <T extends Item>(
const MILLISECONDS_IN_DAY = 1000 * 60 * 60 * 24;
const MILLISECONDS_IN_WEEK = MILLISECONDS_IN_DAY * 7;

function getKeySelector(options: GroupOptions): EvaluateKeyFunction<Item> {
return (item: Item) => {
function getKeySelector(
options: GroupOptions
): EvaluateKeyFunction<PartialGroupableItem> {
return (item) => {
if ("pinned" in item && item.pinned) return "Pinned";
else if ("conflicted" in item && item.conflicted) return "Conflicted";

const date = new Date();
if (item.type === "reminder")
return isReminderActive(item) ? "Active" : "Inactive";
return "Active"; // isReminderActive(item) ? "Active" : "Inactive";
else if (options.sortBy === "title")
return getFirstCharacter(getTitle(item));
else {
const value = getSortValue(options, item);
const value = getSortValue(options, item) || 0;
switch (options.groupBy) {
case "none":
return "All";
@@ -80,36 +90,42 @@ function getKeySelector(options: GroupOptions): EvaluateKeyFunction<Item> {
}

export function groupArray(
ids: string[],
items: Record<string, Item>,
items: PartialGroupableItem[],
options: GroupOptions = {
groupBy: "default",
sortBy: "dateEdited",
sortDirection: "desc"
}
): VirtualizedGroupHeader[] {
const groups = new Map<string, VirtualizedGroupHeader>([
["Conflicted", { title: "Conflicted", id: "" }],
["Pinned", { title: "Pinned", id: "" }],
["Active", { title: "Active", id: "" }],
["Inactive", { title: "Inactive", id: "" }]
): (string | GroupHeader)[] {
const groups = new Map<string, string[]>([
["Conflicted", []],
["Pinned", []]
]);

const keySelector = getKeySelector(options);
for (const id of ids) {
const item = items[id];
if (!item) continue;

for (const item of items) {
const groupTitle = keySelector(item);
const group = groups.get(groupTitle) || {
title: groupTitle,
id: ""
};
if (group.id === "") group.id = id;
const group = groups.get(groupTitle) || [];
group.push(item.id);
groups.set(groupTitle, group);
}

return Array.from(groups.values());
return flattenGroups(groups);
}

function flattenGroups(groups: Map<string, string[]>) {
const items: (string | GroupHeader)[] = [];
groups.forEach((groupItems, groupTitle) => {
if (groupItems.length <= 0) return;
items.push({
title: groupTitle,
id: groupTitle.toLowerCase(),
type: "header"
});
items.push(...groupItems);
});

return items;
}
|
||||
|
||||
function getFirstCharacter(str: string) {
|
||||
@@ -119,10 +135,6 @@ function getFirstCharacter(str: string) {
|
||||
return str[0].toUpperCase();
|
||||
}
|
||||
|
||||
function getTitle(item: Item): string {
|
||||
return item.type === "attachment"
|
||||
? item.filename
|
||||
: "title" in item
|
||||
? item.title
|
||||
: "Unknown";
|
||||
function getTitle(item: PartialGroupableItem): string {
|
||||
return item.filename || item.title || "Unknown";
|
||||
}
|
||||
|
||||
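For reference, a minimal sketch of the flattened shape the reworked groupArray now returns. The items and option values below are hypothetical (groupBy "abc", sortBy "title" and sortDirection "asc" are assumed to be valid GroupOptions members); it is not taken from the commit itself.

// Two notes whose titles both start with "A"; with sortBy "title" the key
// selector groups them under their first letter.
const flat = groupArray(
  [
    { id: "n1", type: "note", title: "Apple", dateEdited: Date.now() },
    { id: "n2", type: "note", title: "Avocado", dateEdited: Date.now() }
  ],
  { groupBy: "abc", sortBy: "title", sortDirection: "asc" }
);
// flat => [{ title: "A", id: "a", type: "header" }, "n1", "n2"]
// i.e. each group header is followed by the ids of its members in one flat array.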
@@ -30,3 +30,14 @@ export function createObjectId(date = Date.now()): string {
function swap16(val: number) {
  return ((val & 0xff) << 16) | (val & 0xff00) | ((val >> 16) & 0xff);
}

export function getObjectIdTimestamp(id: string) {
  const timestamp = new Date();
  const time =
    id.charCodeAt(3) |
    (id.charCodeAt(2) << 8) |
    (id.charCodeAt(1) << 16) |
    (id.charCodeAt(0) << 24);
  timestamp.setTime(Math.floor(time) * 1000);
  return timestamp;
}
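A minimal sketch of the byte arithmetic in getObjectIdTimestamp: the char codes of the first four characters are read as a big-endian seconds-since-epoch value. The id below is constructed by hand purely for illustration, not via createObjectId.

// 1700000000 seconds since the epoch == 2023-11-14T22:13:20.000Z
const seconds = 1700000000;
const id = String.fromCharCode(
  (seconds >>> 24) & 0xff,
  (seconds >>> 16) & 0xff,
  (seconds >>> 8) & 0xff,
  seconds & 0xff
);
console.log(getObjectIdTimestamp(id).toISOString()); // 2023-11-14T22:13:20.000Z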
@@ -17,51 +17,57 @@ You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/

export type VirtualizedGroupHeader = {
  title: string;
  id: string;
};
import { GroupHeader, isGroupHeader } from "../types";

type BatchOperator<T> = (
  ids: string[],
  items: Record<string, T>
) => Promise<Record<string, unknown>>;
type Batch<T> = { items: Record<string, T>; data?: Record<string, unknown> };
export class VirtualizedGrouping<T> {
  private cache: Map<number, Record<string, T>> = new Map();
  private groups: Map<number, VirtualizedGroupHeader[]> = new Map();
  private cache: Map<number, Batch<T>> = new Map();
  private pending: Map<number, Promise<Batch<T>>> = new Map();
  groups: GroupHeader[] = [];

  constructor(
    public ids: string[],
    public ids: (string | GroupHeader)[],
    private readonly batchSize: number,
    private readonly fetchItems: (ids: string[]) => Promise<Record<string, T>>,
    private readonly groupItems: (
      ids: string[],
      items: Record<string, T>
    ) => VirtualizedGroupHeader[] = () => []
    private readonly fetchItems: (ids: string[]) => Promise<Record<string, T>>
  ) {
    this.ids = ids;
    this.groups = ids.filter((i) => isGroupHeader(i)) as GroupHeader[];
  }

  getKey(index: number) {
    const item = this.ids[index];
    if (isGroupHeader(item)) return item.id;
    return item;
  }

  /**
   * Get item from cache or request the appropriate batch for caching
   * and load it from there.
   */
  async item(id: string) {
  item(id: string): Promise<T | undefined>;
  item(
    id: string,
    operate: BatchOperator<T>
  ): Promise<{ item: T; data: unknown } | undefined>;
  async item(id: string, operate?: BatchOperator<T>) {
    const index = this.ids.indexOf(id);
    if (index <= -1) return;

    const batchIndex = Math.floor(index / this.batchSize);
    const batch = this.cache.get(batchIndex) || (await this.load(batchIndex));
    const groups = this.groups.get(batchIndex);
    const { items, data } =
      this.cache.get(batchIndex) || (await this.loadBatch(batchIndex, operate));

    const group = groups?.find((g) => g.id === id);
    if (group)
      return {
        group: { type: "header", id: group.title, title: group.title },
        item: batch[id]
      };
    return { item: batch[id] };
    return operate ? { item: items[id], data: data?.[id] } : items[id];
  }

  /**
   * Reload the cache
   */
  refresh(ids: string[]) {
  refresh(ids: (string | GroupHeader)[]) {
    this.ids = ids;
    this.cache.clear();
  }
@@ -70,42 +76,40 @@ export class VirtualizedGrouping<T> {
   *
   * @param index
   */
  private async load(batch: number) {
    const start = batch * this.batchSize;
  private async load(batchIndex: number, operate?: BatchOperator<T>) {
    const start = batchIndex * this.batchSize;
    const end = start + this.batchSize;
    const batchIds = this.ids.slice(start, end);
    const batchIds = this.ids
      .slice(start, end)
      .filter((id) => typeof id === "string") as string[];
    const items = await this.fetchItems(batchIds);
    const groups = this.groupItems(batchIds, items);

    const lastBatchIndex = this.last;
    const prevGroups = this.groups.get(lastBatchIndex);
    if (prevGroups && prevGroups.length > 0 && groups.length > 0) {
      const lastGroup = prevGroups[prevGroups.length - 1];
      if (lastGroup.title === groups[0].title) {
        // if user is moving downwards, we remove the last group from the
        // current batch, otherwise we remove the first group from the previous
        // batch.
        lastBatchIndex < batch ? groups.pop() : prevGroups.shift();
      }
    }

    this.cache.set(batch, items);
    this.groups.set(batch, groups);
    console.time("operate");
    const batch = {
      items,
      data: operate ? await operate(batchIds, items) : undefined
    };
    console.timeEnd("operate");
    this.cache.set(batchIndex, batch);
    this.clear();
    return items;
    return batch;
  }

  private loadBatch(batch: number, operate?: BatchOperator<T>) {
    if (this.pending.has(batch)) return this.pending.get(batch)!;
    console.time("loading batch");
    const promise = this.load(batch, operate);
    this.pending.set(batch, promise);
    return promise.finally(() => {
      console.timeEnd("loading batch");
      this.pending.delete(batch);
    });
  }

  private clear() {
    if (this.cache.size <= 2) return;
    for (const [key] of this.cache) {
      this.cache.delete(key);
      this.groups.delete(key);
      if (this.cache.size === 2) break;
    }
  }

  private get last() {
    const keys = Array.from(this.cache.keys());
    return keys[keys.length - 1];
  }
}
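For reference, a minimal usage sketch of the reworked VirtualizedGrouping API after this change; the ids, header object, fetcher, and batch operator below are hypothetical.

const grouping = new VirtualizedGrouping<{ title: string }>(
  [{ type: "header", id: "a", title: "A" }, "n1", "n2"],
  50,
  // fetchItems: resolve a map of id -> item for the requested batch
  async (ids) =>
    Object.fromEntries(ids.map((id) => [id, { title: `Note ${id}` }]))
);

// Loading an item with a batch operator runs it over the whole batch and
// returns the per-id result alongside the item:
const withData = await grouping.item("n1", async (ids, items) =>
  Object.fromEntries(ids.map((id) => [id, items[id].title.length]))
);
// withData => { item: { title: "Note n1" }, data: 7 }

// A plain lookup in the same batch is then served from the cache:
const note = await grouping.item("n2"); // { title: "Note n2" }

Note that the operator only runs when a batch is first loaded; lookups against an already cached batch return whatever data was computed (or not computed) at load time.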