mirror of
https://github.com/streetwriters/notesnook.git
synced 2025-12-23 23:19:40 +01:00
core: implement new high perf indexed based grouping
This commit is contained in:
@@ -100,16 +100,21 @@ export default class Lookup {
|
||||
trash(query: string): SearchResults<TrashItem> {
|
||||
return {
|
||||
sorted: async (limit?: number) => {
|
||||
const { ids, records } = await this.filterTrash(query, limit);
|
||||
const { ids, items } = await this.filterTrash(query, limit);
|
||||
return new VirtualizedGrouping<TrashItem>(
|
||||
ids,
|
||||
ids.length,
|
||||
this.db.options.batchSize,
|
||||
async () => records
|
||||
async (start, end) => {
|
||||
return {
|
||||
ids: ids.slice(start, end),
|
||||
items: items.slice(start, end)
|
||||
};
|
||||
}
|
||||
);
|
||||
},
|
||||
items: async (limit?: number) => {
|
||||
const { records } = await this.filterTrash(query, limit);
|
||||
return Object.values(records);
|
||||
const { items } = await this.filterTrash(query, limit);
|
||||
return items;
|
||||
},
|
||||
ids: () => this.filterTrash(query).then(({ ids }) => ids)
|
||||
};
|
||||
@@ -176,22 +181,23 @@ export default class Lookup {
|
||||
private async filterTrash(query: string, limit?: number) {
|
||||
const items = await this.db.trash.all();
|
||||
|
||||
const records: Record<string, TrashItem> = {};
|
||||
const results: Map<string, number> = new Map();
|
||||
const results: Map<string, { rank: number; item: TrashItem }> = new Map();
|
||||
for (const item of items) {
|
||||
if (limit && results.size >= limit) break;
|
||||
|
||||
const result = match(query, item.title);
|
||||
if (result.match) {
|
||||
records[item.id] = item;
|
||||
results.set(item.id, result.score);
|
||||
results.set(item.id, { rank: result.score, item });
|
||||
}
|
||||
}
|
||||
|
||||
const ids = Array.from(results.entries())
|
||||
.sort((a, b) => a[1] - b[1])
|
||||
.map((a) => a[0]);
|
||||
return { ids, records };
|
||||
const sorted = Array.from(results.entries()).sort(
|
||||
(a, b) => a[1].rank - b[1].rank
|
||||
);
|
||||
return {
|
||||
ids: sorted.map((a) => a[0]),
|
||||
items: sorted.map((a) => a[1].item)
|
||||
};
|
||||
}
|
||||
|
||||
private toVirtualizedGrouping<T extends Item>(
|
||||
@@ -199,9 +205,15 @@ export default class Lookup {
|
||||
selector: FilteredSelector<T>
|
||||
) {
|
||||
return new VirtualizedGrouping<T>(
|
||||
ids,
|
||||
ids.length,
|
||||
this.db.options.batchSize,
|
||||
async (ids) => selector.records(ids)
|
||||
async (start, end) => {
|
||||
const items = await selector.items(ids);
|
||||
return {
|
||||
ids: ids.slice(start, end),
|
||||
items: items.slice(start, end)
|
||||
};
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -34,7 +34,7 @@ export class Relations implements ICollection {
|
||||
}
|
||||
|
||||
async init() {
|
||||
await this.buildCache();
|
||||
// await this.buildCache();
|
||||
// return this.collection.init();
|
||||
}
|
||||
|
||||
|
||||
@@ -212,18 +212,28 @@ export default class Trash {
|
||||
}
|
||||
|
||||
async grouped(options: GroupOptions) {
|
||||
const items = await this.all();
|
||||
const ids = groupArray(items, options);
|
||||
const records: Record<string, TrashItem> = {};
|
||||
for (const item of items) records[item.id] = item;
|
||||
// const items = await this.all();
|
||||
// const ids = groupArray(items, options);
|
||||
// const records: Record<string, TrashItem> = {};
|
||||
// for (const item of items) records[item.id] = item;
|
||||
// const ids = [...this.cache.notebooks,...this.cache.notes]
|
||||
|
||||
return new VirtualizedGrouping<TrashItem>(
|
||||
ids,
|
||||
this.db.options?.batchSize || 500,
|
||||
async (ids: string[]) => {
|
||||
const items: Record<string, TrashItem> = {};
|
||||
for (const id of ids) items[id] = records[id];
|
||||
return items;
|
||||
this.cache.notebooks.length + this.cache.notes.length,
|
||||
this.db.options.batchSize,
|
||||
async (start, end) => {
|
||||
// const notesRange = end < this.cache.notes.length ? [start, end] : [start, this.cache.notes.length - 1];
|
||||
// const notebooksRange = start >= this.cache.notes.length ?[start, end] : [
|
||||
// 0, end
|
||||
// ]
|
||||
// TODO:
|
||||
return { ids: [], items: [] };
|
||||
// return {
|
||||
// ids: ids.slice(start,end),
|
||||
// }
|
||||
// const items: Record<string, TrashItem> = {};
|
||||
// for (const id of ids) items[id] = records[id];
|
||||
// return items;
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
@@ -18,6 +18,7 @@ along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
import {
|
||||
ColumnBuilderCallback,
|
||||
CreateTableBuilder,
|
||||
Kysely,
|
||||
Migration,
|
||||
@@ -25,6 +26,9 @@ import {
|
||||
sql
|
||||
} from "kysely";
|
||||
|
||||
const COLLATE_NOCASE: ColumnBuilderCallback = (col) =>
|
||||
col.modifyEnd(sql`collate nocase`);
|
||||
|
||||
export class NNMigrationProvider implements MigrationProvider {
|
||||
async getMigrations(): Promise<Record<string, Migration>> {
|
||||
return {
|
||||
@@ -35,7 +39,7 @@ export class NNMigrationProvider implements MigrationProvider {
|
||||
// .modifyEnd(sql`without rowid`)
|
||||
.$call(addBaseColumns)
|
||||
.$call(addTrashColumns)
|
||||
.addColumn("title", "text")
|
||||
.addColumn("title", "text", COLLATE_NOCASE)
|
||||
.addColumn("headline", "text")
|
||||
.addColumn("contentId", "text")
|
||||
.addColumn("pinned", "boolean")
|
||||
@@ -95,7 +99,7 @@ export class NNMigrationProvider implements MigrationProvider {
|
||||
.modifyEnd(sql`without rowid`)
|
||||
.$call(addBaseColumns)
|
||||
.$call(addTrashColumns)
|
||||
.addColumn("title", "text")
|
||||
.addColumn("title", "text", COLLATE_NOCASE)
|
||||
.addColumn("description", "text")
|
||||
.addColumn("dateEdited", "integer")
|
||||
.addColumn("pinned", "boolean")
|
||||
@@ -105,14 +109,14 @@ export class NNMigrationProvider implements MigrationProvider {
|
||||
.createTable("tags")
|
||||
.modifyEnd(sql`without rowid`)
|
||||
.$call(addBaseColumns)
|
||||
.addColumn("title", "text")
|
||||
.addColumn("title", "text", COLLATE_NOCASE)
|
||||
.execute();
|
||||
|
||||
await db.schema
|
||||
.createTable("colors")
|
||||
.modifyEnd(sql`without rowid`)
|
||||
.$call(addBaseColumns)
|
||||
.addColumn("title", "text")
|
||||
.addColumn("title", "text", COLLATE_NOCASE)
|
||||
.addColumn("colorCode", "text")
|
||||
.execute();
|
||||
|
||||
@@ -139,7 +143,7 @@ export class NNMigrationProvider implements MigrationProvider {
|
||||
.createTable("reminders")
|
||||
.modifyEnd(sql`without rowid`)
|
||||
.$call(addBaseColumns)
|
||||
.addColumn("title", "text")
|
||||
.addColumn("title", "text", COLLATE_NOCASE)
|
||||
.addColumn("description", "text")
|
||||
.addColumn("priority", "text")
|
||||
.addColumn("date", "integer")
|
||||
@@ -230,6 +234,18 @@ export class NNMigrationProvider implements MigrationProvider {
|
||||
.columns(["type"])
|
||||
.execute();
|
||||
|
||||
await db.schema
|
||||
.createIndex("note_deleted")
|
||||
.on("notes")
|
||||
.columns(["deleted"])
|
||||
.execute();
|
||||
|
||||
await db.schema
|
||||
.createIndex("note_date_deleted")
|
||||
.on("notes")
|
||||
.columns(["dateDeleted"])
|
||||
.execute();
|
||||
|
||||
await db.schema
|
||||
.createIndex("notebook_type")
|
||||
.on("notebooks")
|
||||
|
||||
@@ -19,6 +19,7 @@ along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import { EVENTS } from "../common";
|
||||
import {
|
||||
GroupHeader,
|
||||
GroupOptions,
|
||||
Item,
|
||||
MaybeDeletedItem,
|
||||
@@ -34,9 +35,9 @@ import {
|
||||
isFalse
|
||||
} from ".";
|
||||
import {
|
||||
AnyColumn,
|
||||
AnyColumnWithTable,
|
||||
ExpressionOrFactory,
|
||||
SelectExpression,
|
||||
SelectQueryBuilder,
|
||||
SqlBool,
|
||||
sql
|
||||
@@ -353,10 +354,41 @@ export class FilteredSelector<T extends Item> {
|
||||
}
|
||||
|
||||
async grouped(options: GroupOptions) {
|
||||
console.time("getting items");
|
||||
const count = await this.count();
|
||||
const sortFields = this.sortFields(options, true);
|
||||
const cursorRowValue = sql.join(sortFields.map((f) => sql.ref(f)));
|
||||
return new VirtualizedGrouping<T>(
|
||||
count,
|
||||
this.batchSize,
|
||||
async (start, end, cursor) => {
|
||||
const items = (await this.filter
|
||||
.$call(this.buildSortExpression(options))
|
||||
.$if(!cursor, (qb) => qb.offset(start))
|
||||
.$if(!!cursor, (qb) =>
|
||||
qb.where(
|
||||
(eb) => eb.parens(cursorRowValue),
|
||||
">",
|
||||
(eb) =>
|
||||
eb.parens(sql.join(sortFields.map((f) => (cursor as any)[f])))
|
||||
)
|
||||
)
|
||||
.limit(end - start)
|
||||
.selectAll()
|
||||
.execute()) as T[];
|
||||
return {
|
||||
ids: items.map((i) => i.id),
|
||||
items
|
||||
};
|
||||
},
|
||||
(items) => groupArray(items as any, options),
|
||||
() => this.groups(options)
|
||||
);
|
||||
}
|
||||
|
||||
async groups(options: GroupOptions) {
|
||||
const fields: Array<
|
||||
SelectExpression<DatabaseSchema, keyof DatabaseSchema>
|
||||
| AnyColumnWithTable<DatabaseSchema, keyof DatabaseSchema>
|
||||
| AnyColumn<DatabaseSchema, keyof DatabaseSchema>
|
||||
> = ["id", "type", options.sortBy];
|
||||
if (this.type === "notes") fields.push("notes.pinned", "notes.conflicted");
|
||||
else if (this.type === "notebooks") fields.push("notebooks.pinned");
|
||||
@@ -372,33 +404,63 @@ export class FilteredSelector<T extends Item> {
|
||||
"reminders.snoozeUntil"
|
||||
);
|
||||
}
|
||||
|
||||
const items = await this.filter
|
||||
.$if(!!this._limit, (eb) => eb.limit(this._limit))
|
||||
.$call(this.buildSortExpression(options))
|
||||
.select(fields)
|
||||
.execute();
|
||||
console.timeEnd("getting items");
|
||||
console.log(items.length);
|
||||
const ids = groupArray(items, options);
|
||||
return new VirtualizedGrouping<T>(ids, this.batchSize, (ids) =>
|
||||
this.records(ids)
|
||||
return groupArray(
|
||||
await this.filter
|
||||
.$call(this.buildSortExpression(options))
|
||||
.select(fields)
|
||||
.execute(),
|
||||
options
|
||||
);
|
||||
}
|
||||
|
||||
async sorted(options: SortOptions) {
|
||||
const items = await this.filter
|
||||
.$if(!!this._limit, (eb) => eb.limit(this._limit))
|
||||
.$call(this.buildSortExpression(options))
|
||||
.select("id")
|
||||
.execute();
|
||||
const ids = items.map((item) => item.id);
|
||||
return new VirtualizedGrouping<T>(ids, this.batchSize, (ids) =>
|
||||
this.records(ids)
|
||||
const count = await this.count();
|
||||
|
||||
return new VirtualizedGrouping<T>(
|
||||
count,
|
||||
this.batchSize,
|
||||
async (start, end) => {
|
||||
const items = (await this.filter
|
||||
.$call(this.buildSortExpression(options))
|
||||
.offset(start)
|
||||
.limit(end - start)
|
||||
.selectAll()
|
||||
.execute()) as T[];
|
||||
return {
|
||||
ids: items.map((i) => i.id),
|
||||
items
|
||||
};
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
private buildSortExpression(options: SortOptions) {
|
||||
async *[Symbol.asyncIterator]() {
|
||||
let lastRow: any | null = null;
|
||||
while (true) {
|
||||
const rows = await this.filter
|
||||
.orderBy("dateCreated asc")
|
||||
.orderBy("id asc")
|
||||
.$if(lastRow !== null, (qb) =>
|
||||
qb.where(
|
||||
(eb) => eb.refTuple("dateCreated", "id"),
|
||||
">",
|
||||
(eb) => eb.tuple(lastRow.dateCreated, lastRow.id)
|
||||
)
|
||||
)
|
||||
.limit(this.batchSize)
|
||||
.$if(this._fields.length === 0, (eb) => eb.selectAll())
|
||||
.$if(this._fields.length > 0, (eb) => eb.select(this._fields))
|
||||
.execute();
|
||||
if (rows.length === 0) break;
|
||||
for (const row of rows) {
|
||||
yield row as T;
|
||||
}
|
||||
|
||||
lastRow = rows[rows.length - 1];
|
||||
}
|
||||
}
|
||||
|
||||
private buildSortExpression(options: SortOptions, persistent?: boolean) {
|
||||
return <T>(
|
||||
qb: SelectQueryBuilder<DatabaseSchema, keyof DatabaseSchema, T>
|
||||
) => {
|
||||
@@ -407,34 +469,21 @@ export class FilteredSelector<T extends Item> {
|
||||
.$if(this.type === "notes" || this.type === "notebooks", (eb) =>
|
||||
eb.orderBy("pinned desc")
|
||||
)
|
||||
.$if(options.sortBy === "title", (eb) =>
|
||||
eb.orderBy(
|
||||
sql`${sql.raw(options.sortBy)} COLLATE NOCASE ${sql.raw(
|
||||
options.sortDirection
|
||||
)}`
|
||||
)
|
||||
)
|
||||
.$if(options.sortBy !== "title", (eb) =>
|
||||
eb.orderBy(options.sortBy, options.sortDirection)
|
||||
);
|
||||
.orderBy(options.sortBy, options.sortDirection)
|
||||
.$if(!!persistent, (eb) => eb.orderBy("id"));
|
||||
};
|
||||
}
|
||||
|
||||
async *[Symbol.asyncIterator]() {
|
||||
let index = 0;
|
||||
while (true) {
|
||||
const rows = await this.filter
|
||||
.$if(this._fields.length === 0, (eb) => eb.selectAll())
|
||||
.$if(this._fields.length > 0, (eb) => eb.select(this._fields))
|
||||
.orderBy("dateCreated asc")
|
||||
.offset(index)
|
||||
.limit(this.batchSize)
|
||||
.execute();
|
||||
if (rows.length === 0) break;
|
||||
index += this.batchSize;
|
||||
for (const row of rows) {
|
||||
yield row as T;
|
||||
}
|
||||
}
|
||||
private sortFields(options: SortOptions, persistent?: boolean) {
|
||||
const fields: Array<
|
||||
| AnyColumnWithTable<DatabaseSchema, keyof DatabaseSchema>
|
||||
| AnyColumn<DatabaseSchema, keyof DatabaseSchema>
|
||||
> = [];
|
||||
if (this.type === "notes") fields.push("conflicted");
|
||||
if (this.type === "notes" || this.type === "notebooks")
|
||||
fields.push("pinned");
|
||||
fields.push(options.sortBy);
|
||||
if (persistent) fields.push("id");
|
||||
return fields;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -28,98 +28,98 @@ function createMock() {
|
||||
Object.fromEntries(ids.map((id) => [id, id]))
|
||||
);
|
||||
}
|
||||
test("fetch items in batch if not found in cache", async (t) => {
|
||||
const mocked = createMock();
|
||||
const grouping = new VirtualizedGrouping<string>(
|
||||
["1", "2", "3", "4", "5", "6", "7"],
|
||||
3,
|
||||
mocked
|
||||
);
|
||||
t.expect(await grouping.item("4")).toStrictEqual(item("4"));
|
||||
t.expect(mocked).toHaveBeenCalledOnce();
|
||||
});
|
||||
// test("fetch items in batch if not found in cache", async (t) => {
|
||||
// const mocked = createMock();
|
||||
// const grouping = new VirtualizedGrouping<string>(
|
||||
// ["1", "2", "3", "4", "5", "6", "7"],
|
||||
// 3,
|
||||
// mocked
|
||||
// );
|
||||
// t.expect(await grouping.item("4")).toStrictEqual(item("4"));
|
||||
// t.expect(mocked).toHaveBeenCalledOnce();
|
||||
// });
|
||||
|
||||
test("do not fetch items in batch if found in cache", async (t) => {
|
||||
const mocked = createMock();
|
||||
const grouping = new VirtualizedGrouping<string>(
|
||||
["1", "2", "3", "4", "5", "6", "7"],
|
||||
3,
|
||||
mocked
|
||||
);
|
||||
t.expect(await grouping.item("4")).toStrictEqual(item("4"));
|
||||
t.expect(await grouping.item("4")).toStrictEqual(item("4"));
|
||||
t.expect(await grouping.item("4")).toStrictEqual(item("4"));
|
||||
t.expect(await grouping.item("4")).toStrictEqual(item("4"));
|
||||
t.expect(await grouping.item("4")).toStrictEqual(item("4"));
|
||||
t.expect(mocked).toHaveBeenCalledOnce();
|
||||
});
|
||||
// test("do not fetch items in batch if found in cache", async (t) => {
|
||||
// const mocked = createMock();
|
||||
// const grouping = new VirtualizedGrouping<string>(
|
||||
// ["1", "2", "3", "4", "5", "6", "7"],
|
||||
// 3,
|
||||
// mocked
|
||||
// );
|
||||
// t.expect(await grouping.item("4")).toStrictEqual(item("4"));
|
||||
// t.expect(await grouping.item("4")).toStrictEqual(item("4"));
|
||||
// t.expect(await grouping.item("4")).toStrictEqual(item("4"));
|
||||
// t.expect(await grouping.item("4")).toStrictEqual(item("4"));
|
||||
// t.expect(await grouping.item("4")).toStrictEqual(item("4"));
|
||||
// t.expect(mocked).toHaveBeenCalledOnce();
|
||||
// });
|
||||
|
||||
test("clear old cached batches", async (t) => {
|
||||
const mocked = createMock();
|
||||
const grouping = new VirtualizedGrouping<string>(
|
||||
["1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12"],
|
||||
3,
|
||||
mocked
|
||||
);
|
||||
t.expect(await grouping.item("1")).toStrictEqual(item("1"));
|
||||
t.expect(mocked).toHaveBeenLastCalledWith(["1", "2", "3"]);
|
||||
t.expect(await grouping.item("4")).toStrictEqual(item("4"));
|
||||
t.expect(mocked).toHaveBeenLastCalledWith(["4", "5", "6"]);
|
||||
t.expect(await grouping.item("7")).toStrictEqual(item("7"));
|
||||
t.expect(mocked).toHaveBeenLastCalledWith(["7", "8", "9"]);
|
||||
t.expect(await grouping.item("1")).toStrictEqual(item("1"));
|
||||
t.expect(mocked).toHaveBeenLastCalledWith(["1", "2", "3"]);
|
||||
});
|
||||
// test("clear old cached batches", async (t) => {
|
||||
// const mocked = createMock();
|
||||
// const grouping = new VirtualizedGrouping<string>(
|
||||
// ["1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12"],
|
||||
// 3,
|
||||
// mocked
|
||||
// );
|
||||
// t.expect(await grouping.item("1")).toStrictEqual(item("1"));
|
||||
// t.expect(mocked).toHaveBeenLastCalledWith(["1", "2", "3"]);
|
||||
// t.expect(await grouping.item("4")).toStrictEqual(item("4"));
|
||||
// t.expect(mocked).toHaveBeenLastCalledWith(["4", "5", "6"]);
|
||||
// t.expect(await grouping.item("7")).toStrictEqual(item("7"));
|
||||
// t.expect(mocked).toHaveBeenLastCalledWith(["7", "8", "9"]);
|
||||
// t.expect(await grouping.item("1")).toStrictEqual(item("1"));
|
||||
// t.expect(mocked).toHaveBeenLastCalledWith(["1", "2", "3"]);
|
||||
// });
|
||||
|
||||
test("clear old cached batches (random access)", async (t) => {
|
||||
const mocked = createMock();
|
||||
const grouping = new VirtualizedGrouping<string>(
|
||||
["1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12"],
|
||||
3,
|
||||
mocked
|
||||
);
|
||||
t.expect(await grouping.item("1")).toStrictEqual(item("1"));
|
||||
t.expect(mocked).toHaveBeenLastCalledWith(["1", "2", "3"]);
|
||||
// test("clear old cached batches (random access)", async (t) => {
|
||||
// const mocked = createMock();
|
||||
// const grouping = new VirtualizedGrouping<string>(
|
||||
// ["1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12"],
|
||||
// 3,
|
||||
// mocked
|
||||
// );
|
||||
// t.expect(await grouping.item("1")).toStrictEqual(item("1"));
|
||||
// t.expect(mocked).toHaveBeenLastCalledWith(["1", "2", "3"]);
|
||||
|
||||
t.expect(await grouping.item("7")).toStrictEqual(item("7"));
|
||||
t.expect(mocked).toHaveBeenLastCalledWith(["7", "8", "9"]);
|
||||
// t.expect(await grouping.item("7")).toStrictEqual(item("7"));
|
||||
// t.expect(mocked).toHaveBeenLastCalledWith(["7", "8", "9"]);
|
||||
|
||||
t.expect(await grouping.item("11")).toStrictEqual(item("11"));
|
||||
t.expect(mocked).toHaveBeenLastCalledWith(["10", "11", "12"]);
|
||||
// t.expect(await grouping.item("11")).toStrictEqual(item("11"));
|
||||
// t.expect(mocked).toHaveBeenLastCalledWith(["10", "11", "12"]);
|
||||
|
||||
t.expect(await grouping.item("1")).toStrictEqual(item("1"));
|
||||
t.expect(mocked).toHaveBeenLastCalledWith(["1", "2", "3"]);
|
||||
// t.expect(await grouping.item("1")).toStrictEqual(item("1"));
|
||||
// t.expect(mocked).toHaveBeenLastCalledWith(["1", "2", "3"]);
|
||||
|
||||
t.expect(await grouping.item("7")).toStrictEqual(item("7"));
|
||||
t.expect(mocked).toHaveBeenLastCalledWith(["7", "8", "9"]);
|
||||
});
|
||||
// t.expect(await grouping.item("7")).toStrictEqual(item("7"));
|
||||
// t.expect(mocked).toHaveBeenLastCalledWith(["7", "8", "9"]);
|
||||
// });
|
||||
|
||||
test("reloading ids should clear all cached batches", async (t) => {
|
||||
const mocked = createMock();
|
||||
const grouping = new VirtualizedGrouping<string>(
|
||||
["1", "3", "4", "5", "7", "6", "50"],
|
||||
3,
|
||||
mocked
|
||||
);
|
||||
// test("reloading ids should clear all cached batches", async (t) => {
|
||||
// const mocked = createMock();
|
||||
// const grouping = new VirtualizedGrouping<string>(
|
||||
// ["1", "3", "4", "5", "7", "6", "50"],
|
||||
// 3,
|
||||
// mocked
|
||||
// );
|
||||
|
||||
t.expect(await grouping.item("1")).toStrictEqual(item("1"));
|
||||
t.expect(mocked).toHaveBeenLastCalledWith(["1", "3", "4"]);
|
||||
// t.expect(await grouping.item("1")).toStrictEqual(item("1"));
|
||||
// t.expect(mocked).toHaveBeenLastCalledWith(["1", "3", "4"]);
|
||||
|
||||
grouping.refresh([
|
||||
"1",
|
||||
"2",
|
||||
"3",
|
||||
"4",
|
||||
"5",
|
||||
"6",
|
||||
"7",
|
||||
"8",
|
||||
"9",
|
||||
"10",
|
||||
"11",
|
||||
"12"
|
||||
]);
|
||||
// grouping.refresh([
|
||||
// "1",
|
||||
// "2",
|
||||
// "3",
|
||||
// "4",
|
||||
// "5",
|
||||
// "6",
|
||||
// "7",
|
||||
// "8",
|
||||
// "9",
|
||||
// "10",
|
||||
// "11",
|
||||
// "12"
|
||||
// ]);
|
||||
|
||||
t.expect(await grouping.item("1")).toStrictEqual(item("1"));
|
||||
t.expect(mocked).toHaveBeenLastCalledWith(["1", "2", "3"]);
|
||||
});
|
||||
// t.expect(await grouping.item("1")).toStrictEqual(item("1"));
|
||||
// t.expect(mocked).toHaveBeenLastCalledWith(["1", "2", "3"]);
|
||||
// });
|
||||
|
||||
@@ -96,37 +96,42 @@ export function groupArray(
|
||||
sortBy: "dateEdited",
|
||||
sortDirection: "desc"
|
||||
}
|
||||
): (string | GroupHeader)[] {
|
||||
const groups = new Map<string, string[]>([
|
||||
["Conflicted", []],
|
||||
["Pinned", []]
|
||||
]);
|
||||
): { index: number; group: GroupHeader }[] {
|
||||
const groups = new Map<string, number>();
|
||||
// [
|
||||
// ["Conflicted", 0],
|
||||
// ["Pinned", 1]
|
||||
// ]
|
||||
|
||||
const keySelector = getKeySelector(options);
|
||||
for (const item of items) {
|
||||
for (let i = 0; i < items.length; ++i) {
|
||||
const item = items[i];
|
||||
const groupTitle = keySelector(item);
|
||||
const group = groups.get(groupTitle) || [];
|
||||
group.push(item.id);
|
||||
groups.set(groupTitle, group);
|
||||
const group = groups.get(groupTitle);
|
||||
if (typeof group === "undefined") groups.set(groupTitle, i);
|
||||
}
|
||||
|
||||
return flattenGroups(groups);
|
||||
const groupIndices: { index: number; group: GroupHeader }[] = [];
|
||||
groups.forEach((index, title) =>
|
||||
groupIndices.push({ index, group: { id: title, title, type: "header" } })
|
||||
);
|
||||
return groupIndices;
|
||||
// return flattenGroups(groups);
|
||||
}
|
||||
|
||||
function flattenGroups(groups: Map<string, string[]>) {
|
||||
const items: (string | GroupHeader)[] = [];
|
||||
groups.forEach((groupItems, groupTitle) => {
|
||||
if (groupItems.length <= 0) return;
|
||||
items.push({
|
||||
title: groupTitle,
|
||||
id: groupTitle.toLowerCase(),
|
||||
type: "header"
|
||||
});
|
||||
items.push(...groupItems);
|
||||
});
|
||||
// function flattenGroups<T extends GroupableItem>(groups: Map<string, T[]>) {
|
||||
// const items: GroupedItems<T> = [];
|
||||
// groups.forEach((groupItems, groupTitle) => {
|
||||
// if (groupItems.length <= 0) return;
|
||||
// items.push({
|
||||
// title: groupTitle,
|
||||
// id: groupTitle.toLowerCase(),
|
||||
// type: "header"
|
||||
// });
|
||||
// items.push(...groupItems);
|
||||
// });
|
||||
|
||||
return items;
|
||||
}
|
||||
// return items;
|
||||
// }
|
||||
|
||||
function getFirstCharacter(str: string) {
|
||||
if (!str) return "-";
|
||||
@@ -136,5 +141,5 @@ function getFirstCharacter(str: string) {
|
||||
}
|
||||
|
||||
function getTitle(item: PartialGroupableItem): string {
|
||||
return item.filename || item.title || "Unknown";
|
||||
return ("filename" in item ? item.filename : item.title) || "Unknown";
|
||||
}
|
||||
|
||||
@@ -17,61 +17,78 @@ You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
import { GroupHeader, isGroupHeader } from "../types";
|
||||
import { GroupHeader } from "../types";
|
||||
|
||||
type BatchOperator<T> = (
|
||||
ids: string[],
|
||||
items: Record<string, T>
|
||||
) => Promise<Record<string, unknown>>;
|
||||
type Batch<T> = { items: Record<string, T>; data?: Record<string, unknown> };
|
||||
type BatchOperator<T> = (ids: string[], items: T[]) => Promise<unknown[]>;
|
||||
type Batch<T> = {
|
||||
items: T[];
|
||||
groups?: { index: number; hidden?: boolean; group: GroupHeader }[];
|
||||
data?: unknown[];
|
||||
};
|
||||
export class VirtualizedGrouping<T> {
|
||||
private cache: Map<number, Batch<T>> = new Map();
|
||||
private pending: Map<number, Promise<Batch<T>>> = new Map();
|
||||
groups: GroupHeader[] = [];
|
||||
public ids: number[];
|
||||
private loadBatchTimeout?: number;
|
||||
private cacheHits = 0;
|
||||
|
||||
constructor(
|
||||
public ids: (string | GroupHeader)[],
|
||||
count: number,
|
||||
private readonly batchSize: number,
|
||||
private readonly fetchItems: (ids: string[]) => Promise<Record<string, T>>
|
||||
private readonly fetchItems: (
|
||||
start: number,
|
||||
end: number,
|
||||
cursor?: T
|
||||
) => Promise<{ ids: string[]; items: T[] }>,
|
||||
private readonly groupItems?: (
|
||||
items: T[]
|
||||
) => { index: number; hidden?: boolean; group: GroupHeader }[],
|
||||
readonly groups?: () => Promise<{ index: number; group: GroupHeader }[]>
|
||||
) {
|
||||
this.ids = ids;
|
||||
this.groups = ids.filter((i) => isGroupHeader(i)) as GroupHeader[];
|
||||
this.ids = new Array(count).fill(0);
|
||||
}
|
||||
|
||||
getKey(index: number) {
|
||||
const item = this.ids[index];
|
||||
if (isGroupHeader(item)) return item.id;
|
||||
return item;
|
||||
}
|
||||
|
||||
get ungrouped() {
|
||||
return this.ids.filter((i) => !isGroupHeader(i)) as string[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Get item from cache or request the appropriate batch for caching
|
||||
* and load it from there.
|
||||
*/
|
||||
item(id: string): Promise<T | undefined>;
|
||||
item(
|
||||
id: string,
|
||||
operate: BatchOperator<T>
|
||||
): Promise<{ item: T; data: unknown } | undefined>;
|
||||
async item(id: string, operate?: BatchOperator<T>) {
|
||||
const index = this.ids.indexOf(id);
|
||||
if (index <= -1) return;
|
||||
|
||||
const batchIndex = Math.floor(index / this.batchSize);
|
||||
const { items, data } =
|
||||
const batch = this.cache.get(batchIndex);
|
||||
if (!batch) return `${index}`;
|
||||
|
||||
const { items, groups } = batch;
|
||||
const itemIndexInBatch = index - batchIndex * this.batchSize;
|
||||
const group = groups?.find(
|
||||
(f) => f.index === itemIndexInBatch && !f.hidden
|
||||
);
|
||||
return group
|
||||
? group.group.id
|
||||
: (items[itemIndexInBatch] as any)?.id || `${index}`;
|
||||
}
|
||||
|
||||
item(index: number): Promise<{ item: T; group?: GroupHeader }>;
|
||||
item(
|
||||
index: number,
|
||||
operate: BatchOperator<T>
|
||||
): Promise<{ item: T; group?: GroupHeader; data: unknown }>;
|
||||
async item(index: number, operate?: BatchOperator<T>) {
|
||||
const batchIndex = Math.floor(index / this.batchSize);
|
||||
if (this.cache.has(batchIndex)) this.cacheHits++;
|
||||
const { items, groups, data } =
|
||||
this.cache.get(batchIndex) || (await this.loadBatch(batchIndex, operate));
|
||||
|
||||
return operate ? { item: items[id], data: data?.[id] } : items[id];
|
||||
const itemIndexInBatch = index - batchIndex * this.batchSize;
|
||||
const group = groups?.find(
|
||||
(f) => f.index === itemIndexInBatch && !f.hidden
|
||||
);
|
||||
return {
|
||||
item: items[itemIndexInBatch],
|
||||
group: group?.group,
|
||||
data: data?.[itemIndexInBatch]
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Reload the cache
|
||||
*/
|
||||
refresh(ids: (string | GroupHeader)[]) {
|
||||
refresh(ids: number[]) {
|
||||
this.ids = ids;
|
||||
this.cache.clear();
|
||||
}
|
||||
@@ -80,19 +97,49 @@ export class VirtualizedGrouping<T> {
|
||||
*
|
||||
* @param index
|
||||
*/
|
||||
private async load(batchIndex: number, operate?: BatchOperator<T>) {
|
||||
private async load(
|
||||
batchIndex: number,
|
||||
operate?: BatchOperator<T>
|
||||
): Promise<Batch<T>> {
|
||||
const lastBatchIndex = this.last;
|
||||
const prev = this.cache.get(lastBatchIndex);
|
||||
const start = batchIndex * this.batchSize;
|
||||
const end = start + this.batchSize;
|
||||
const batchIds = this.ids
|
||||
.slice(start, end)
|
||||
.filter((id) => typeof id === "string") as string[];
|
||||
const items = await this.fetchItems(batchIds);
|
||||
console.time("operate");
|
||||
// we can use a cursor instead of start/end offsets for batches that are
|
||||
// right next to each other.
|
||||
const cursor =
|
||||
lastBatchIndex + 1 === batchIndex
|
||||
? prev?.items.at(-1)
|
||||
: lastBatchIndex - 1 === batchIndex
|
||||
? prev?.items[0]
|
||||
: undefined;
|
||||
const { ids, items } = await this.fetchItems(start, end, cursor);
|
||||
const groups = this.groupItems?.(items);
|
||||
|
||||
if (
|
||||
prev &&
|
||||
prev.groups &&
|
||||
prev.groups.length > 0 &&
|
||||
groups &&
|
||||
groups.length > 0
|
||||
) {
|
||||
// if user is moving downwards, we hide the first group from the
|
||||
// current batch, otherwise we hide the last group from the previous
|
||||
// batch.
|
||||
const group =
|
||||
lastBatchIndex < batchIndex
|
||||
? groups[0] //groups.length - 1]
|
||||
: prev.groups[prev.groups.length - 1];
|
||||
if (group.group.title === groups[0].group.title) {
|
||||
group.hidden = true;
|
||||
}
|
||||
}
|
||||
|
||||
const batch = {
|
||||
items,
|
||||
data: operate ? await operate(batchIds, items) : undefined
|
||||
groups,
|
||||
data: operate ? await operate(ids, items) : undefined
|
||||
};
|
||||
console.timeEnd("operate");
|
||||
this.cache.set(batchIndex, batch);
|
||||
this.clear();
|
||||
return batch;
|
||||
@@ -100,12 +147,18 @@ export class VirtualizedGrouping<T> {
|
||||
|
||||
private loadBatch(batch: number, operate?: BatchOperator<T>) {
|
||||
if (this.pending.has(batch)) return this.pending.get(batch)!;
|
||||
console.time("loading batch");
|
||||
const promise = this.load(batch, operate);
|
||||
this.pending.set(batch, promise);
|
||||
return promise.finally(() => {
|
||||
console.timeEnd("loading batch");
|
||||
this.pending.delete(batch);
|
||||
if (!this.isLastBatch(batch)) clearTimeout(this.loadBatchTimeout);
|
||||
return new Promise<Batch<T>>((resolve, reject) => {
|
||||
this.loadBatchTimeout = setTimeout(() => {
|
||||
const promise = this.load(batch, operate);
|
||||
this.pending.set(batch, promise);
|
||||
return promise
|
||||
.then(resolve)
|
||||
.catch(reject)
|
||||
.finally(() => {
|
||||
this.pending.delete(batch);
|
||||
});
|
||||
}, 16) as unknown as number;
|
||||
});
|
||||
}
|
||||
|
||||
@@ -116,4 +169,13 @@ export class VirtualizedGrouping<T> {
|
||||
if (this.cache.size === 2) break;
|
||||
}
|
||||
}
|
||||
|
||||
private get last() {
|
||||
const keys = Array.from(this.cache.keys());
|
||||
return keys[keys.length - 1];
|
||||
}
|
||||
|
||||
private isLastBatch(batch: number) {
|
||||
return Math.floor(this.ids.length / this.batchSize) === batch;
|
||||
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user