Revert "feat: add hypersearch for searching"

This reverts commit 5362e54428.
thecodrr
2020-11-16 15:01:16 +05:00
parent c81c0feda8
commit caaa808174
7 changed files with 37 additions and 94 deletions

View File

@@ -17,31 +17,31 @@ beforeEach(async () => {
 //TODO
 test("search notes", () =>
   noteTest({
-    content: { type: "delta", data: [{ insert: "5" }], text: "5" },
+    content: { delta: [{ insert: "5" }], text: "5" },
   }).then(async ({ db }) => {
     await db.notes.add(TEST_NOTE);
-    let filtered = db.lookup.notes(db.notes.all, "5");
+    let filtered = await db.lookup.notes(db.notes.all, "5");
     expect(filtered.length).toBe(1);
   }));

 test("search notes with a locked note", () =>
   noteTest({
-    content: { type: "delta", data: [{ insert: "5" }], text: "5" },
+    content: { delta: [{ insert: "5" }], text: "5" },
   }).then(async ({ db }) => {
     const noteId = await db.notes.add(TEST_NOTE);
     await db.vault.create("password");
     await db.vault.add(noteId);
-    let filtered = db.lookup.notes(db.notes.all, "Thi");
+    let filtered = await db.lookup.notes(db.notes.all, "I am a");
     expect(filtered.length).toBe(1);
   }));

 test("search notes with an empty note", () =>
   noteTest({
-    content: { type: "delta", data: [{ insert: "5" }], text: "5" },
+    content: { delta: [{ insert: "5" }], text: "5" },
   }).then(async ({ db }) => {
     await db.notes.add({
       title: "hello world",
-      content: { type: "delta", data: [{ insert: "\n" }] },
+      content: { delta: [], text: "" },
     });
     let filtered = await db.lookup.notes(db.notes.all, "hello world");
     expect(filtered.length).toBe(1);

View File

@@ -14,22 +14,27 @@ export default class Lookup {
   }

   notes(notes, query) {
-    let contentIds = this._db.content._collection.search.searchDocs(query);
-    let noteIds = this._db.notes._collection.search.searchDocs(query);
-    return notes.filter((note) => {
-      return (
-        contentIds.findIndex((content) => note.id === content.noteId) > -1 ||
-        noteIds.findIndex((n) => n.id === note.id) > -1
-      );
+    return new Promise((resolve) => {
+      const results = [];
+      let index = 0,
+        max = notes.length;
+      notes.forEach(async (note) => {
+        const text =
+          note.locked || !note.content
+            ? ""
+            : await this._db.text.get(note.content.text);
+        const title = note.title;
+        if (fzs(query, text + title)) results.push(note);
+        if (++index >= max) return resolve(results);
+      });
     });
   }

   notebooks(array, query) {
-    const notebooksIds = this._db.notebooks._collection.search.searchDocs(
-      query
-    );
     return tfun.filter(
-      (nb) => notebooksIds.findIndex((notebook) => notebook.id === nb.id) > -1
+      (nb) =>
+        fzs(query, nb.title + " " + nb.description) ||
+        nb.topics.some((topic) => fuzzysearch(query, topic.title))
     )(array);
   }
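After this revert, Lookup.notes is asynchronous again: it wraps the per-note checks in a Promise and resolves once every note has been checked, which is why the test diff above awaits it. A minimal usage sketch, assuming a db instance shaped like the one in the tests (the logging is illustrative only):

  // hypothetical caller; `db` is assumed to expose `lookup` and `notes` as in the tests above
  const matches = await db.lookup.notes(db.notes.all, "hello world");
  console.log(matches.map((note) => note.title));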

View File

@@ -51,10 +51,7 @@ const tests = [
type: "note", type: "note",
title: "someTitle", title: "someTitle",
}), }),
getMainCollectionParams("content", { getMainCollectionParams("content", { ops: [{ insert: "true" }] }),
data: [{ insert: "true" }],
type: "delta",
}),
]; ];
describe.each(tests)("%s preparation", (collection, add, addExtra) => { describe.each(tests)("%s preparation", (collection, add, addExtra) => {

View File

@@ -19,8 +19,8 @@ export default class CachedCollection extends IndexedCollection {
     this.map.clear();
   }

-  async updateItem(item, index = true) {
-    await super.updateItem(item, index);
+  async updateItem(item) {
+    await super.updateItem(item);
     this.map.set(item.id, item);
     EV.publish("db:write", item);
   }

View File

@@ -1,15 +1,8 @@
 import Indexer from "./indexer";
-import HyperSearch from "hypersearch";
-import { getSchema } from "./schemas";

 export default class IndexedCollection {
   constructor(context, type) {
     this.indexer = new Indexer(context, type);
-    this.type = type;
-    this.search = new HyperSearch({
-      schema: getSchema(type),
-      tokenizer: "forward",
-    });
   }

   clear() {
@@ -18,11 +11,11 @@ export default class IndexedCollection {
   async init() {
     await this.indexer.init();
-    const index = await this.indexer.read(`${this.type}-index`);
-    if (index) this.search.import(index);
   }

   async addItem(item) {
-    if (!item.id) throw new Error("The item must contain the id field.");
     const exists = await this.exists(item.id);
     if (!exists) item.dateCreated = item.dateCreated || Date.now();
     await this.updateItem(item);
@@ -31,32 +24,22 @@ export default class IndexedCollection {
     }
   }

-  async updateItem(item, index = true) {
+  async updateItem(item) {
     if (!item.id) throw new Error("The item must contain the id field.");
     // if item is newly synced, remote will be true.
     item.dateEdited = item.remote ? item.dateEdited : Date.now();
     // the item has become local now, so remove the flag.
     delete item.remote;
     await this.indexer.write(item.id, item);
-
-    if (index && (this.type === "notes" || this.type === "notebooks")) {
-      this.search.addDoc(item);
-      this.indexer.write(`${this.type}-index`, this.search.export());
-    }
   }

-  async removeItem(id) {
-    await this.updateItem(
-      {
-        id,
-        deleted: true,
-        dateCreated: Date.now(),
-        dateEdited: Date.now(),
-      },
-      false
-    );
-    if (this.type === "notes" || this.type === "notebooks")
-      this.search.remove(id);
+  removeItem(id) {
+    return this.updateItem({
+      id,
+      deleted: true,
+      dateCreated: Date.now(),
+      dateEdited: Date.now(),
+    });
   }

   exists(id) {
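Note that the reverted removeItem does not physically delete the record; it overwrites it with a tombstone via updateItem, so the deletion still goes through the normal write path and gets persisted by indexer.write. A minimal sketch of the object it writes (the id value here is hypothetical):

  // shape passed to updateItem() and then persisted by indexer.write(id, item)
  const tombstone = {
    id: "some-item-id", // hypothetical
    deleted: true,
    dateCreated: Date.now(),
    dateEdited: Date.now(),
  };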

View File

@@ -1,42 +0,0 @@
-const { getContentFromData } = require("../contenttypes");
-
-const schemas = {
-  content: {
-    id: { asId: true, store: false },
-    noteId: { store: true, index: false },
-    data: {
-      resolve: (doc) => {
-        if (doc.data.iv) return "";
-        const content = getContentFromData(doc.type, doc.data);
-        return content._text;
-      },
-      store: false,
-      index: true,
-    },
-  },
-  notes: {
-    id: { asId: true, store: false },
-    title: true,
-    // pinned: {
-    //   resolve: (doc) => (doc.pinned ? "pinned:true" : "pinned:false"),
-    //   index: true,
-    //   tokenizer: "strict",
-    //   splitter: "-",
-    // },
-  },
-  notebooks: {
-    id: { asId: true, store: false },
-    title: true,
-    description: true,
-    topics: {
-      resolve: (doc) => {
-        if (!doc.topics) return "";
-        return doc.topics.map((v) => v.title || v).join(" ");
-      },
-    },
-  },
-};
-
-export function getSchema(type) {
-  return schemas[type];
-}
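For context, this deleted schemas module fed the HyperSearch instance that the IndexedCollection diff above also removes. A minimal sketch of that wiring, assuming the "notes" collection type and using only calls that appear elsewhere in this diff (addDoc, searchDocs):

  import HyperSearch from "hypersearch";
  import { getSchema } from "./schemas";

  // hypothetical standalone usage; in the app this lived inside IndexedCollection
  const search = new HyperSearch({ schema: getSchema("notes"), tokenizer: "forward" });
  search.addDoc({ id: "1", title: "hello world" });
  const matches = search.searchDocs("hello");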

View File

@@ -2704,8 +2704,8 @@ http-signature@~1.2.0:
sshpk "^1.7.0" sshpk "^1.7.0"
"hypersearch@https://github.com/streetwriters/hypersearch": "hypersearch@https://github.com/streetwriters/hypersearch":
version "0.0.4" version "0.0.1"
resolved "https://github.com/streetwriters/hypersearch#9cad0f7c2d93f19bf06d7fa6602357b89cdba38d" resolved "https://github.com/streetwriters/hypersearch#bbeac8ad63c3f70e94dfd05b0f00f44ecc989a19"
iconv-lite@0.4.24: iconv-lite@0.4.24:
version "0.4.24" version "0.4.24"