[WEB-5128] refactor: remove local database dependencies and unused code (#8109)

This commit is contained in:
Prateek Shourya
2025-11-13 18:32:15 +05:30
committed by GitHub
parent 64f90b828b
commit 80670b2b3f
50 changed files with 36 additions and 6242 deletions

View File

@@ -4,7 +4,6 @@
build/*
out/*
public/*
core/local-db/worker/wa-sqlite/src/*
dist/*
node_modules/*
.turbo/*

View File

@@ -62,7 +62,7 @@ export const IssueView: FC<IIssueView> = observer((props) => {
const {
setPeekIssue,
isAnyModalOpen,
issue: { getIssueById, getIsLocalDBIssueDescription },
issue: { getIssueById },
} = useIssueDetail();
const { isAnyModalOpen: isAnyEpicModalOpen } = useIssueDetail(EIssueServiceType.EPICS);
const issue = getIssueById(issueId);
@@ -72,8 +72,6 @@ export const IssueView: FC<IIssueView> = observer((props) => {
if (embedIssue && embedRemoveCurrentNotification) embedRemoveCurrentNotification();
};
const isLocalDBIssueDescription = getIsLocalDBIssueDescription(issueId);
const toggleDeleteIssueModal = (value: boolean) => setIsDeleteIssueModalOpen(value);
const toggleArchiveIssueModal = (value: boolean) => setIsArchiveIssueModalOpen(value);
const toggleDuplicateIssueModal = (value: boolean) => setIsDuplicateIssueModalOpen(value);
@@ -177,7 +175,7 @@ export const IssueView: FC<IIssueView> = observer((props) => {
projectId={projectId}
issueId={issueId}
issueOperations={issueOperations}
disabled={disabled || isLocalDBIssueDescription}
disabled={disabled}
isArchived={is_archived}
isSubmitting={isSubmitting}
setIsSubmitting={(value) => setIsSubmitting(value)}
@@ -218,7 +216,7 @@ export const IssueView: FC<IIssueView> = observer((props) => {
projectId={projectId}
issueId={issueId}
issueOperations={issueOperations}
disabled={disabled || isLocalDBIssueDescription}
disabled={disabled}
isArchived={is_archived}
isSubmitting={isSubmitting}
setIsSubmitting={(value) => setIsSubmitting(value)}

View File

@@ -77,8 +77,6 @@ export const WORKSPACE_STATES = (workspaceSlug: string) => `WORKSPACE_STATES_${w
export const WORKSPACE_SIDEBAR_PREFERENCES = (workspaceSlug: string) =>
`WORKSPACE_SIDEBAR_PREFERENCES_${workspaceSlug.toUpperCase()}`;
export const WORKSPACE_DB = (workspaceSlug: string) => `WORKSPACE_DB_${workspaceSlug.toUpperCase()}`;
export const PROJECT_GITHUB_REPOSITORY = (projectId: string) => `PROJECT_GITHUB_REPOSITORY_${projectId.toUpperCase()}`;
// cycles

View File

@@ -36,9 +36,7 @@ import { useProjectState } from "@/hooks/store/use-project-state";
import { useProjectView } from "@/hooks/store/use-project-view";
import { useUserPermissions } from "@/hooks/store/user";
import { useTimeLineChart } from "@/hooks/use-timeline-chart";
// local
import { persistence } from "@/local-db/storage.sqlite";
// plane web constants
interface IProjectAuthWrapper {
workspaceSlug: string;
projectId?: string;
@@ -86,21 +84,6 @@ export const ProjectAuthWrapper: FC<IProjectAuthWrapper> = observer((props) => {
// eslint-disable-next-line react-hooks/exhaustive-deps
}, []);
useSWR(
workspaceSlug && projectId ? `PROJECT_SYNC_ISSUES_${workspaceSlug.toString()}_${projectId.toString()}` : null,
workspaceSlug && projectId
? () => {
persistence.syncIssues(projectId.toString());
}
: null,
{
revalidateIfStale: true,
revalidateOnFocus: true,
revalidateOnReconnect: true,
refreshInterval: 5 * 60 * 1000,
}
);
// fetching project details
useSWR(
workspaceSlug && projectId ? PROJECT_DETAILS(workspaceSlug.toString(), projectId.toString()) : null,

View File

@@ -6,7 +6,6 @@ import Image from "next/image";
import Link from "next/link";
import { useParams } from "next/navigation";
import useSWR from "swr";
import useSWRImmutable from "swr/immutable";
// ui
import { LogOut } from "lucide-react";
import { EUserPermissions, EUserPermissionsLevel } from "@plane/constants";
@@ -29,7 +28,6 @@ import {
WORKSPACE_FAVORITE,
WORKSPACE_STATES,
WORKSPACE_SIDEBAR_PREFERENCES,
WORKSPACE_DB,
} from "@/constants/fetch-keys";
import { useFavorite } from "@/hooks/store/use-favorite";
import { useMember } from "@/hooks/store/use-member";
@@ -38,8 +36,6 @@ import { useProjectState } from "@/hooks/store/use-project-state";
import { useWorkspace } from "@/hooks/store/use-workspace";
import { useUser, useUserPermissions } from "@/hooks/store/user";
import { usePlatformOS } from "@/hooks/use-platform-os";
// local
import { persistence } from "@/local-db/storage.sqlite";
interface IWorkspaceAuthWrapper {
children: ReactNode;
@@ -120,20 +116,6 @@ export const WorkspaceAuthWrapper: FC<IWorkspaceAuthWrapper> = observer((props)
{ revalidateIfStale: false, revalidateOnFocus: false }
);
// initialize the local database
const { isLoading: isDBInitializing } = useSWRImmutable(
workspaceSlug ? WORKSPACE_DB(workspaceSlug.toString()) : null,
workspaceSlug
? async () => {
// persistence.reset();
await persistence.initialize(workspaceSlug.toString());
// Load common data
persistence.syncWorkspace();
return true;
}
: null
);
const handleSignOut = async () => {
await signOut().catch(() =>
setToast({
@@ -145,7 +127,7 @@ export const WorkspaceAuthWrapper: FC<IWorkspaceAuthWrapper> = observer((props)
};
// if list of workspaces are not there then we have to render the spinner
if (isParentLoading || allWorkspaces === undefined || loader || isDBInitializing) {
if (isParentLoading || allWorkspaces === undefined || loader) {
return (
<div className="grid h-full place-items-center bg-custom-background-100 p-4 rounded-lg border border-custom-border-200">
<div className="flex flex-col items-center gap-3 text-center">

View File

@@ -1,492 +0,0 @@
import * as Comlink from "comlink";
import { set } from "lodash-es";
// plane
import { EIssueGroupBYServerToProperty } from "@plane/constants";
import type { TIssue, TIssueParams } from "@plane/types";
// lib
import { rootStore } from "@/lib/store-context";
// services
import { IssueService } from "@/services/issue/issue.service";
//
import { ARRAY_FIELDS, BOOLEAN_FIELDS } from "./utils/constants";
import { getSubIssuesWithDistribution } from "./utils/data.utils";
import createIndexes from "./utils/indexes";
import { addIssuesBulk, syncDeletesToLocal } from "./utils/load-issues";
import { loadWorkSpaceData } from "./utils/load-workspace";
import { issueFilterCountQueryConstructor, issueFilterQueryConstructor } from "./utils/query-constructor";
import { runQuery } from "./utils/query-executor";
import { sanitizeWorkItemQueries } from "./utils/query-sanitizer.ts";
import { createTables } from "./utils/tables";
import { clearOPFS, getGroupedIssueResults, getSubGroupedIssueResults, log, logError } from "./utils/utils";
// Bump DB_VERSION whenever the local schema changes; a stored mismatch wipes
// OPFS storage during _initialize.
const DB_VERSION = 1.3;
// Server pagination size used when pulling issues for sync (cursor "<size>:<page>:0").
const PAGE_SIZE = 500;
// Number of issues staged concurrently per insert batch in addIssuesBulk.
const BATCH_SIZE = 50;
// Per-project sync bookkeeping: current status plus the in-flight sync promise.
type TProjectStatus = {
  issues: { status: undefined | "loading" | "ready" | "error" | "syncing"; sync: Promise<void> | undefined };
};
// Lifecycle of the local database as a whole.
type TDBStatus = "initializing" | "ready" | "error" | undefined;
/**
 * Browser-local SQLite persistence layer for work items.
 *
 * Owns a Comlink-proxied web worker that runs the actual database and exposes
 * initialization, workspace/project sync, and query helpers. Reads fall back
 * to the server whenever the local DB is disabled, not ready, or errors out.
 */
export class Storage {
  // Worker proxy exposing { exec, close }; null until initialize() succeeds.
  db: any;
  // Lifecycle of the database as a whole (per-project state lives in projectStatus).
  status: TDBStatus = undefined;
  dbName = "plane";
  // Per-project issue sync status plus the in-flight sync promise.
  projectStatus: Record<string, TProjectStatus> = {};
  workspaceSlug: string = "";

  constructor() {
    this.db = null;
    // Close the connection on tab unload; guarded so SSR never touches window.
    if (typeof window !== "undefined") {
      window.addEventListener("beforeunload", this.closeDBConnection);
    }
  }

  /** Close the worker-side DB connection if one is open. */
  closeDBConnection = async () => {
    if (this.db) {
      await this.db.close();
    }
  };

  /** Drop all in-memory state and close the connection (OPFS files are kept). */
  reset = () => {
    if (this.db) {
      this.db.close();
    }
    this.db = null;
    this.status = undefined;
    this.projectStatus = {};
    this.workspaceSlug = "";
  };

  /** Close the DB, wipe the OPFS-backed storage, then reset in-memory state. */
  clearStorage = async (force = false) => {
    try {
      await this.db?.close();
      await clearOPFS(force);
      this.reset();
    } catch (e) {
      console.error("Error clearing sqlite sync storage", e);
    }
  };

  /** Spin up the DB worker for `workspaceSlug` and wire `this.db` to its exec/close. */
  private initializeWorker = async (workspaceSlug: string) => {
    // The dynamic import supplies DBClass's type for Comlink.wrap; the Worker
    // URL performs the actual load.
    const { DBClass } = await import("./worker/db");
    const worker = new Worker(new URL("./worker/db.ts", import.meta.url));
    const MyWorker = Comlink.wrap<typeof DBClass>(worker);
    // Add cleanup on window unload
    window.addEventListener("unload", () => worker.terminate());
    this.workspaceSlug = workspaceSlug;
    this.dbName = workspaceSlug;
    const instance = await new MyWorker();
    await instance.init(workspaceSlug);
    this.db = {
      exec: instance.exec,
      close: instance.close,
    };
  };

  /**
   * Public entry point: initialize the local DB for a workspace.
   * @returns false when the local DB is disabled for the user or init fails.
   */
  initialize = async (workspaceSlug: string): Promise<boolean> => {
    if (!rootStore.user.localDBEnabled) return false; // local DB disabled for this user
    // Switching workspaces requires a fresh worker and clean state.
    if (workspaceSlug !== this.workspaceSlug) {
      this.reset();
    }
    try {
      await this._initialize(workspaceSlug);
      return true;
    } catch (err) {
      logError(err);
      this.status = "error";
      return false;
    }
  };

  /**
   * Core initialization: start the worker, verify the stored DB_VERSION
   * (clearing storage and re-creating the worker on mismatch), create tables,
   * and mark the DB ready.
   */
  _initialize = async (workspaceSlug: string): Promise<boolean> => {
    if (this.status === "initializing") {
      console.warn(`Initialization already in progress for workspace ${workspaceSlug}`);
      return false;
    }
    if (this.status === "ready") {
      console.warn(`Already initialized for workspace ${workspaceSlug}`);
      return true;
    }
    if (this.status === "error") {
      console.warn(`Initialization failed for workspace ${workspaceSlug}`);
      return false;
    }
    try {
      this.workspaceSlug = workspaceSlug;
      this.dbName = workspaceSlug;
      await this.initializeWorker(workspaceSlug);
      const dbVersion = await this.getOption("DB_VERSION");
      log("Stored db version", dbVersion);
      log("Current db version", DB_VERSION);
      // Check if the database version matches the current version
      // If there's a mismatch, clear storage to avoid compatibility issues
      if (
        dbVersion !== undefined &&
        dbVersion !== "" &&
        !isNaN(Number(dbVersion)) &&
        Number(dbVersion) !== DB_VERSION
      ) {
        log("Database version mismatch detected - clearing storage to ensure compatibility");
        await this.clearStorage();
        await this.initializeWorker(workspaceSlug);
      } else {
        log("Database version matches current version - proceeding with data load");
      }
      this.status = "ready";
      // Runs on every init — presumably idempotent (IF NOT EXISTS); confirm in tables.ts.
      await createTables();
      await this.setOption("DB_VERSION", DB_VERSION.toString());
      return true;
    } catch (error) {
      this.status = "error";
      this.db = null;
      throw new Error(`Failed to initialize database worker: ${error}`);
    }
  };

  /**
   * Pull workspace-level data into the local DB. The "sync_workspace" option
   * row acts as a short-lived lock (see getIsWriteInProgress) so overlapping
   * calls are skipped.
   */
  syncWorkspace = async () => {
    if (!rootStore.user.localDBEnabled) return;
    const syncInProgress = await this.getIsWriteInProgress("sync_workspace");
    if (syncInProgress) {
      log("Sync in progress, skipping");
      return;
    }
    try {
      // NOTE(review): lock set/clear are not awaited — appears deliberate
      // (best-effort lock); confirm.
      this.setOption("sync_workspace", new Date().toUTCString());
      await loadWorkSpaceData(this.workspaceSlug);
      this.deleteOption("sync_workspace");
    } catch (e) {
      logError(e);
      this.deleteOption("sync_workspace");
    }
  };

  /** Sync a single project's issues; returns false when the local DB is disabled. */
  syncProject = async (projectId: string) => {
    if (
      // document.hidden ||
      !rootStore.user.localDBEnabled
    )
      return false; // return if the window gets hidden
    // Load labels, members, states, modules, cycles
    await this.syncIssues(projectId);
    // // Sync rest of the projects
    // const projects = await getProjectIds();
    // // Exclude the one we just synced
    // const projectsToSync = projects.filter((p: string) => p !== projectId);
    // for (const project of projectsToSync) {
    //   await delay(8000);
    //   await this.syncIssues(project);
    // }
    // this.setOption("workspace_synced_at", new Date().toISOString());
  };

  /** Kick off (and await) an issue sync for a project, tracking its promise. */
  syncIssues = async (projectId: string) => {
    if (!rootStore.user.localDBEnabled || !this.db) {
      return false;
    }
    try {
      const sync = this._syncIssues(projectId);
      this.setSync(projectId, sync);
      await sync;
    } catch (e) {
      logError(e);
      this.setStatus(projectId, "error");
    }
  };

  /**
   * Incremental issue sync: pages issues from the server (filtered by
   * updated_at__gt when a prior sync exists), bulk-inserts them, mirrors
   * server-side deletes, and builds indexes after the very first load.
   */
  _syncIssues = async (projectId: string) => {
    log("### Sync started");
    let status = this.getStatus(projectId);
    if (status === "loading" || status === "syncing") {
      log(`Project ${projectId} is already loading or syncing`);
      return;
    }
    const syncPromise = this.getSync(projectId);
    if (syncPromise) {
      // Redundant check?
      return;
    }
    const queryParams: { cursor: string; updated_at__gt?: string; description: boolean } = {
      cursor: `${PAGE_SIZE}:0:0`,
      description: true,
    };
    const syncedAt = await this.getLastSyncTime(projectId);
    const projectSync = await this.getOption(projectId);
    if (syncedAt) {
      // Only fetch issues modified since the newest locally synced issue.
      queryParams["updated_at__gt"] = syncedAt;
    }
    // "loading" = first ever load for this project; "syncing" = incremental refresh.
    this.setStatus(projectId, projectSync === "ready" ? "syncing" : "loading");
    status = this.getStatus(projectId);
    log(`### ${projectSync === "ready" ? "Syncing" : "Loading"} issues to local db for project ${projectId}`);
    const start = performance.now();
    const issueService = new IssueService();
    const response = await issueService.getIssuesForSync(this.workspaceSlug, projectId, queryParams);
    await addIssuesBulk(response.results, BATCH_SIZE);
    // Remaining pages are fetched in parallel, then inserted sequentially.
    if (response.total_pages > 1) {
      const promiseArray = [];
      for (let i = 1; i < response.total_pages; i++) {
        queryParams.cursor = `${PAGE_SIZE}:${i}:0`;
        promiseArray.push(issueService.getIssuesForSync(this.workspaceSlug, projectId, queryParams));
      }
      const pages = await Promise.all(promiseArray);
      for (const page of pages) {
        await addIssuesBulk(page.results, BATCH_SIZE);
      }
    }
    if (syncedAt) {
      await syncDeletesToLocal(this.workspaceSlug, projectId, { updated_at__gt: syncedAt });
    }
    log("### Time taken to add work items", performance.now() - start);
    // Indexes are only created once, after the initial full load.
    if (status === "loading") {
      await createIndexes();
    }
    this.setOption(projectId, "ready");
    this.setStatus(projectId, "ready");
    this.setSync(projectId, undefined);
  };

  /** Number of locally cached issues for a project. */
  getIssueCount = async (projectId: string) => {
    const count = await runQuery(`select count(*) as count from issues where project_id='${projectId}'`);
    return count[0]["count"];
  };

  /** Most recently updated server-sourced issue (rows flagged is_local_update are excluded). */
  getLastUpdatedIssue = async (projectId: string) => {
    const lastUpdatedIssue = await runQuery(
      `select id, name, updated_at , sequence_id from issues WHERE project_id='${projectId}' AND is_local_update IS NULL order by datetime(updated_at) desc limit 1 `
    );
    if (lastUpdatedIssue.length) {
      return lastUpdatedIssue[0];
    }
    return;
  };

  /** updated_at of the newest synced issue, or false when nothing is cached. */
  getLastSyncTime = async (projectId: string) => {
    const issue = await this.getLastUpdatedIssue(projectId);
    if (!issue) {
      return false;
    }
    return issue.updated_at;
  };

  /**
   * Query issues, preferring the local DB and falling back to the server when
   * the project is still loading, errored, or the local DB is unavailable.
   * Returns a server-shaped paginated payload (results + cursors + totals).
   * NOTE(review): callers may pass projectId="" (workspace-wide queries, e.g.
   * load-workspace helpers); that path skips the server fallback.
   */
  getIssues = async (
    workspaceSlug: string,
    projectId: string,
    queries: Partial<Record<TIssueParams, string | boolean>> | undefined,
    config: any
  ) => {
    log("#### Queries", queries);
    const currentProjectStatus = this.getStatus(projectId);
    if (
      !currentProjectStatus ||
      this.status !== "ready" ||
      currentProjectStatus === "loading" ||
      currentProjectStatus === "error" ||
      !rootStore.user.localDBEnabled
    ) {
      if (rootStore.user.localDBEnabled) {
        log(`Project ${projectId} is loading, falling back to server`);
      }
      const issueService = new IssueService();
      // Ignore projectStatus if projectId is not provided
      if (projectId) {
        return await issueService.getIssuesFromServer(workspaceSlug, projectId, queries, config);
      }
      if (this.status !== "ready" && !rootStore.user.localDBEnabled) {
        return;
      }
    }
    const sanitizedQueries = sanitizeWorkItemQueries(workspaceSlug, projectId, queries);
    const { cursor, group_by, sub_group_by } = sanitizedQueries || {};
    const query = issueFilterQueryConstructor(this.workspaceSlug, projectId, sanitizedQueries);
    log("#### Query", query);
    const countQuery = issueFilterCountQueryConstructor(this.workspaceSlug, projectId, sanitizedQueries);
    const start = performance.now();
    let issuesRaw: any[] = [];
    let count: any[];
    try {
      [issuesRaw, count] = await Promise.all([runQuery(query), runQuery(countQuery)]);
    } catch (e) {
      log("Unable to get work items from local db, falling back to server");
      logError(e);
      const issueService = new IssueService();
      return await issueService.getIssuesFromServer(workspaceSlug, projectId, queries, config);
    }
    const end = performance.now();
    const { total_count } = count[0];
    // Cursor format is "<pageSize>:<page>:<offset>".
    const [pageSize, page, offset] = cursor && typeof cursor === "string" ? cursor.split(":") : [];
    const groupByProperty: string =
      EIssueGroupBYServerToProperty[group_by as keyof typeof EIssueGroupBYServerToProperty];
    const subGroupByProperty =
      EIssueGroupBYServerToProperty[sub_group_by as keyof typeof EIssueGroupBYServerToProperty];
    const parsingStart = performance.now();
    let issueResults = issuesRaw.map((issue: any) => formatLocalIssue(issue));
    log("#### Work item Results", issueResults.length);
    const parsingEnd = performance.now();
    const grouping = performance.now();
    // Grouping only applies to the first page; later pages appear to be
    // returned flat — TODO confirm intended.
    if (groupByProperty && page === "0") {
      if (subGroupByProperty) {
        issueResults = getSubGroupedIssueResults(issueResults);
      } else {
        issueResults = getGroupedIssueResults(issueResults);
      }
    }
    // NOTE(review): groupCount is computed but never used — candidate for removal.
    const groupCount = group_by ? Object.keys(issueResults).length : undefined;
    // const subGroupCount = sub_group_by ? Object.keys(issueResults[Object.keys(issueResults)[0]]).length : undefined;
    const groupingEnd = performance.now();
    const times = {
      IssueQuery: end - start,
      Parsing: parsingEnd - parsingStart,
      Grouping: groupingEnd - grouping,
    };
    if ((window as any).DEBUG) {
      console.table(times);
    }
    const total_pages = Math.ceil(total_count / Number(pageSize));
    const next_page_results = total_pages > parseInt(page) + 1;
    // Mirror the server's pagination envelope.
    const out = {
      results: issueResults,
      next_cursor: `${pageSize}:${parseInt(page) + 1}:${Number(offset) + Number(pageSize)}`,
      prev_cursor: `${pageSize}:${parseInt(page) - 1}:${Number(offset) - Number(pageSize)}`,
      total_results: total_count,
      total_count,
      next_page_results,
      total_pages,
    };
    return out;
  };

  /** Fetch one issue by id from the local DB; undefined when unavailable. */
  getIssue = async (issueId: string) => {
    try {
      if (!rootStore.user.localDBEnabled || this.status !== "ready") return;
      const issues = await runQuery(`select * from issues where id='${issueId}'`);
      if (Array.isArray(issues) && issues.length) {
        return formatLocalIssue(issues[0]);
      }
    } catch (err) {
      logError(err);
      console.warn("unable to fetch issue from local db");
    }
    return;
  };

  /**
   * Sub-issues plus their state-group distribution; falls back to the server
   * until the workspace has been synced at least once.
   */
  getSubIssues = async (workspaceSlug: string, projectId: string, issueId: string) => {
    const workspace_synced_at = await this.getOption("workspace_synced_at");
    if (!workspace_synced_at) {
      const issueService = new IssueService();
      return await issueService.subIssues(workspaceSlug, projectId, issueId);
    }
    return await getSubIssuesWithDistribution(issueId);
  };

  // --- per-project status / in-flight sync bookkeeping ---
  getStatus = (projectId: string) => this.projectStatus[projectId]?.issues?.status || undefined;
  setStatus = (projectId: string, status: "loading" | "ready" | "error" | "syncing" | undefined = undefined) => {
    set(this.projectStatus, `${projectId}.issues.status`, status);
  };
  getSync = (projectId: string) => this.projectStatus[projectId]?.issues?.sync;
  setSync = (projectId: string, sync: Promise<void> | undefined) => {
    set(this.projectStatus, `${projectId}.issues.sync`, sync);
  };

  // --- key/value "options" table helpers ---

  /** Read a single option value, returning `fallback` when absent or on error. */
  getOption = async (key: string, fallback?: string | boolean | number) => {
    try {
      const options = await runQuery(`select * from options where key='${key}'`);
      if (options.length) {
        return options[0].value;
      }
      return fallback;
    } catch (e) {
      return fallback;
    }
  };

  /** Upsert a single option value. */
  setOption = async (key: string, value: string) => {
    await runQuery(`insert or replace into options (key, value) values ('${key}', '${value}')`);
  };

  /** Delete an option row by key. */
  deleteOption = async (key: string) => {
    await runQuery(` DELETE FROM options where key='${key}'`);
  };

  /** Read several options at once as a key → value map. */
  getOptions = async (keys: string[]) => {
    const options = await runQuery(`select * from options where key in ('${keys.join("','")}')`);
    return options.reduce((acc: any, option: any) => {
      acc[option.key] = option.value;
      return acc;
    }, {});
  };

  /**
   * Whether a write marked by option `op` started within the last 5 seconds —
   * a soft lock that expires automatically.
   */
  getIsWriteInProgress = async (op: string) => {
    const writeStartTime = await this.getOption(op, false);
    if (writeStartTime) {
      // Check if it has been more than 5seconds
      const current = new Date();
      const start = new Date(writeStartTime);
      if (current.getTime() - start.getTime() < 5000) {
        return true;
      }
      return false;
    }
    return false;
  };
}
export const persistence = new Storage();

/**
 * Convert a raw row from the local db into an issue object: JSON-encoded
 * array columns are decoded (null/empty becomes []) and 0/1 integer columns
 * are turned back into booleans.
 * @param issue raw local-db row (mutated in place and returned)
 * @returns the same object, typed as a TIssue plus grouping metadata
 */
export const formatLocalIssue = (issue: any) => {
  const formatted = issue;
  for (const field of ARRAY_FIELDS) {
    formatted[field] = formatted[field] ? JSON.parse(formatted[field]) : [];
  }
  for (const field of BOOLEAN_FIELDS) {
    formatted[field] = formatted[field] === 1;
  }
  return formatted as TIssue & { group_id?: string; total_issues: number; sub_group_id?: string };
};

View File

@@ -1,23 +0,0 @@
// Issue columns stored as JSON-encoded arrays in SQLite (decoded by formatLocalIssue).
export const ARRAY_FIELDS = ["label_ids", "assignee_ids", "module_ids"];
// Issue columns stored as 0/1 integers and read back as booleans.
export const BOOLEAN_FIELDS = ["is_draft"];
// Server-side group_by param → local issue property used for grouping
// (presumably consumed by the query constructor — confirm).
export const GROUP_BY_MAP = {
  state_id: "state_id",
  priority: "priority",
  cycle_id: "cycle_id",
  created_by: "created_by",
  // Array Props
  issue_module__module_id: "module_ids",
  labels__id: "label_ids",
  assignees__id: "assignee_ids",
  target_date: "target_date",
};
// Priority name → sortable integer proxy (stored as priority_proxy; none = 0).
export const PRIORITY_MAP = {
  low: 1,
  medium: 2,
  high: 3,
  urgent: 4,
  none: 0,
};

View File

@@ -1,30 +0,0 @@
import { runQuery } from "./query-executor";
/** Distinct project ids present in the local `states` table. */
export const getProjectIds = async () =>
  runQuery(`select project_id from states where project_id is not null group by project_id`);
/** All locally cached issues whose parent is `issueId`. */
export const getSubIssues = async (issueId: string) => runQuery(`select * from issues where parent_id = '${issueId}'`);
/**
 * Group the ids of `issueId`'s sub-issues by their state group.
 * Returns {} when the issue has no sub-issues.
 */
export const getSubIssueDistribution = async (issueId: string) => {
  const q = `select s.'group', group_concat(i.id) as issues from issues i left join states s on s.id = i.state_id where i.parent_id = '${issueId}' group by s.'group'`;
  const rows = await runQuery(q);
  if (!rows.length) {
    return {};
  }
  const distribution: Record<string, string[]> = {};
  for (const row of rows as { group: string; issues: string }[]) {
    // group_concat yields one comma-separated id list per state group.
    distribution[row.group] = row.issues.split(",");
  }
  return distribution;
};
/** Fetch a work item's sub-issues together with their per-state-group distribution. */
export const getSubIssuesWithDistribution = async (issueId: string) => {
  const [sub_issues, state_distribution] = await Promise.all([
    getSubIssues(issueId),
    getSubIssueDistribution(issueId),
  ]);
  return { sub_issues, state_distribution };
};

View File

@@ -1,67 +0,0 @@
import { persistence } from "../storage.sqlite";
import { log } from "./utils";
/** Create the issue lookup indexes: a unique id index plus per-project column indexes. */
export const createIssueIndexes = async () => {
  const indexedColumns = [
    "state_id",
    "sort_order",
    // "priority",
    "priority_proxy",
    "project_id",
    "created_by",
    "cycle_id",
    "sequence_id",
  ];
  const statements = [
    `CREATE UNIQUE INDEX issues_issue_id_idx ON issues (id)`,
    ...indexedColumns.map((column) => `CREATE INDEX issues_issue_${column}_idx ON issues (project_id, ${column})`),
  ];
  // Fire all CREATE INDEX statements concurrently, as before.
  await Promise.all(statements.map((sql) => persistence.db.exec({ sql })));
};
/** Create the covering index used for issue_meta (issue_id, key, value) lookups. */
export const createIssueMetaIndexes = async () => {
  // Note: despite the old "Drop indexes" note, this statement CREATEs the index.
  await persistence.db.exec({ sql: `CREATE INDEX issue_meta_all_idx ON issue_meta (issue_id,key,value)` });
};
/** Create lookup indexes for workspace-level entity tables. */
export const createWorkSpaceIndexes = async () => {
  const statements = [
    // Labels
    `CREATE INDEX labels_name_idx ON labels (id,name,project_id)`,
    // Modules
    `CREATE INDEX modules_name_idx ON modules (id,name,project_id)`,
    // States
    `CREATE INDEX states_name_idx ON states (id,name,project_id)`,
    // Cycles
    `CREATE INDEX cycles_name_idx ON cycles (id,name,project_id)`,
    // Members
    `CREATE INDEX members_name_idx ON members (id,first_name)`,
    // Estimate Points @todo
    `CREATE INDEX estimate_points_name_idx ON estimate_points (id,value)`,
    // Options
    `CREATE INDEX options_key_idx ON options (key)`,
  ];
  // All statements run concurrently, matching the original behavior.
  await Promise.all(statements.map((sql) => persistence.db.exec({ sql })));
};
/** Build all local-db indexes, logging the duration; failures are logged and swallowed. */
const createIndexes = async () => {
  log("### Creating indexes");
  const start = performance.now();
  try {
    await Promise.all([createIssueIndexes(), createIssueMetaIndexes(), createWorkSpaceIndexes()]);
  } catch (e) {
    // Best-effort: index creation errors are only logged, never rethrown.
    console.log((e as Error).message);
  }
  log("### Indexes created in", `${performance.now() - start}ms`);
};
export default createIndexes;

View File

@@ -1,133 +0,0 @@
import type { TIssue } from "@plane/types";
import { rootStore } from "@/lib/store-context";
import { IssueService } from "@/services/issue";
import { persistence } from "../storage.sqlite";
import { ARRAY_FIELDS, PRIORITY_MAP } from "./constants";
import { issueSchema } from "./schemas";
import { log } from "./utils";
// Per-project offline status flags (not referenced within this module).
export const PROJECT_OFFLINE_STATUS: Record<string, boolean> = {};

/** Insert/replace a single issue (row + meta rows) inside one transaction. */
export const addIssue = async (issue: any) => {
  // Skip when the tab is hidden, the local DB is disabled, or the DB is not ready.
  const canWrite = !document.hidden && rootStore.user.localDBEnabled && !!persistence.db;
  if (!canWrite) return;
  await persistence.db.exec("BEGIN;");
  await stageIssueInserts(issue);
  await persistence.db.exec("COMMIT;");
};
/**
 * Insert/replace many issues inside a single transaction, staging them in
 * concurrent batches of `batchSize`.
 */
export const addIssuesBulk = async (issues: any, batchSize = 50) => {
  if (!rootStore.user.localDBEnabled || !persistence.db) return;
  if (!issues.length) return;
  const insertStart = performance.now();
  await persistence.db.exec("BEGIN;");
  for (let offset = 0; offset < issues.length; offset += batchSize) {
    const batch = issues.slice(offset, offset + batchSize);
    await Promise.all(
      batch.map((issue: any) => {
        // type_id must be present for the schema-driven insert; default it.
        if (!issue.type_id) {
          issue.type_id = "";
        }
        return stageIssueInserts(issue);
      })
    );
  }
  await persistence.db.exec("COMMIT;");
  const insertEnd = performance.now();
  log("Inserted issues in ", `${insertEnd - insertStart}ms`, batchSize, issues.length);
};
/** Delete one issue row and all of its issue_meta rows in a single transaction. */
export const deleteIssueFromLocal = async (issue_id: any) => {
  if (!rootStore.user.localDBEnabled || !persistence.db) return;
  await persistence.db.exec("BEGIN;");
  await persistence.db.exec(`DELETE from issues where id='${issue_id}'`);
  await persistence.db.exec(`delete from issue_meta where issue_id='${issue_id}'`);
  await persistence.db.exec("COMMIT;");
};
// @todo: Update deletes the issue description from local. Implement a separate update.
/** Replace a locally cached issue: drop the old row + meta, then re-insert. */
export const updateIssue = async (issue: TIssue & { is_local_update: number }) => {
  if (document.hidden || !rootStore.user.localDBEnabled || !persistence.db) return;
  // delete-then-add keeps the meta rows consistent with the new payload
  await deleteIssueFromLocal(issue.id);
  await addIssue(issue);
};
/**
 * Mirror server-side deletions into the local DB: fetch the ids of issues
 * deleted on the server (per `queries`, e.g. updated_at__gt) and remove each
 * from the local cache.
 */
export const syncDeletesToLocal = async (workspaceId: string, projectId: string, queries: any) => {
  if (!rootStore.user.localDBEnabled || !persistence.db) return;
  const issueService = new IssueService();
  const response = await issueService.getDeletedIssues(workspaceId, projectId, queries);
  if (Array.isArray(response)) {
    // Fix: the mapped promises were previously discarded, so this function
    // resolved before the deletes landed and any rejection went unhandled.
    await Promise.all(response.map((issue) => deleteIssueFromLocal(issue)));
  }
};
/**
 * Stage the SQL for one issue: an INSERT OR REPLACE of the issues row plus one
 * issue_meta row per value of each array field. Assumes it runs inside a
 * transaction opened by the caller (addIssue / addIssuesBulk).
 */
const stageIssueInserts = async (issue: any) => {
  const issue_id = issue.id;
  // Priority is additionally stored as a sortable integer proxy.
  issue.priority_proxy = PRIORITY_MAP[issue.priority as keyof typeof PRIORITY_MAP];
  const keys = Object.keys(issueSchema);
  // Keep only schema columns with a usable value (0 is kept; null/undefined/"" are dropped).
  const sanitizedIssue = keys.reduce((acc: any, key) => {
    if (issue[key] || issue[key] === 0) {
      acc[key] = issue[key];
    }
    return acc;
  }, {});
  const columns = "'" + Object.keys(sanitizedIssue).join("','") + "'";
  // Serialize values for inline SQL: objects are JSON-encoded, single quotes doubled.
  // NOTE(review): values are interpolated into the SQL string rather than bound;
  // assumes issue payloads are server-provided — confirm before widening inputs.
  const values = Object.values(sanitizedIssue)
    .map((value) => {
      if (value === null) {
        return "";
      }
      if (typeof value === "object") {
        return `'${JSON.stringify(value).replace(/'/g, "''")}'`;
      }
      if (typeof value === "string") {
        return `'${value.replace(/'/g, "''")}'`;
      }
      return value;
    })
    .join(", ");
  const query = `INSERT OR REPLACE INTO issues (${columns}) VALUES (${values});`;
  await persistence.db.exec(query);
  // Rebuild the meta rows for this issue from scratch.
  await persistence.db.exec({
    sql: `DELETE from issue_meta where issue_id='${issue_id}'`,
  });
  const metaPromises: Promise<any>[] = [];
  ARRAY_FIELDS.forEach((field) => {
    const values = issue[field];
    if (values && values.length) {
      values.forEach((val: any) => {
        const p = persistence.db.exec({
          sql: `INSERT OR REPLACE into issue_meta(issue_id,key,value) values (?,?,?) `,
          bind: [issue_id, field, val],
        });
        metaPromises.push(p);
      });
    } else {
      // Added for empty fields — presumably so "no value" is queryable; confirm.
      const p = persistence.db.exec({
        sql: `INSERT OR REPLACE into issue_meta(issue_id,key,value) values (?,?,?) `,
        bind: [issue_id, field, ""],
      });
      metaPromises.push(p);
    }
  });
  await Promise.all(metaPromises);
};

View File

@@ -1,303 +0,0 @@
import { difference } from "lodash-es";
import type { IEstimate, IEstimatePoint, IWorkspaceMember, TIssue } from "@plane/types";
import { EstimateService } from "@/plane-web/services/project/estimate.service";
import { CycleService } from "@/services/cycle.service";
import { IssueLabelService } from "@/services/issue/issue_label.service";
import { ModuleService } from "@/services/module.service";
import { ProjectStateService } from "@/services/project";
import { WorkspaceService } from "@/services/workspace.service";
import { persistence } from "../storage.sqlite";
import { updateIssue } from "./load-issues";
import type { Schema } from "./schemas";
import { cycleSchema, estimatePointSchema, labelSchema, memberSchema, moduleSchema, stateSchema } from "./schemas";
import { log } from "./utils";
/**
 * INSERT OR REPLACE one row into `table`, keeping only the columns present in
 * `schema` (0 is kept; null/undefined/"" are dropped).
 */
const stageInserts = async (table: string, schema: Schema, data: any) => {
  const keys = Object.keys(schema);
  // Pick only the keys that are in the schema
  const filteredData = keys.reduce((acc: any, key) => {
    if (data[key] || data[key] === 0) {
      acc[key] = data[key];
    }
    return acc;
  }, {});
  const columns = "'" + Object.keys(filteredData).join("','") + "'";
  // Add quotes to column names
  // NOTE(review): same inline-SQL value serialization as stageIssueInserts in
  // load-issues — candidate for a shared helper. Values are interpolated, not
  // bound; assumes server-provided data.
  const values = Object.values(filteredData)
    .map((value) => {
      if (value === null) {
        return "";
      }
      if (typeof value === "object") {
        return `'${JSON.stringify(value).replace(/'/g, "''")}'`;
      }
      if (typeof value === "string") {
        return `'${value.replace(/'/g, "''")}'`;
      }
      return value;
    })
    .join(", ");
  const query = `INSERT OR REPLACE INTO ${table} (${columns}) VALUES (${values});`;
  await persistence.db.exec(query);
};
/**
 * Insert `data` rows into `table` in batches of `batchSize`.
 * Rows within a batch are staged concurrently — previously each row was
 * awaited one-by-one, which made the batching a no-op; this matches the
 * batch pattern used by addIssuesBulk in load-issues.
 */
const batchInserts = async (data: any[], table: string, schema: Schema, batchSize = 500) => {
  for (let i = 0; i < data.length; i += batchSize) {
    const batch = data.slice(i, i + batchSize);
    await Promise.all(batch.map((item) => stageInserts(table, schema, item)));
  }
};
export const getLabels = async (workspaceSlug: string) => {
const issueLabelService = new IssueLabelService();
const objects = await issueLabelService.getWorkspaceIssueLabels(workspaceSlug);
return objects;
};
export const getModules = async (workspaceSlug: string) => {
const moduleService = new ModuleService();
const objects = await moduleService.getWorkspaceModules(workspaceSlug);
return objects;
};
export const getCycles = async (workspaceSlug: string) => {
const cycleService = new CycleService();
const objects = await cycleService.getWorkspaceCycles(workspaceSlug);
return objects;
};
/** Fetch every project state in the workspace from the server. */
export const getStates = async (workspaceSlug: string) => new ProjectStateService().getWorkspaceStates(workspaceSlug);
/** Fetch all workspace estimates and flatten their points into one list. */
export const getEstimatePoints = async (workspaceSlug: string) => {
  const estimateService = new EstimateService();
  const estimates = await estimateService.fetchWorkspaceEstimates(workspaceSlug);
  // Estimates (or their points) may be missing; treat those as empty.
  const objects: IEstimatePoint[] = (estimates || []).flatMap((estimate: IEstimate) => estimate?.points ?? []);
  return objects;
};
export const getMembers = async (workspaceSlug: string) => {
const workspaceService = new WorkspaceService();
const members = await workspaceService.fetchWorkspaceMembers(workspaceSlug);
const objects = members.map((member: IWorkspaceMember) => member.member);
return objects;
};
/** Diff server labels against the local cache and scrub issues referencing deleted ones. */
const syncLabels = async (currentLabels: any) => {
  const serverIds = currentLabels.map((label: any) => label.id);
  const rows = await persistence.db.exec("SELECT id FROM labels;");
  const localIds = rows ? rows.map((row: any) => row.id) : [];
  // Anything cached locally that the server no longer has was deleted remotely.
  const removedIds = difference(localIds, serverIds);
  await syncIssuesWithDeletedLabels(removedIds as string[]);
};
/**
 * Strip deleted label ids from every cached issue that still references them.
 * No-op when `deletedLabelIds` is empty.
 */
export const syncIssuesWithDeletedLabels = async (deletedLabelIds: string[]) => {
  if (!deletedLabelIds.length) {
    return;
  }
  // Ideally we should use recursion to fetch all the issues, but 10000 issues is more than enough for now.
  const issues = await persistence.getIssues("", "", { labels: deletedLabelIds.join(","), cursor: "10000:0:0" }, {});
  if (issues?.results && Array.isArray(issues.results)) {
    const promises = issues.results.map(async (issue: TIssue) => {
      // Re-save the issue without the deleted labels, flagged as a local-only edit.
      const updatedIssue = {
        ...issue,
        label_ids: issue.label_ids.filter((id: string) => !deletedLabelIds.includes(id)),
        is_local_update: 1,
      };
      // We should await each update because it uses a transaction. But transaction are handled in the query executor.
      updateIssue(updatedIssue);
    });
    await Promise.all(promises);
  }
};
// Diff freshly fetched modules against the local `modules` table and scrub
// deleted module ids off cached issues.
const syncModules = async (currentModules: any) => {
  const freshIds = currentModules.map((module: any) => module.id);
  const rows = await persistence.db.exec("SELECT id FROM modules;");
  const localIds = rows ? rows.map((row: any) => row.id) : [];
  const staleIds = difference(localIds, freshIds);
  await syncIssuesWithDeletedModules(staleIds as string[]);
};
/**
 * Remove references to deleted modules from locally cached issues.
 * @param deletedModuleIds ids of modules that no longer exist on the server
 */
export const syncIssuesWithDeletedModules = async (deletedModuleIds: string[]) => {
  if (!deletedModuleIds.length) {
    return;
  }
  const issues = await persistence.getIssues("", "", { module: deletedModuleIds.join(","), cursor: "10000:0:0" }, {});
  if (issues?.results && Array.isArray(issues.results)) {
    const promises = issues.results.map(async (issue: TIssue) => {
      const updatedIssue = {
        ...issue,
        module_ids: issue.module_ids?.filter((id: string) => !deletedModuleIds.includes(id)) || [],
        is_local_update: 1,
      };
      // Await so Promise.all below actually waits for the write to finish;
      // the query executor still serializes transactions internally.
      await updateIssue(updatedIssue);
    });
    await Promise.all(promises);
  }
};
// Diff freshly fetched cycles against the local `cycles` table and detach
// cached issues from cycles deleted on the server.
const syncCycles = async (currentCycles: any) => {
  const freshIds = currentCycles.map((cycle: any) => cycle.id);
  const rows = await persistence.db.exec("SELECT id FROM cycles;");
  const localIds = rows ? rows.map((row: any) => row.id) : [];
  const staleIds = difference(localIds, freshIds);
  await syncIssuesWithDeletedCycles(staleIds as string[]);
};
/**
 * Null out cycle_id on locally cached issues whose cycle was deleted.
 * @param deletedCycleIds ids of cycles that no longer exist on the server
 */
export const syncIssuesWithDeletedCycles = async (deletedCycleIds: string[]) => {
  if (!deletedCycleIds.length) {
    return;
  }
  const issues = await persistence.getIssues("", "", { cycle: deletedCycleIds.join(","), cursor: "10000:0:0" }, {});
  if (issues?.results && Array.isArray(issues.results)) {
    const promises = issues.results.map(async (issue: TIssue) => {
      const updatedIssue = {
        ...issue,
        cycle_id: null,
        is_local_update: 1,
      };
      // Await so Promise.all below actually waits for the write to finish.
      await updateIssue(updatedIssue);
    });
    await Promise.all(promises);
  }
};
// Diff freshly fetched states against the local `states` table and clear the
// state of cached issues whose state was deleted on the server.
const syncStates = async (currentStates: any) => {
  const freshIds = currentStates.map((state: any) => state.id);
  const rows = await persistence.db.exec("SELECT id FROM states;");
  const localIds = rows ? rows.map((row: any) => row.id) : [];
  const staleIds = difference(localIds, freshIds);
  await syncIssuesWithDeletedStates(staleIds as string[]);
};
/**
 * Null out state_id on locally cached issues whose state was deleted.
 * @param deletedStateIds ids of states that no longer exist on the server
 */
export const syncIssuesWithDeletedStates = async (deletedStateIds: string[]) => {
  if (!deletedStateIds.length) {
    return;
  }
  const issues = await persistence.getIssues("", "", { state: deletedStateIds.join(","), cursor: "10000:0:0" }, {});
  if (issues?.results && Array.isArray(issues.results)) {
    const promises = issues.results.map(async (issue: TIssue) => {
      const updatedIssue = {
        ...issue,
        state_id: null,
        is_local_update: 1,
      };
      // Await so Promise.all below actually waits for the write to finish.
      await updateIssue(updatedIssue);
    });
    await Promise.all(promises);
  }
};
// Diff freshly fetched members against the local `members` table and remove
// deleted members from cached issue assignee lists.
const syncMembers = async (currentMembers: any) => {
  const freshIds = currentMembers.map((member: any) => member.id);
  const rows = await persistence.db.exec("SELECT id FROM members;");
  const localIds = rows ? rows.map((row: any) => row.id) : [];
  const staleIds = difference(localIds, freshIds);
  await syncIssuesWithDeletedMembers(staleIds as string[]);
};
/**
 * Remove deleted members from the assignee list of locally cached issues.
 * @param deletedMemberIds ids of members that no longer exist in the workspace
 */
export const syncIssuesWithDeletedMembers = async (deletedMemberIds: string[]) => {
  if (!deletedMemberIds.length) {
    return;
  }
  const issues = await persistence.getIssues(
    "",
    "",
    { assignees: deletedMemberIds.join(","), cursor: "10000:0:0" },
    {}
  );
  if (issues?.results && Array.isArray(issues.results)) {
    const promises = issues.results.map(async (issue: TIssue) => {
      const updatedIssue = {
        ...issue,
        // guard against cached issues that carry no assignee_ids at all
        assignee_ids: (issue.assignee_ids ?? []).filter((id: string) => !deletedMemberIds.includes(id)),
        is_local_update: 1,
      };
      // Await so Promise.all below actually waits for the write to finish.
      await updateIssue(updatedIssue);
    });
    await Promise.all(promises);
  }
};
/**
 * Refresh all workspace-level lookup tables (labels, modules, cycles, states,
 * estimate points, members) in the local SQLite DB from the API. Before the
 * tables are replaced, cached issues referencing server-deleted entities are
 * scrubbed via the sync* helpers.
 */
export const loadWorkSpaceData = async (workspaceSlug: string) => {
  // Bail out if the local DB has not been initialized.
  if (!persistence.db || !persistence.db.exec) {
    return;
  }
  log("Loading workspace data");
  // Fetch all six entity collections in parallel.
  const promises = [];
  promises.push(getLabels(workspaceSlug));
  promises.push(getModules(workspaceSlug));
  promises.push(getCycles(workspaceSlug));
  promises.push(getStates(workspaceSlug));
  promises.push(getEstimatePoints(workspaceSlug));
  promises.push(getMembers(workspaceSlug));
  const [labels, modules, cycles, states, estimates, members] = await Promise.all(promises);
  // @todo: we don't need this manual sync here, when backend adds these changes to issue activity and updates the updated_at of the issue.
  await syncLabels(labels);
  await syncModules(modules);
  await syncCycles(cycles);
  await syncStates(states);
  // TODO: Not handling sync estimates yet, as we don't know the new estimate point assigned.
  // Backend should update the updated_at of the issue when estimate point is updated, or we should have realtime sync on the issues table.
  // await syncEstimates(estimates);
  await syncMembers(members);
  const start = performance.now();
  // Replace each lookup table wholesale, each inside its own transaction so
  // readers never observe a half-written table.
  await persistence.db.exec("BEGIN;");
  await persistence.db.exec("DELETE FROM labels WHERE 1=1;");
  await batchInserts(labels, "labels", labelSchema);
  await persistence.db.exec("COMMIT;");
  await persistence.db.exec("BEGIN;");
  await persistence.db.exec("DELETE FROM modules WHERE 1=1;");
  await batchInserts(modules, "modules", moduleSchema);
  await persistence.db.exec("COMMIT;");
  await persistence.db.exec("BEGIN;");
  await persistence.db.exec("DELETE FROM cycles WHERE 1=1;");
  await batchInserts(cycles, "cycles", cycleSchema);
  await persistence.db.exec("COMMIT;");
  await persistence.db.exec("BEGIN;");
  await persistence.db.exec("DELETE FROM states WHERE 1=1;");
  await batchInserts(states, "states", stateSchema);
  await persistence.db.exec("COMMIT;");
  await persistence.db.exec("BEGIN;");
  await persistence.db.exec("DELETE FROM estimate_points WHERE 1=1;");
  await batchInserts(estimates, "estimate_points", estimatePointSchema);
  await persistence.db.exec("COMMIT;");
  await persistence.db.exec("BEGIN;");
  await persistence.db.exec("DELETE FROM members WHERE 1=1;");
  await batchInserts(members, "members", memberSchema);
  await persistence.db.exec("COMMIT;");
  const end = performance.now();
  log("Time taken to load workspace data", end - start);
};

View File

@@ -1,169 +0,0 @@
import {
getFilteredRowsForGrouping,
getIssueFieldsFragment,
getMetaKeys,
getOrderByFragment,
singleFilterConstructor,
translateQueryParams,
} from "./query.utils";
import { log } from "./utils";
// order_by values that cannot be resolved on the issues table alone: each
// maps the sort key to the related table that must be joined to sort by it.
export const SPECIAL_ORDER_BY = {
  labels__name: "labels",
  "-labels__name": "labels",
  assignees__first_name: "members",
  "-assignees__first_name": "members",
  issue_module__module__name: "modules",
  "-issue_module__module__name": "modules",
  issue_cycle__cycle__name: "cycles",
  "-issue_cycle__cycle__name": "cycles",
  state__name: "states",
  "-state__name": "states",
  estimate_point__key: "estimate_point",
  "-estimate_point__key": "estimate_point",
};
/**
 * Build the SQL that lists issues for the current filters/grouping.
 * Four shapes, chosen in order:
 *  1. sub_group_by set  -> window-ranked rows per (group, sub-group)
 *  2. group_by set      -> window-ranked rows per group
 *  3. special order_by  -> join the related table (SPECIAL_ORDER_BY) to sort
 *  4. flat list         -> plain filtered SELECT with LIMIT/OFFSET paging
 * NOTE(review): filter values are interpolated directly into the SQL string —
 * presumably safe because they come from local app state, not user-typed SQL;
 * confirm against callers.
 */
export const issueFilterQueryConstructor = (workspaceSlug: string, projectId: string, queries: any) => {
  const {
    cursor,
    per_page,
    group_by,
    sub_group_by,
    order_by = "-created_at",
    ...otherProps
  } = translateQueryParams(queries);
  // cursor format is "pageSize:page:offset"
  const [pageSize, page, offset] = cursor.split(":");
  let sql = "";
  const fieldsFragment = getIssueFieldsFragment();
  if (sub_group_by) {
    const orderByString = getOrderByFragment(order_by);
    sql = getFilteredRowsForGrouping(projectId, queries);
    // Rank rows inside each (group, sub-group) bucket; keep the top per_page.
    sql += `, ranked_issues AS ( SELECT fi.*,
ROW_NUMBER() OVER (PARTITION BY group_id, sub_group_id ${orderByString}) as rank,
COUNT(*) OVER (PARTITION by group_id, sub_group_id) as total_issues from fi)
SELECT ri.*, ${fieldsFragment}
FROM ranked_issues ri
JOIN issues i ON ri.id = i.id
WHERE rank <= ${per_page}
`;
    return sql;
  }
  if (group_by) {
    const orderByString = getOrderByFragment(order_by);
    sql = getFilteredRowsForGrouping(projectId, queries);
    // Rank rows inside each group bucket; keep the top per_page per group.
    sql += `, ranked_issues AS ( SELECT fi.*,
ROW_NUMBER() OVER (PARTITION BY group_id ${orderByString}) as rank,
COUNT(*) OVER (PARTITION by group_id) as total_issues FROM fi)
SELECT ri.*, ${fieldsFragment}
FROM ranked_issues ri
JOIN issues i ON ri.id = i.id
WHERE rank <= ${per_page}
`;
    return sql;
  }
  if (order_by && Object.keys(SPECIAL_ORDER_BY).includes(order_by)) {
    // Sorting on a related entity's display field: join the matching table
    // (aliased `s`), expose its display column under `name`, then aggregate.
    const name = order_by.replace("-", "");
    const orderByString = getOrderByFragment(order_by, "i.");
    sql = `WITH sorted_issues AS (`;
    sql += getFilteredRowsForGrouping(projectId, queries);
    sql += `SELECT fi.* , `;
    if (order_by.includes("assignee")) {
      sql += ` s.first_name as ${name} `;
    } else if (order_by.includes("estimate")) {
      sql += ` s.key as ${name} `;
    } else {
      sql += ` s.name as ${name} `;
    }
    sql += `FROM fi `;
    if (order_by && Object.keys(SPECIAL_ORDER_BY).includes(order_by)) {
      if (order_by.includes("cycle")) {
        sql += `
LEFT JOIN cycles s on fi.cycle_id = s.id`;
      }
      if (order_by.includes("estimate_point__key")) {
        sql += `
LEFT JOIN estimate_points s on fi.estimate_point = s.id`;
      }
      if (order_by.includes("state")) {
        sql += `
LEFT JOIN states s on fi.state_id = s.id`;
      }
      // Array-valued relations (labels/modules/assignees) go through issue_meta.
      if (order_by.includes("label")) {
        sql += `
LEFT JOIN issue_meta sm ON fi.id = sm.issue_id AND sm.key = 'label_ids'
LEFT JOIN labels s ON s.id = sm.value`;
      }
      if (order_by.includes("module")) {
        sql += `
LEFT JOIN issue_meta sm ON fi.id = sm.issue_id AND sm.key = 'module_ids'
LEFT JOIN modules s ON s.id = sm.value`;
      }
      if (order_by.includes("assignee")) {
        sql += `
LEFT JOIN issue_meta sm ON fi.id = sm.issue_id AND sm.key = 'assignee_ids'
LEFT JOIN members s ON s.id = sm.value`;
      }
      sql += ` ORDER BY ${name} ASC NULLS LAST`;
    }
    sql += `)`;
    // group_concat collapses the one-row-per-relation fan-out back to one row per issue.
    sql += `SELECT ${fieldsFragment}, group_concat(si.${name}) as ${name} from sorted_issues si JOIN issues i ON si.id = i.id
`;
    sql += ` group by i.id ${orderByString} LIMIT ${pageSize} OFFSET ${offset * 1 + page * pageSize};`;
    return sql;
  }
  // Flat (ungrouped) list with meta-table joins for array-valued filters.
  const filterJoinFields = getMetaKeys(queries);
  const orderByString = getOrderByFragment(order_by);
  sql = `SELECT ${fieldsFragment}`;
  if (otherProps.state_group) {
    sql += `, states.'group' as state_group`;
  }
  sql += ` from issues i
`;
  if (otherProps.state_group) {
    sql += `LEFT JOIN states ON i.state_id = states.id `;
  }
  filterJoinFields.forEach((field: string) => {
    const value = otherProps[field] || "";
    sql += ` INNER JOIN issue_meta ${field} ON i.id = ${field}.issue_id AND ${field}.key = '${field}' AND ${field}.value IN ('${value.split(",").join("','")}')
`;
  });
  sql += ` WHERE 1=1 `;
  if (projectId) {
    sql += ` AND i.project_id = '${projectId}' `;
  }
  sql += ` ${singleFilterConstructor(otherProps)} group by i.id `;
  sql += orderByString;
  // Add offset and paging to query; `* 1` coerces the cursor strings to numbers.
  sql += ` LIMIT ${pageSize} OFFSET ${offset * 1 + page * pageSize};`;
  return sql;
};
/**
 * Derive the total-count query from the list query by stripping grouping
 * params and rewriting the SELECT clause to COUNT(DISTINCT i.id).
 */
export const issueFilterCountQueryConstructor = (workspaceSlug: string, projectId: string, queries: any) => {
  //@todo Very crude way to extract count from the actual query. Needs to be refactored
  // Remove group by from the query to fallback to non group query
  const { group_by, sub_group_by, order_by, ...otherProps } = queries;
  let sql = issueFilterQueryConstructor(workspaceSlug, projectId, otherProps);
  const fieldsFragment = getIssueFieldsFragment();
  // NOTE(review): this textual replace assumes the flat query begins with the
  // exact fields fragment — fragile if issueFilterQueryConstructor changes.
  sql = sql.replace(`SELECT ${fieldsFragment}`, "SELECT COUNT(DISTINCT i.id) as total_count");
  // Remove everything after group by i.id (ORDER BY / LIMIT are irrelevant to a count)
  sql = `${sql.split("group by i.id")[0]};`;
  return sql;
};

View File

@@ -1,11 +0,0 @@
import { persistence } from "../storage.sqlite";
// Execute raw SQL against the local SQLite instance, returning result rows
// as plain objects (undefined when the DB is not initialized).
export const runQuery = async (sql: string) =>
  await persistence.db?.exec({
    sql,
    rowMode: "object",
    returnValue: "resultRows",
  });

View File

@@ -1,38 +0,0 @@
// plane constants
import { EUserPermissions } from "@plane/constants";
import type { TIssueParams } from "@plane/types";
// root store
import { rootStore } from "@/lib/store-context";
/**
 * Restrict work-item queries for guests: when a project has
 * guest_view_all_features disabled, force the created_by filter to the
 * current user so guests only see their own work items.
 */
export const sanitizeWorkItemQueries = (
  workspaceSlug: string,
  projectId: string,
  queries: Partial<Record<TIssueParams, string | boolean>> | undefined
): Partial<Record<TIssueParams, string | boolean>> | undefined => {
  // Resolve project, current user and their role for this project.
  const currentProject = rootStore.projectRoot.project.getProjectById(projectId);
  const currentUserId = rootStore.user.data?.id;
  const currentUserRole = rootStore.user.permission.getProjectRoleByWorkspaceSlugAndProjectId(workspaceSlug, projectId);
  // The restriction only applies to guests on projects where
  // guest_view_all_features is explicitly disabled.
  if (
    !(
      currentUserId &&
      currentUserRole === EUserPermissions.GUEST &&
      currentProject?.guest_view_all_features === false
    )
  ) {
    return queries;
  }
  // Force created_by when the filter is absent or already includes this user.
  const existingCreatedByFilter = queries?.created_by;
  const shouldApplyFilter =
    !existingCreatedByFilter ||
    (typeof existingCreatedByFilter === "string" && existingCreatedByFilter.includes(currentUserId));
  return shouldApplyFilter ? { ...queries, created_by: currentUserId } : queries;
};

View File

@@ -1,357 +0,0 @@
import { ARRAY_FIELDS, GROUP_BY_MAP, PRIORITY_MAP } from "./constants";
import { SPECIAL_ORDER_BY } from "./query-constructor";
import { issueSchema } from "./schemas";
import { wrapDateTime } from "./utils";
/**
 * Normalize UI query params into the column / meta-key names used by the
 * local SQLite layer (e.g. labels -> label_ids, priority -> priority_proxy).
 * Also maps group-by keys, rewrites priority ordering onto the numeric proxy
 * column, and falls back to sort_order for special order_by values outside
 * the spreadsheet layout.
 */
export const translateQueryParams = (queries: any) => {
  const {
    group_by,
    layout,
    sub_group_by,
    labels,
    assignees,
    state,
    cycle,
    module,
    priority,
    type,
    issue_type,
    ...otherProps
  } = queries;
  const order_by = queries.order_by;
  // Map relation filters onto issue-table column / issue_meta key names.
  if (state) otherProps.state_id = state;
  if (cycle) otherProps.cycle_id = cycle;
  if (module) otherProps.module_ids = module;
  if (labels) otherProps.label_ids = labels;
  if (assignees) otherProps.assignee_ids = assignees;
  if (group_by) otherProps.group_by = GROUP_BY_MAP[group_by as keyof typeof GROUP_BY_MAP];
  if (sub_group_by) otherProps.sub_group_by = GROUP_BY_MAP[sub_group_by as keyof typeof GROUP_BY_MAP];
  if (priority) {
    // priority is filtered/sorted through its numeric proxy column
    otherProps.priority_proxy = priority
      .split(",")
      .map((priority: string) => PRIORITY_MAP[priority as keyof typeof PRIORITY_MAP])
      .join(",");
  }
  if (type) {
    otherProps.state_group = type === "backlog" ? "backlog" : "unstarted,started";
  }
  if (issue_type) {
    otherProps.type_id = issue_type;
  }
  if (order_by?.includes("priority")) {
    otherProps.order_by = order_by.replace("priority", "priority_proxy");
  }
  // Fix invalid orderby when switching from spreadsheet layout
  if (layout !== "spreadsheet" && Object.keys(SPECIAL_ORDER_BY).includes(order_by)) {
    otherProps.order_by = "sort_order";
  }
  // For each property value, replace None with empty string
  Object.keys(otherProps).forEach((key) => {
    if (otherProps[key] === "None") {
      otherProps[key] = "";
    }
  });
  return otherProps;
};
/**
 * Build an ORDER BY clause for `order_by`, descending when the field is
 * prefixed with "-". sequence_id is always appended as a DESC tie-breaker,
 * and date/time columns are wrapped via wrapDateTime.
 */
export const getOrderByFragment = (order_by: string, table = "") => {
  if (!order_by) return "";
  const descending = order_by.startsWith("-");
  const field = descending ? order_by.slice(1) : order_by;
  const direction = descending ? "DESC" : "ASC";
  return ` ORDER BY ${wrapDateTime(field)} ${direction} NULLS LAST, ${table}sequence_id DESC`;
};
// A join on issue_meta is needed whenever either grouping field is array-valued.
export const isMetaJoinRequired = (groupBy: string, subGroupBy: string) =>
  [groupBy, subGroupBy].some((field) => ARRAY_FIELDS.includes(field));
export const getMetaKeysFragment = (queries: any) => {
const { group_by, sub_group_by, ...otherProps } = translateQueryParams(queries);
const fields: Set<string> = new Set();
if (ARRAY_FIELDS.includes(group_by)) {
fields.add(group_by);
}
if (ARRAY_FIELDS.includes(sub_group_by)) {
fields.add(sub_group_by);
}
const keys = Object.keys(otherProps);
keys.forEach((field: string) => {
if (ARRAY_FIELDS.includes(field)) {
fields.add(field);
}
});
const sql = ` ('${Array.from(fields).join("','")}')`;
return sql;
};
// Collect the array-valued keys (group_by, sub_group_by and any filter field)
// that require an issue_meta join, de-duplicated in insertion order.
export const getMetaKeys = (queries: any): string[] => {
  const { group_by, sub_group_by, ...otherProps } = translateQueryParams(queries);
  const fields = new Set<string>();
  for (const candidate of [group_by, sub_group_by, ...Object.keys(otherProps)]) {
    if (ARRAY_FIELDS.includes(candidate)) {
      fields.add(candidate);
    }
  }
  return Array.from(fields);
};
// True when the query groups or filters on any array-valued field, which
// forces issue_meta joins in the generated SQL.
const areJoinsRequired = (queries: any) => {
  const { group_by, sub_group_by, ...otherProps } = translateQueryParams(queries);
  return (
    ARRAY_FIELDS.includes(group_by) ||
    ARRAY_FIELDS.includes(sub_group_by) ||
    Object.keys(otherProps).some((field) => ARRAY_FIELDS.includes(field))
  );
};
// Apply filters to the query
export const getFilteredRowsForGrouping = (projectId: string, queries: any) => {
const { group_by, sub_group_by, ...otherProps } = translateQueryParams(queries);
const filterJoinFields = getMetaKeys(otherProps);
const temp = getSingleFilterFields(queries);
const issueTableFilterFields = temp.length ? "," + temp.join(",") : "";
const joinsRequired = areJoinsRequired(queries);
let sql = "";
if (!joinsRequired) {
sql = `WITH fi as (SELECT i.id,i.created_at, i.sequence_id ${issueTableFilterFields}`;
if (group_by) {
if (group_by === "target_date") {
sql += `, date(i.${group_by}) as group_id`;
} else {
sql += `, i.${group_by} as group_id`;
}
}
if (sub_group_by) {
sql += `, i.${sub_group_by} as sub_group_id`;
}
sql += ` FROM issues i `;
if (otherProps.state_group) {
sql += `LEFT JOIN states ON i.state_id = states.id `;
}
sql += `WHERE 1=1 `;
if (projectId) {
sql += ` AND i.project_id = '${projectId}'
`;
}
sql += `${singleFilterConstructor(otherProps)})
`;
return sql;
}
sql = `WITH fi AS (`;
sql += `SELECT i.id,i.created_at,i.sequence_id ${issueTableFilterFields} `;
if (group_by) {
if (ARRAY_FIELDS.includes(group_by)) {
sql += `, ${group_by}.value as group_id
`;
} else if (group_by === "target_date") {
sql += `, date(i.${group_by}) as group_id
`;
} else {
sql += `, i.${group_by} as group_id
`;
}
}
if (sub_group_by) {
if (ARRAY_FIELDS.includes(sub_group_by)) {
sql += `, ${sub_group_by}.value as sub_group_id
`;
} else {
sql += `, i.${sub_group_by} as sub_group_id
`;
}
}
sql += ` from issues i
`;
if (otherProps.state_group) {
sql += `LEFT JOIN states ON i.state_id = states.id `;
}
filterJoinFields.forEach((field: string) => {
sql += ` INNER JOIN issue_meta ${field} ON i.id = ${field}.issue_id AND ${field}.key = '${field}' AND ${field}.value IN ('${otherProps[field].split(",").join("','")}')
`;
});
// If group by field is not already joined, join it
if (ARRAY_FIELDS.includes(group_by) && !filterJoinFields.includes(group_by)) {
sql += ` LEFT JOIN issue_meta ${group_by} ON i.id = ${group_by}.issue_id AND ${group_by}.key = '${group_by}'
`;
}
if (ARRAY_FIELDS.includes(sub_group_by) && !filterJoinFields.includes(sub_group_by)) {
sql += ` LEFT JOIN issue_meta ${sub_group_by} ON i.id = ${sub_group_by}.issue_id AND ${sub_group_by}.key = '${sub_group_by}'
`;
}
sql += ` WHERE 1=1 `;
if (projectId) {
sql += ` AND i.project_id = '${projectId}'
`;
}
sql += singleFilterConstructor(otherProps);
sql += `)
`;
return sql;
};
/**
 * Build the `AND ...` filter tail for scalar (non array-valued) fields:
 * sub-issue exclusion, date-range filters, state group, and plain
 * `IN (...)` / `IS NULL` filters for everything else.
 */
export const singleFilterConstructor = (queries: any) => {
  const {
    order_by,
    cursor,
    per_page,
    group_by,
    sub_group_by,
    state_group,
    sub_issue,
    target_date,
    start_date,
    ...filters
  } = translateQueryParams(queries);
  let sql = "";
  // Unless sub-issues are requested, keep only top-level issues.
  if (!sub_issue) {
    sql += ` AND parent_id IS NULL
`;
  }
  if (target_date) {
    sql += createDateFilter("target_date", target_date);
  }
  if (start_date) {
    sql += createDateFilter("start_date", start_date);
  }
  if (state_group) {
    sql += ` AND state_group in ('${state_group.split(",").join("','")}')
`;
  }
  const keys = Object.keys(filters);
  keys.forEach((key) => {
    const value = filters[key] ? filters[key].split(",") : "";
    // Array-valued fields are handled via issue_meta joins, not here.
    if (!ARRAY_FIELDS.includes(key)) {
      if (!value) {
        // empty filter value means "field is unset"
        sql += ` AND ${key} IS NULL`;
        return;
      }
      sql += ` AND ${key} in ('${value.join("','")}')
`;
    }
  });
  return sql;
};
/**
 * Build a date-filter SQL fragment for `key` from a comma-separated spec.
 * Entries shaped "<n>_<unit>;<type>;<from>" are relative (months are
 * approximated as 30 days); entries shaped "<date>;<before|after>" form a
 * custom range (two -> BETWEEN, one -> exact-date equality).
 */
const createDateFilter = (key: string, q: string) => {
  let sql = " ";
  const customRange: string[] = [];
  let useAnd = true;
  for (const part of q.split(",")) {
    const [date, , from] = part.split(";");
    if (!from) {
      customRange.push(part);
      continue;
    }
    // Relative filter; assuming type is always "after".
    const [rawLength, unit] = date.split("_");
    const length = parseInt(rawLength);
    let after = "";
    if (unit === "weeks") {
      // date in yyyy-mm-dd format `length` weeks from now
      after = new Date(new Date().setDate(new Date().getDate() + length * 7)).toISOString().split("T")[0];
    }
    if (unit === "months") {
      after = new Date(new Date().setDate(new Date().getDate() + length * 30)).toISOString().split("T")[0];
    }
    sql += ` ${useAnd ? "AND" : "OR"} ${key} >= date('${after}')`;
    useAnd = false;
  }
  if (customRange.length === 2) {
    const end = customRange.find((date) => date.includes("before"))?.split(";")[0];
    const start = customRange.find((date) => date.includes("after"))?.split(";")[0];
    if (end && start) {
      sql += ` ${useAnd ? "AND" : "OR"} ${key} BETWEEN date('${start}') AND date('${end}')`;
    }
  }
  if (customRange.length === 1) {
    sql += ` AND ${key}=date('${customRange[0].split(";")[0]}')`;
  }
  return sql;
};
/**
 * List the extra issue-table columns the filtered CTE must project so that
 * later ORDER BY / filter clauses can reference them: the order_by column,
 * every scalar filter field, and the id columns backing special sorts.
 */
const getSingleFilterFields = (queries: any) => {
  const { order_by, cursor, per_page, group_by, sub_group_by, sub_issue, state_group, ...otherProps } =
    translateQueryParams(queries);
  const fields = new Set();
  // created_at is always projected; special order_by fields come from joins.
  if (order_by && !order_by.includes("created_at") && !Object.keys(SPECIAL_ORDER_BY).includes(order_by))
    fields.add(order_by.replace("-", ""));
  const keys = Object.keys(otherProps);
  keys.forEach((field: string) => {
    if (!ARRAY_FIELDS.includes(field)) {
      fields.add(field);
    }
  });
  // Special sorts still need the local id column to join the related table on.
  if (order_by?.includes("state__name")) {
    fields.add("state_id");
  }
  if (order_by?.includes("cycle__name")) {
    fields.add("cycle_id");
  }
  if (state_group) {
    fields.add("states.'group' as state_group");
  }
  if (order_by?.includes("estimate_point__key")) {
    fields.add("estimate_point");
  }
  return Array.from(fields);
};
// Comma-separated "i.<column>" projection list of issue columns, excluding
// the heavy description_html payload.
export const getIssueFieldsFragment = () => {
  const { description_html, ...filtered } = issueSchema;
  const columns = Object.keys(filtered);
  const sql = ` ${columns.map((key) => `i.${key}`).join(`,
`)}`;
  return sql;
};

View File

@@ -1,136 +0,0 @@
// Column-name -> SQLite column type map used to build CREATE TABLE statements.
export type Schema = {
  [key: string]: string;
};
// Local mirror of an issue row. priority_proxy is a numeric copy of priority
// used for sorting; is_local_update flags rows written locally (not from sync).
export const issueSchema: Schema = {
  id: "TEXT UNIQUE",
  name: "TEXT",
  state_id: "TEXT",
  sort_order: "REAL",
  completed_at: "TEXT",
  estimate_point: "REAL",
  priority: "TEXT",
  priority_proxy: "INTEGER",
  start_date: "TEXT",
  target_date: "TEXT",
  sequence_id: "INTEGER",
  project_id: "TEXT",
  parent_id: "TEXT",
  created_at: "TEXT",
  updated_at: "TEXT",
  created_by: "TEXT",
  updated_by: "TEXT",
  is_draft: "INTEGER",
  archived_at: "TEXT",
  state__group: "TEXT",
  sub_issues_count: "INTEGER",
  cycle_id: "TEXT",
  link_count: "INTEGER",
  attachment_count: "INTEGER",
  type_id: "TEXT",
  label_ids: "TEXT",
  assignee_ids: "TEXT",
  module_ids: "TEXT",
  description_html: "TEXT",
  is_local_update: "INTEGER",
};
// Key/value side table exposing array-valued issue fields (label_ids,
// assignee_ids, module_ids) one value per row, for SQL joins.
export const issueMetaSchema: Schema = {
  issue_id: "TEXT",
  key: "TEXT",
  value: "TEXT",
};
// Local mirror of a module row, including denormalized per-state issue counts.
export const moduleSchema: Schema = {
  id: "TEXT UNIQUE",
  workspace_id: "TEXT",
  project_id: "TEXT",
  name: "TEXT",
  description: "TEXT",
  description_text: "TEXT",
  description_html: "TEXT",
  start_date: "TEXT",
  target_date: "TEXT",
  status: "TEXT",
  lead_id: "TEXT",
  member_ids: "TEXT",
  view_props: "TEXT",
  sort_order: "INTEGER",
  external_source: "TEXT",
  external_id: "TEXT",
  logo_props: "TEXT",
  total_issues: "INTEGER",
  cancelled_issues: "INTEGER",
  completed_issues: "INTEGER",
  started_issues: "INTEGER",
  unstarted_issues: "INTEGER",
  backlog_issues: "INTEGER",
  created_at: "TEXT",
  updated_at: "TEXT",
  archived_at: "TEXT",
};
// Local mirror of a label row.
export const labelSchema: Schema = {
  id: "TEXT UNIQUE",
  name: "TEXT",
  color: "TEXT",
  parent: "TEXT",
  project_id: "TEXT",
  workspace_id: "TEXT",
  sort_order: "INTEGER",
};
// Local mirror of a cycle row, including denormalized per-state issue counts.
export const cycleSchema: Schema = {
  id: "TEXT UNIQUE",
  workspace_id: "TEXT",
  project_id: "TEXT",
  name: "TEXT",
  description: "TEXT",
  start_date: "TEXT",
  end_date: "TEXT",
  owned_by_id: "TEXT",
  view_props: "TEXT",
  sort_order: "INTEGER",
  external_source: "TEXT",
  external_id: "TEXT",
  progress_snapshot: "TEXT",
  logo_props: "TEXT",
  total_issues: "INTEGER",
  cancelled_issues: "INTEGER",
  completed_issues: "INTEGER",
  started_issues: "INTEGER",
  unstarted_issues: "INTEGER",
  backlog_issues: "INTEGER",
};
// Local mirror of a project state row; `group` and `default` are quoted when
// the CREATE TABLE statement is generated, so the reserved-ish names are safe.
export const stateSchema: Schema = {
  id: "TEXT UNIQUE",
  project_id: "TEXT",
  workspace_id: "TEXT",
  name: "TEXT",
  color: "TEXT",
  group: "TEXT",
  default: "BOOLEAN",
  description: "TEXT",
  sequence: "INTEGER",
};
// Flattened estimate points across all workspace estimates.
export const estimatePointSchema: Schema = {
  id: "TEXT UNIQUE",
  key: "TEXT",
  value: "REAL",
};
// Local mirror of a workspace member row.
export const memberSchema: Schema = {
  id: "TEXT UNIQUE",
  first_name: "TEXT",
  last_name: "TEXT",
  avatar: "TEXT",
  is_bot: "BOOLEAN",
  display_name: "TEXT",
  email: "TEXT",
};
// Generic key/value store for persistence-layer options/flags.
export const optionsSchema: Schema = {
  key: "TEXT UNIQUE",
  value: "TEXT",
};

View File

@@ -1,41 +0,0 @@
import { persistence } from "../storage.sqlite";
import type { Schema } from "./schemas";
import {
labelSchema,
moduleSchema,
issueMetaSchema,
issueSchema,
stateSchema,
cycleSchema,
estimatePointSchema,
memberSchema,
optionsSchema,
} from "./schemas";
import { log } from "./utils";
// Build a CREATE TABLE IF NOT EXISTS statement from a schema map of
// column name -> SQLite column type. Column names are quoted.
const createTableSQLfromSchema = (tableName: string, schema: Schema) => {
  const columns = Object.keys(schema)
    .map((key) => `'${key}' ${schema[key]}`)
    .join(", ");
  const sql = `CREATE TABLE IF NOT EXISTS ${tableName} (${columns});`;
  log("#####", sql);
  return sql;
};
/**
 * Create every local table (if it does not already exist) inside a single
 * transaction.
 */
export const createTables = async () => {
  //@todo use promise.all or send all statements in one go
  await persistence.db.exec("BEGIN;");
  await persistence.db.exec(createTableSQLfromSchema("issues", issueSchema));
  await persistence.db.exec(createTableSQLfromSchema("issue_meta", issueMetaSchema));
  await persistence.db.exec(createTableSQLfromSchema("modules", moduleSchema));
  await persistence.db.exec(createTableSQLfromSchema("labels", labelSchema));
  await persistence.db.exec(createTableSQLfromSchema("states", stateSchema));
  await persistence.db.exec(createTableSQLfromSchema("cycles", cycleSchema));
  await persistence.db.exec(createTableSQLfromSchema("estimate_points", estimatePointSchema));
  await persistence.db.exec(createTableSQLfromSchema("members", memberSchema));
  await persistence.db.exec(createTableSQLfromSchema("options", optionsSchema));
  await persistence.db.exec("COMMIT;");
};

View File

@@ -1,206 +0,0 @@
import { pick } from "lodash-es";
import type { TIssue } from "@plane/types";
import { rootStore } from "@/lib/store-context";
import { persistence } from "../storage.sqlite";
import { updateIssue } from "./load-issues";
// Console logger gated behind the global window.DEBUG flag.
export const log = (...args: any) => {
  if (!(window as any).DEBUG) return;
  console.log(...args);
};
export const logError = (e: any) => {
if (e?.result?.errorClass === "SQLite3Error") {
e = parseSQLite3Error(e);
}
console.error(e);
};
export const logInfo = console.info;
/**
 * Write an issue to the local DB, keeping only the columns the local
 * issues table knows about and flagging the row as a local update.
 */
export const addIssueToPersistanceLayer = async (issue: TIssue) => {
  try {
    // JSON round-trip strips observables/functions before picking columns.
    const issuePartial = pick({ ...JSON.parse(JSON.stringify(issue)) }, [
      "id",
      "name",
      "state_id",
      "sort_order",
      "completed_at",
      "estimate_point",
      "priority",
      "start_date",
      "target_date",
      "sequence_id",
      "project_id",
      "parent_id",
      "created_at",
      "updated_at",
      "created_by",
      "updated_by",
      "is_draft",
      "archived_at",
      "state__group",
      "cycle_id",
      "link_count",
      "attachment_count",
      "sub_issues_count",
      "assignee_ids",
      "label_ids",
      "module_ids",
      "type_id",
      "description_html",
    ]);
    await updateIssue({ ...issuePartial, is_local_update: 1 });
  } catch (e) {
    // Log the actual error (previously only a static message was logged,
    // which made failures undiagnosable).
    logError(e);
  }
};
/**
 * Merge the in-memory copy of each issue over its persisted copy and write
 * the result back to the local DB.
 * @param issueIds a single issue id or a list of ids to re-persist
 */
export const updatePersistentLayer = async (issueIds: string | string[]) => {
  const ids = typeof issueIds === "string" ? [issueIds] : issueIds;
  // Promise.all over map (instead of forEach with an async callback) so
  // callers that await this function actually wait for every write.
  await Promise.all(
    ids.map(async (issueId) => {
      const dbIssue = await persistence.getIssue(issueId);
      const issue = rootStore.issue.issues.getIssueById(issueId);
      // the in-memory store wins on conflicting fields
      const updatedIssue = dbIssue ? { ...dbIssue, ...issue } : issue;
      if (updatedIssue) {
        await addIssueToPersistanceLayer(updatedIssue);
      }
    })
  );
};
// Wrap known date/time columns in SQLite's datetime() so ordering compares
// timestamps rather than raw strings; other fields pass through unchanged.
export const wrapDateTime = (field: string) => {
  const dateTimeFields = ["created_at", "updated_at", "completed_at", "start_date", "target_date"];
  return dateTimeFields.includes(field) ? `datetime(${field})` : field;
};
/**
 * Bucket a flat grouped-query result into
 * { [groupId]: { results, total_results } }; rows without a group_id fall
 * into the "None" bucket. total_results comes from the row's total_issues.
 */
export const getGroupedIssueResults = (issueResults: (TIssue & { group_id?: string; total_issues: number })[]): any => {
  const grouped: {
    [key: string]: {
      results: TIssue[];
      total_results: number;
    };
  } = {};
  for (const issue of issueResults) {
    const groupId = issue.group_id ? issue.group_id : "None";
    const bucket = grouped[groupId];
    if (bucket !== undefined && Array.isArray(bucket.results)) {
      bucket.results.push(issue);
    } else {
      grouped[groupId] = { results: [issue], total_results: issue.total_issues };
    }
  }
  return grouped;
};
/**
 * Bucket a flat sub-grouped query result into a two-level map
 * { [groupId]: { results: { [subGroupId]: { results, total_results } }, total_results } }.
 * Missing group/sub-group ids fall into "None"; each group's total_results is
 * recomputed as the sum of its sub-group totals.
 */
export const getSubGroupedIssueResults = (
  issueResults: (TIssue & { group_id?: string; total_issues: number; sub_group_id?: string })[]
): any => {
  const subGroupedResults: {
    [key: string]: {
      results: {
        [key: string]: {
          results: TIssue[];
          total_results: number;
        };
      };
      total_results: number;
    };
  } = {};
  // First pass: place each row into its (group, sub-group) bucket.
  for (const issue of issueResults) {
    const groupId = issue.group_id ? issue.group_id : "None";
    const subGroupId = issue.sub_group_id ? issue.sub_group_id : "None";
    const group = (subGroupedResults[groupId] ??= { results: {}, total_results: 0 });
    const subBucket = group.results[subGroupId];
    if (subBucket !== undefined && Array.isArray(subBucket.results)) {
      subBucket.results.push(issue);
    } else {
      group.results[subGroupId] = { results: [issue], total_results: issue.total_issues };
    }
  }
  // Second pass: each group's total is the sum of its sub-group totals.
  for (const group of Object.values(subGroupedResults)) {
    group.total_results = Object.values(group.results).reduce(
      (sum, subGroup) => sum + (subGroup.total_results ?? 0),
      0
    );
  }
  return subGroupedResults;
};
// Promise-based sleep: resolves after `ms` milliseconds.
export const delay = (ms: number) =>
  new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
// Serialize the structured `result` payload in place so SQLite worker errors
// print as readable JSON; returns the (mutated) error object.
const parseSQLite3Error = (error: any) => {
  const serialized = JSON.stringify(error.result);
  error.result = serialized;
  return error;
};
// True only for real Chrome: Edge ("Edg") and Opera ("OPR") also include
// "Chrome" in their user-agent strings and must be excluded.
export const isChrome = () => {
  const ua = navigator.userAgent;
  return ua.includes("Chrome") && !ua.includes("Edg") && !ua.includes("OPR");
};
/**
 * Delete the Origin Private File System storage used by the local DB.
 * @param force when true, skip lock negotiation; on Chrome, remove the whole
 *              OPFS root via the non-standard `remove()` API instead.
 */
export const clearOPFS = async (force = false) => {
  const storageManager = window.navigator.storage;
  const root = await storageManager.getDirectory();
  if (force && isChrome()) {
    // Chrome-only non-standard API that drops the entire OPFS root at once.
    await (root as any).remove({ recursive: true });
    return;
  }
  // ts-ignore
  for await (const entry of (root as any)?.values()) {
    if (entry.kind === "directory" && entry.name.startsWith(".ahp-")) {
      // A lock with the same name as the directory protects it from
      // being deleted.
      if (force) {
        // don't wait for the lock
        try {
          await root.removeEntry(entry.name, { recursive: true });
        } catch (e) {
          console.log(e);
        }
      } else {
        // Only delete when the lock is free (ifAvailable), i.e. no other tab
        // is using this temporary directory.
        await navigator.locks.request(entry.name, { ifAvailable: true }, async (lock) => {
          if (lock) {
            log?.(`Deleting temporary directory ${entry.name}`);
            try {
              await root.removeEntry(entry.name, { recursive: true });
            } catch (e) {
              console.log(e);
            }
          } else {
            log?.(`Temporary directory ${entry.name} is in use`);
          }
        });
      }
    } else {
      // NOTE(review): not awaited — fire-and-forget removal; confirm intended.
      root.removeEntry(entry.name);
    }
  }
};

View File

@@ -1,136 +0,0 @@
import * as Comlink from "comlink";
import { OPFSCoopSyncVFS as MyVFS } from "./wa-sqlite/src/OPFSCoopSyncVFS";
import * as SQLite from "./wa-sqlite/src/sqlite-api";
import SQLiteESMFactory from "./wa-sqlite/src/wa-sqlite.mjs";
// Shape of a structured exec() request: raw SQL plus bind parameters and
// options controlling how rows/results are returned.
type TQueryProps = {
  sql: string;
  rowMode: string;
  returnValue: string;
  bind: any[];
};
// Zip a result row's values with its column names into one plain object.
const mergeToObject = (columns: string[], row: any[]) => {
  const obj: any = {};
  for (let i = 0; i < columns.length; i++) {
    obj[columns[i]] = row[i];
  }
  return obj;
};
// Handle kept for an open database: the raw wa-sqlite db pointer plus an
// exec helper that returns result rows as plain objects.
interface SQLiteInstance {
  db: unknown;
  exec: (sql: string) => Promise<unknown[]>;
}
/**
 * SQLite database wrapper that runs inside a web worker on top of wa-sqlite
 * with the custom OPFS-backed "plane" VFS. Exposed to the main thread via
 * Comlink (see the `Comlink.expose(DBClass)` call at the bottom of this file).
 */
export class DBClass {
  // Handle for the currently open database; empty until init() succeeds,
  // reset to empty again by close().
  private instance: SQLiteInstance = {} as SQLiteInstance;
  // wa-sqlite API object produced by SQLite.Factory().
  private sqlite3: any;
  // Queue of in-flight transaction promises, used to serialize BEGIN/COMMIT
  // pairs so that overlapping transactions from callers do not interleave.
  private tp: Promise<any>[] = [];
  // Resolver/rejecter pairs matching the promises queued in `tp`.
  private tpResolver: any = [];

  /**
   * Open (creating if needed) `<dbName>.sqlite3` on the "plane" OPFS VFS.
   * Resolves `true` on success.
   * @param dbName - base name of the database file (without extension).
   * @throws Error when the name is invalid, the open times out (2s), or
   *   wa-sqlite initialization fails.
   */
  async init(dbName: string) {
    if (!dbName || typeof dbName !== "string") {
      throw new Error("Invalid database name");
    }
    try {
      const m = await SQLiteESMFactory();
      this.sqlite3 = SQLite.Factory(m);
      const vfs = await MyVFS.create("plane", m);
      this.sqlite3.vfs_register(vfs, true);
      // Fallback in rare cases where the database is not initialized in time
      const p = new Promise((resolve) => setTimeout(() => resolve(false), 2000));
      const dbPromise = this.sqlite3.open_v2(
        `${dbName}.sqlite3`,
        this.sqlite3.OPEN_READWRITE | this.sqlite3.OPEN_CREATE,
        "plane"
      );
      // Race the open against the 2s timeout; a falsy winner means timeout.
      const db = await Promise.any([dbPromise, p]);
      if (!db) {
        throw new Error("Failed to initialize in time");
      }
      this.instance.db = db;
      // exec() collects every row of a statement into an array of objects.
      this.instance.exec = async (sql: string) => {
        const rows: any[] = [];
        await this.sqlite3.exec(db, sql, (row: any[], columns: string[]) => {
          rows.push(mergeToObject(columns, row));
        });
        return rows;
      };
      return true;
    } catch (error) {
      throw new Error(`Failed to initialize database: ${(error as any)?.message}`);
    }
  }

  /**
   * Run a plain SQL string without bind parameters.
   * Returns undefined if init() has not completed (optional chaining).
   */
  runQuery(sql: string) {
    return this.instance?.exec?.(sql);
  }

  /**
   * Execute SQL, with special handling for "BEGIN;"/"COMMIT;" to serialize
   * transactions, and bound-parameter support when given a TQueryProps.
   */
  async exec(props: string | TQueryProps) {
    // @todo this will fail if the transaction is started any other way
    // eg: BEGIN, OR BEGIN TRANSACTION
    if (props === "BEGIN;") {
      // Wait for the previous transaction (if any) to COMMIT before this
      // one proceeds; queue a promise that our own COMMIT will resolve.
      let promiseToAwait;
      if (this.tp.length > 0) {
        promiseToAwait = this.tp.shift();
      }
      const p = new Promise((resolve, reject) => {
        this.tpResolver.push({ resolve, reject });
      });
      this.tp.push(p);
      if (promiseToAwait) {
        await promiseToAwait;
      }
    }
    let sql: string, bind: any[];
    if (typeof props === "string") {
      sql = props;
    } else {
      ({ sql, bind } = props);
      if (bind) {
        // Bound-parameter path: prepare, bind (1-based), and step manually.
        for await (const stmt of this.sqlite3.statements(this.instance.db, sql)) {
          bind.forEach((b, i) => {
            this.sqlite3.bind(stmt, i + 1, b);
          });
          const rows = [];
          // NOTE(review): row() is read before the first step() here; this
          // relies on wa-sqlite's statements() iterator semantics — confirm
          // against sqlite-api.js before reusing this pattern.
          do {
            const columns = await this.sqlite3.column_names(stmt);
            const row = await this.sqlite3.row(stmt);
            rows.push(mergeToObject(columns, row));
          } while ((await this.sqlite3.step(stmt)) === SQLite.SQLITE_ROW);
          // Returns after the first prepared statement; multi-statement SQL
          // with binds only executes its first statement.
          return rows;
        }
      }
    }
    // COMMIT resolves the oldest queued transaction promise so the next
    // waiting BEGIN can proceed. (`this.tp` is always truthy — it's an array.)
    if (sql === "COMMIT;" && this.tp) {
      await this.instance?.exec?.(sql);
      if (this.tp.length > 0) {
        const { resolve } = this.tpResolver.shift();
        resolve();
      }
      return;
    }
    return await this.instance?.exec?.(sql);
  }

  /**
   * Close the database if open and clear the instance handle so later
   * calls become no-ops rather than using a dangling pointer.
   * @throws Error when sqlite3.close fails.
   */
  async close() {
    try {
      if (!this.instance.db) {
        return;
      }
      await this.sqlite3.close(this.instance.db);
      // Clear instance to prevent usage after closing
      this.instance = {} as SQLiteInstance;
    } catch (error) {
      throw new Error(`Failed to close database: ${(error as any)?.message}`);
    }
  }
}
// Make DBClass constructible and callable from the main thread via Comlink.
Comlink.expose(DBClass);

View File

@@ -1,508 +0,0 @@
// Copyright 2024 Roy T. Hashimoto. All Rights Reserved.
import * as VFS from './VFS.js';
// Constructor of async functions, used by hasAsyncMethod() below to detect
// whether a j-method was declared `async`.
const AsyncFunction = Object.getPrototypeOf(async function(){}).constructor;
// Convenience base class for a JavaScript VFS.
// The raw xOpen, xRead, etc. function signatures receive only C primitives
// which aren't easy to work with. This class provides corresponding calls
// like jOpen, jRead, etc., which receive JavaScript-friendlier arguments
// such as string, Uint8Array, and DataView.
export class FacadeVFS extends VFS.Base {
  /**
   * @param {string} name
   * @param {object} module
   */
  constructor(name, module) {
    super(name, module);
  }

  /**
   * Override to indicate which methods are asynchronous.
   * @param {string} methodName
   * @returns {boolean}
   */
  hasAsyncMethod(methodName) {
    // The input argument is a string like "xOpen", so convert to "jOpen".
    // Then check if the method exists and is async.
    const jMethodName = `j${methodName.slice(1)}`;
    return this[jMethodName] instanceof AsyncFunction;
  }

  /**
   * Return the filename for a file id for use by mixins.
   * @param {number} pFile
   * @returns {string}
   */
  getFilename(pFile) {
    throw new Error('unimplemented');
  }

  // ---- Default j-method implementations. Subclasses override the ones
  // ---- they support; the defaults below are safe no-ops or errors.

  /**
   * @param {string?} filename
   * @param {number} pFile
   * @param {number} flags
   * @param {DataView} pOutFlags
   * @returns {number|Promise<number>}
   */
  jOpen(filename, pFile, flags, pOutFlags) {
    return VFS.SQLITE_CANTOPEN;
  }

  /**
   * @param {string} filename
   * @param {number} syncDir
   * @returns {number|Promise<number>}
   */
  jDelete(filename, syncDir) {
    return VFS.SQLITE_OK;
  }

  /**
   * @param {string} filename
   * @param {number} flags
   * @param {DataView} pResOut
   * @returns {number|Promise<number>}
   */
  jAccess(filename, flags, pResOut) {
    return VFS.SQLITE_OK;
  }

  /**
   * @param {string} filename
   * @param {Uint8Array} zOut
   * @returns {number|Promise<number>}
   */
  jFullPathname(filename, zOut) {
    // Copy the filename to the output buffer.
    const { read, written } = new TextEncoder().encodeInto(filename, zOut);
    if (read < filename.length) return VFS.SQLITE_IOERR;
    if (written >= zOut.length) return VFS.SQLITE_IOERR;
    zOut[written] = 0;
    return VFS.SQLITE_OK;
  }

  /**
   * @param {Uint8Array} zBuf
   * @returns {number|Promise<number>}
   */
  jGetLastError(zBuf) {
    return VFS.SQLITE_OK;
  }

  /**
   * @param {number} pFile
   * @returns {number|Promise<number>}
   */
  jClose(pFile) {
    return VFS.SQLITE_OK;
  }

  /**
   * @param {number} pFile
   * @param {Uint8Array} pData
   * @param {number} iOffset
   * @returns {number|Promise<number>}
   */
  jRead(pFile, pData, iOffset) {
    pData.fill(0);
    return VFS.SQLITE_IOERR_SHORT_READ;
  }

  /**
   * @param {number} pFile
   * @param {Uint8Array} pData
   * @param {number} iOffset
   * @returns {number|Promise<number>}
   */
  jWrite(pFile, pData, iOffset) {
    return VFS.SQLITE_IOERR_WRITE;
  }

  /**
   * @param {number} pFile
   * @param {number} size
   * @returns {number|Promise<number>}
   */
  jTruncate(pFile, size) {
    return VFS.SQLITE_OK;
  }

  /**
   * @param {number} pFile
   * @param {number} flags
   * @returns {number|Promise<number>}
   */
  jSync(pFile, flags) {
    return VFS.SQLITE_OK;
  }

  /**
   * @param {number} pFile
   * @param {DataView} pSize
   * @returns {number|Promise<number>}
   */
  jFileSize(pFile, pSize) {
    return VFS.SQLITE_OK;
  }

  /**
   * @param {number} pFile
   * @param {number} lockType
   * @returns {number|Promise<number>}
   */
  jLock(pFile, lockType) {
    return VFS.SQLITE_OK;
  }

  /**
   * @param {number} pFile
   * @param {number} lockType
   * @returns {number|Promise<number>}
   */
  jUnlock(pFile, lockType) {
    return VFS.SQLITE_OK;
  }

  /**
   * @param {number} pFile
   * @param {DataView} pResOut
   * @returns {number|Promise<number>}
   */
  jCheckReservedLock(pFile, pResOut) {
    pResOut.setInt32(0, 0, true);
    return VFS.SQLITE_OK;
  }

  /**
   * @param {number} pFile
   * @param {number} op
   * @param {DataView} pArg
   * @returns {number|Promise<number>}
   */
  jFileControl(pFile, op, pArg) {
    return VFS.SQLITE_NOTFOUND;
  }

  /**
   * @param {number} pFile
   * @returns {number|Promise<number>}
   */
  jSectorSize(pFile) {
    return super.xSectorSize(pFile);
  }

  /**
   * @param {number} pFile
   * @returns {number|Promise<number>}
   */
  jDeviceCharacteristics(pFile) {
    return 0;
  }

  // ---- Raw VFS entry points called with C primitives. Each shim decodes
  // ---- pointers/strings into JS-friendly values and forwards to the
  // ---- corresponding j-method above.

  /**
   * @param {number} pVfs
   * @param {number} zName
   * @param {number} pFile
   * @param {number} flags
   * @param {number} pOutFlags
   * @returns {number|Promise<number>}
   */
  xOpen(pVfs, zName, pFile, flags, pOutFlags) {
    const filename = this.#decodeFilename(zName, flags);
    const pOutFlagsView = this.#makeTypedDataView('Int32', pOutFlags);
    this['log']?.('jOpen', filename, pFile, '0x' + flags.toString(16));
    return this.jOpen(filename, pFile, flags, pOutFlagsView);
  }

  /**
   * @param {number} pVfs
   * @param {number} zName
   * @param {number} syncDir
   * @returns {number|Promise<number>}
   */
  xDelete(pVfs, zName, syncDir) {
    const filename = this._module.UTF8ToString(zName);
    this['log']?.('jDelete', filename, syncDir);
    return this.jDelete(filename, syncDir);
  }

  /**
   * @param {number} pVfs
   * @param {number} zName
   * @param {number} flags
   * @param {number} pResOut
   * @returns {number|Promise<number>}
   */
  xAccess(pVfs, zName, flags, pResOut) {
    const filename = this._module.UTF8ToString(zName);
    const pResOutView = this.#makeTypedDataView('Int32', pResOut);
    this['log']?.('jAccess', filename, flags);
    return this.jAccess(filename, flags, pResOutView);
  }

  /**
   * @param {number} pVfs
   * @param {number} zName
   * @param {number} nOut
   * @param {number} zOut
   * @returns {number|Promise<number>}
   */
  xFullPathname(pVfs, zName, nOut, zOut) {
    const filename = this._module.UTF8ToString(zName);
    const zOutArray = this._module.HEAPU8.subarray(zOut, zOut + nOut);
    this['log']?.('jFullPathname', filename, nOut);
    return this.jFullPathname(filename, zOutArray);
  }

  /**
   * @param {number} pVfs
   * @param {number} nBuf
   * @param {number} zBuf
   * @returns {number|Promise<number>}
   */
  xGetLastError(pVfs, nBuf, zBuf) {
    const zBufArray = this._module.HEAPU8.subarray(zBuf, zBuf + nBuf);
    this['log']?.('jGetLastError', nBuf);
    return this.jGetLastError(zBufArray);
  }

  /**
   * @param {number} pFile
   * @returns {number|Promise<number>}
   */
  xClose(pFile) {
    this['log']?.('jClose', pFile);
    return this.jClose(pFile);
  }

  /**
   * @param {number} pFile
   * @param {number} pData
   * @param {number} iAmt
   * @param {number} iOffsetLo
   * @param {number} iOffsetHi
   * @returns {number|Promise<number>}
   */
  xRead(pFile, pData, iAmt, iOffsetLo, iOffsetHi) {
    const pDataArray = this.#makeDataArray(pData, iAmt);
    const iOffset = delegalize(iOffsetLo, iOffsetHi);
    this['log']?.('jRead', pFile, iAmt, iOffset);
    return this.jRead(pFile, pDataArray, iOffset);
  }

  /**
   * @param {number} pFile
   * @param {number} pData
   * @param {number} iAmt
   * @param {number} iOffsetLo
   * @param {number} iOffsetHi
   * @returns {number|Promise<number>}
   */
  xWrite(pFile, pData, iAmt, iOffsetLo, iOffsetHi) {
    const pDataArray = this.#makeDataArray(pData, iAmt);
    const iOffset = delegalize(iOffsetLo, iOffsetHi);
    this['log']?.('jWrite', pFile, pDataArray, iOffset);
    return this.jWrite(pFile, pDataArray, iOffset);
  }

  /**
   * @param {number} pFile
   * @param {number} sizeLo
   * @param {number} sizeHi
   * @returns {number|Promise<number>}
   */
  xTruncate(pFile, sizeLo, sizeHi) {
    const size = delegalize(sizeLo, sizeHi);
    this['log']?.('jTruncate', pFile, size);
    return this.jTruncate(pFile, size);
  }

  /**
   * @param {number} pFile
   * @param {number} flags
   * @returns {number|Promise<number>}
   */
  xSync(pFile, flags) {
    this['log']?.('jSync', pFile, flags);
    return this.jSync(pFile, flags);
  }

  /**
   *
   * @param {number} pFile
   * @param {number} pSize
   * @returns {number|Promise<number>}
   */
  xFileSize(pFile, pSize) {
    const pSizeView = this.#makeTypedDataView('BigInt64', pSize);
    this['log']?.('jFileSize', pFile);
    return this.jFileSize(pFile, pSizeView);
  }

  /**
   * @param {number} pFile
   * @param {number} lockType
   * @returns {number|Promise<number>}
   */
  xLock(pFile, lockType) {
    this['log']?.('jLock', pFile, lockType);
    return this.jLock(pFile, lockType);
  }

  /**
   * @param {number} pFile
   * @param {number} lockType
   * @returns {number|Promise<number>}
   */
  xUnlock(pFile, lockType) {
    this['log']?.('jUnlock', pFile, lockType);
    return this.jUnlock(pFile, lockType);
  }

  /**
   * @param {number} pFile
   * @param {number} pResOut
   * @returns {number|Promise<number>}
   */
  xCheckReservedLock(pFile, pResOut) {
    const pResOutView = this.#makeTypedDataView('Int32', pResOut);
    this['log']?.('jCheckReservedLock', pFile);
    return this.jCheckReservedLock(pFile, pResOutView);
  }

  /**
   * @param {number} pFile
   * @param {number} op
   * @param {number} pArg
   * @returns {number|Promise<number>}
   */
  xFileControl(pFile, op, pArg) {
    const pArgView = new DataView(
      this._module.HEAPU8.buffer,
      this._module.HEAPU8.byteOffset + pArg);
    this['log']?.('jFileControl', pFile, op, pArgView);
    return this.jFileControl(pFile, op, pArgView);
  }

  /**
   * @param {number} pFile
   * @returns {number|Promise<number>}
   */
  xSectorSize(pFile) {
    this['log']?.('jSectorSize', pFile);
    return this.jSectorSize(pFile);
  }

  /**
   * @param {number} pFile
   * @returns {number|Promise<number>}
   */
  xDeviceCharacteristics(pFile) {
    this['log']?.('jDeviceCharacteristics', pFile);
    return this.jDeviceCharacteristics(pFile);
  }

  /**
   * Wrapped DataView for pointer arguments.
   * Pointers to a single value are passed using DataView. A Proxy
   * wrapper prevents use of incorrect type or endianness.
   * @param {'Int32'|'BigInt64'} type
   * @param {number} byteOffset
   * @returns {DataView}
   */
  #makeTypedDataView(type, byteOffset) {
    const byteLength = type === 'Int32' ? 4 : 8;
    const getter = `get${type}`;
    const setter = `set${type}`;
    const makeDataView = () => new DataView(
      this._module.HEAPU8.buffer,
      this._module.HEAPU8.byteOffset + byteOffset,
      byteLength);
    let dataView = makeDataView();
    return new Proxy(dataView, {
      get(_, prop) {
        if (dataView.buffer.byteLength === 0) {
          // WebAssembly memory resize detached the buffer.
          dataView = makeDataView();
        }
        if (prop === getter) {
          return function(byteOffset, littleEndian) {
            if (!littleEndian) throw new Error('must be little endian');
            return dataView[prop](byteOffset, littleEndian);
          }
        }
        if (prop === setter) {
          return function(byteOffset, value, littleEndian) {
            if (!littleEndian) throw new Error('must be little endian');
            return dataView[prop](byteOffset, value, littleEndian);
          }
        }
        if (typeof prop === 'string' && (prop.match(/^(get)|(set)/))) {
          // Any accessor other than the sanctioned typed getter/setter
          // would read the pointer with the wrong width — fail loudly.
          throw new Error('invalid type');
        }
        const result = dataView[prop];
        return typeof result === 'function' ? result.bind(dataView) : result;
      }
    });
  }

  /**
   * Proxy-wrapped view of WASM heap bytes that survives memory growth by
   * re-subarraying when the underlying buffer has been detached.
   * @param {number} byteOffset
   * @param {number} byteLength
   */
  #makeDataArray(byteOffset, byteLength) {
    let target = this._module.HEAPU8.subarray(byteOffset, byteOffset + byteLength);
    return new Proxy(target, {
      get: (_, prop, receiver) => {
        if (target.buffer.byteLength === 0) {
          // WebAssembly memory resize detached the buffer.
          target = this._module.HEAPU8.subarray(byteOffset, byteOffset + byteLength);
        }
        const result = target[prop];
        return typeof result === 'function' ? result.bind(target) : result;
      }
    });
  }

  // Decode a C filename pointer; for URI-style names, re-joins the path and
  // its query parameters into a single "path?key=value&…" string.
  #decodeFilename(zName, flags) {
    if (flags & VFS.SQLITE_OPEN_URI) {
      // The first null-terminated string is the URI path. Subsequent
      // strings are query parameter keys and values.
      // https://www.sqlite.org/c3ref/open.html#urifilenamesinsqlite3open
      let pName = zName;
      let state = 1;
      const charCodes = [];
      while (state) {
        const charCode = this._module.HEAPU8[pName++];
        if (charCode) {
          charCodes.push(charCode);
        } else {
          if (!this._module.HEAPU8[pName]) state = null;
          switch (state) {
            case 1: // path
              charCodes.push('?'.charCodeAt(0));
              state = 2;
              break;
            case 2: // key
              charCodes.push('='.charCodeAt(0));
              state = 3;
              break;
            case 3: // value
              charCodes.push('&'.charCodeAt(0));
              state = 2;
              break;
          }
        }
      }
      return new TextDecoder().decode(new Uint8Array(charCodes));
    }
    return zName ? this._module.UTF8ToString(zName) : null;
  }
}
// Emscripten "legalizes" 64-bit integer arguments by passing them as
// two 32-bit signed integers. Reassemble them into a single JS number,
// treating the low word as unsigned.
function delegalize(lo32, hi32) {
  const loUnsigned = lo32 < 0 ? lo32 + 2 ** 32 : lo32;
  return hi32 * 0x100000000 + loUnsigned;
}

View File

@@ -1,592 +0,0 @@
// Copyright 2024 Roy T. Hashimoto. All Rights Reserved.
import { FacadeVFS } from "./FacadeVFS.js";
import * as VFS from "./VFS.js";
// Number of pre-created temporary-file access handles per VFS instance.
const DEFAULT_TEMPORARY_FILES = 10;
// How often (ms) to re-notify other connections that we want a file's lock.
const LOCK_NOTIFY_INTERVAL = 1000;
// A database file plus its companion rollback-journal and WAL files.
const DB_RELATED_FILE_SUFFIXES = ["", "-journal", "-wal"];
// Releases a VFS instance's temp-directory Web Lock once the instance is
// garbage collected (see OPFSCoopSyncVFS.#initialize).
const finalizationRegistry = new FinalizationRegistry((releaser) => releaser());
// Per-open-file state, keyed by SQLite file id in OPFSCoopSyncVFS.mapIdToFile.
class File {
  /** @type {string} */ path;
  /** @type {number} */ flags;
  /** @type {FileSystemSyncAccessHandle} */ accessHandle;
  /** @type {PersistentFile?} */ persistentFile;
  constructor(path, flags) {
    this.path = path;
    this.flags = flags;
  }
}
// Cross-connection state for a file that lives in OPFS (as opposed to the
// pool of anonymous temporary files). Shared handle/lock bookkeeping lets
// multiple tabs cooperate over a single sync access handle.
class PersistentFile {
  /** @type {FileSystemFileHandle} */ fileHandle;
  /** @type {FileSystemSyncAccessHandle} */ accessHandle = null;
  // The following properties are for main database files.
  /** @type {boolean} */ isLockBusy = false; // last xLock returned SQLITE_BUSY
  /** @type {boolean} */ isFileLocked = false; // SQLite currently holds a lock
  /** @type {boolean} */ isRequestInProgress = false; // async handle acquisition underway
  /** @type {function} */ handleLockReleaser = null; // releases the Web Lock when called
  /** @type {BroadcastChannel} */ handleRequestChannel; // other tabs ask for the handle here
  /** @type {boolean} */ isHandleRequested = false; // another tab wants our handle
  constructor(fileHandle) {
    this.fileHandle = fileHandle;
  }
}
/**
 * Synchronous OPFS-backed VFS in which multiple connections (tabs/workers)
 * cooperate over sync access handles using Web Locks and BroadcastChannel:
 * a connection holding a database's handles releases them when another
 * connection broadcasts a request.
 */
export class OPFSCoopSyncVFS extends FacadeVFS {
  /** @type {Map<number, File>} */ mapIdToFile = new Map();
  lastError = null;
  log = null; //function(...args) { console.log(`[${contextName}]`, ...args) };
  /** @type {Map<string, PersistentFile>} */ persistentFiles = new Map();
  /** @type {Map<string, FileSystemSyncAccessHandle>} */ boundAccessHandles = new Map();
  /** @type {Set<FileSystemSyncAccessHandle>} */ unboundAccessHandles = new Set();
  /** @type {Set<string>} */ accessiblePaths = new Set();
  releaser = null;

  /** Create and fully initialize a VFS instance (preferred over `new`). */
  static async create(name, module) {
    const vfs = new OPFSCoopSyncVFS(name, module);
    await Promise.all([vfs.isReady(), vfs.#initialize(DEFAULT_TEMPORARY_FILES)]);
    return vfs;
  }

  constructor(name, module) {
    super(name, module);
  }

  /**
   * Clean up stale temporary directories, then create this connection's own
   * lock-protected ".ahp-*" directory pre-populated with `nTemporaryFiles`
   * open sync access handles.
   */
  async #initialize(nTemporaryFiles) {
    // Delete temporary directories no longer in use.
    const root = await navigator.storage.getDirectory();
    // @ts-ignore
    for await (const entry of root.values()) {
      if (entry.kind === "directory" && entry.name.startsWith(".ahp-")) {
        // A lock with the same name as the directory protects it from
        // being deleted.
        await navigator.locks.request(entry.name, { ifAvailable: true }, async (lock) => {
          if (lock) {
            this.log?.(`Deleting temporary directory ${entry.name}`);
            await root.removeEntry(entry.name, { recursive: true });
          } else {
            this.log?.(`Temporary directory ${entry.name} is in use`);
          }
        });
      }
    }

    // Create our temporary directory.
    const tmpDirName = `.ahp-${Math.random().toString(36).slice(2)}`;
    this.releaser = await new Promise((resolve) => {
      navigator.locks.request(tmpDirName, () => {
        return new Promise((release) => {
          resolve(release);
        });
      });
    });
    finalizationRegistry.register(this, this.releaser);
    const tmpDir = await root.getDirectoryHandle(tmpDirName, { create: true });

    // Populate temporary directory.
    for (let i = 0; i < nTemporaryFiles; i++) {
      const tmpFile = await tmpDir.getFileHandle(`${i}.tmp`, { create: true });
      const tmpAccessHandle = await tmpFile.createSyncAccessHandle();
      this.unboundAccessHandles.add(tmpAccessHandle);
    }
  }

  /**
   * @param {string?} zName
   * @param {number} fileId
   * @param {number} flags
   * @param {DataView} pOutFlags
   * @returns {number}
   */
  jOpen(zName, fileId, flags, pOutFlags) {
    try {
      const url = new URL(zName || Math.random().toString(36).slice(2), "file://");
      const path = url.pathname;

      if (flags & VFS.SQLITE_OPEN_MAIN_DB) {
        const persistentFile = this.persistentFiles.get(path);
        if (persistentFile?.isRequestInProgress) {
          // Should not reach here unless SQLite itself retries an open.
          // Otherwise, asynchronous operations started on a previous
          // open try should have completed.
          return VFS.SQLITE_BUSY;
        } else if (!persistentFile) {
          // This is the usual starting point for opening a database.
          // Register a Promise that resolves when the database and related
          // files are ready to be used.
          this.log?.(`creating persistent file for ${path}`);
          const create = !!(flags & VFS.SQLITE_OPEN_CREATE);
          this._module.retryOps.push(
            (async () => {
              try {
                // Get the path directory handle.
                let dirHandle = await navigator.storage.getDirectory();
                const directories = path.split("/").filter((d) => d);
                const filename = directories.pop();
                for (const directory of directories) {
                  dirHandle = await dirHandle.getDirectoryHandle(directory, { create });
                }

                // Get file handles for the database and related files,
                // and create persistent file instances.
                for (const suffix of DB_RELATED_FILE_SUFFIXES) {
                  const fileHandle = await dirHandle.getFileHandle(filename + suffix, { create });
                  await this.#createPersistentFile(fileHandle);
                }

                // Get access handles for the files.
                const file = new File(path, flags);
                file.persistentFile = this.persistentFiles.get(path);
                await this.#requestAccessHandle(file);
              } catch (e) {
                // Use an invalid persistent file to signal this error
                // for the retried open.
                const persistentFile = new PersistentFile(null);
                this.persistentFiles.set(path, persistentFile);
                console.error(e);
              }
            })()
          );
          return VFS.SQLITE_BUSY;
        } else if (!persistentFile.fileHandle) {
          // The asynchronous open operation failed.
          this.persistentFiles.delete(path);
          return VFS.SQLITE_CANTOPEN;
        } else if (!persistentFile.accessHandle) {
          // This branch is reached if the database was previously opened
          // and closed.
          this._module.retryOps.push(
            (async () => {
              const file = new File(path, flags);
              file.persistentFile = this.persistentFiles.get(path);
              await this.#requestAccessHandle(file);
            })()
          );
          return VFS.SQLITE_BUSY;
        }
      }

      if (!this.accessiblePaths.has(path) && !(flags & VFS.SQLITE_OPEN_CREATE)) {
        throw new Error(`File ${path} not found`);
      }

      const file = new File(path, flags);
      this.mapIdToFile.set(fileId, file);

      if (this.persistentFiles.has(path)) {
        file.persistentFile = this.persistentFiles.get(path);
      } else if (this.boundAccessHandles.has(path)) {
        // This temporary file was previously created and closed. Reopen
        // the same access handle.
        file.accessHandle = this.boundAccessHandles.get(path);
      } else if (this.unboundAccessHandles.size) {
        // Associate an unbound access handle to this file.
        file.accessHandle = this.unboundAccessHandles.values().next().value;
        file.accessHandle.truncate(0);
        this.unboundAccessHandles.delete(file.accessHandle);
        this.boundAccessHandles.set(path, file.accessHandle);
      }
      this.accessiblePaths.add(path);

      pOutFlags.setInt32(0, flags, true);
      return VFS.SQLITE_OK;
    } catch (e) {
      this.lastError = e;
      return VFS.SQLITE_CANTOPEN;
    }
  }

  /**
   * @param {string} zName
   * @param {number} syncDir
   * @returns {number}
   */
  jDelete(zName, syncDir) {
    try {
      const url = new URL(zName, "file://");
      const path = url.pathname;

      if (this.persistentFiles.has(path)) {
        const persistentFile = this.persistentFiles.get(path);
        persistentFile.accessHandle.truncate(0);
      } else {
        this.boundAccessHandles.get(path)?.truncate(0);
      }
      this.accessiblePaths.delete(path);
      return VFS.SQLITE_OK;
    } catch (e) {
      this.lastError = e;
      return VFS.SQLITE_IOERR_DELETE;
    }
  }

  /**
   * @param {string} zName
   * @param {number} flags
   * @param {DataView} pResOut
   * @returns {number}
   */
  jAccess(zName, flags, pResOut) {
    try {
      const url = new URL(zName, "file://");
      const path = url.pathname;
      pResOut.setInt32(0, this.accessiblePaths.has(path) ? 1 : 0, true);
      return VFS.SQLITE_OK;
    } catch (e) {
      this.lastError = e;
      return VFS.SQLITE_IOERR_ACCESS;
    }
  }

  /**
   * @param {number} fileId
   * @returns {number}
   */
  jClose(fileId) {
    try {
      const file = this.mapIdToFile.get(fileId);
      this.mapIdToFile.delete(fileId);

      if (file?.flags & VFS.SQLITE_OPEN_MAIN_DB) {
        if (file.persistentFile?.handleLockReleaser) {
          this.#releaseAccessHandle(file);
        }
      } else if (file?.flags & VFS.SQLITE_OPEN_DELETEONCLOSE) {
        file.accessHandle.truncate(0);
        this.accessiblePaths.delete(file.path);
        if (!this.persistentFiles.has(file.path)) {
          this.boundAccessHandles.delete(file.path);
          this.unboundAccessHandles.add(file.accessHandle);
        }
      }
      return VFS.SQLITE_OK;
    } catch (e) {
      this.lastError = e;
      return VFS.SQLITE_IOERR_CLOSE;
    }
  }

  /**
   * @param {number} fileId
   * @param {Uint8Array} pData
   * @param {number} iOffset
   * @returns {number}
   */
  jRead(fileId, pData, iOffset) {
    try {
      const file = this.mapIdToFile.get(fileId);

      // On Chrome (at least), passing pData to accessHandle.read() is
      // an error because pData is a Proxy of a Uint8Array. Calling
      // subarray() produces a real Uint8Array and that works.
      const accessHandle = file.accessHandle || file.persistentFile.accessHandle;
      const bytesRead = accessHandle.read(pData.subarray(), { at: iOffset });

      // Opening a database file performs one read without a xLock call.
      if (file.flags & VFS.SQLITE_OPEN_MAIN_DB && !file.persistentFile.isFileLocked) {
        this.#releaseAccessHandle(file);
      }

      if (bytesRead < pData.byteLength) {
        pData.fill(0, bytesRead);
        return VFS.SQLITE_IOERR_SHORT_READ;
      }
      return VFS.SQLITE_OK;
    } catch (e) {
      this.lastError = e;
      return VFS.SQLITE_IOERR_READ;
    }
  }

  /**
   * @param {number} fileId
   * @param {Uint8Array} pData
   * @param {number} iOffset
   * @returns {number}
   */
  jWrite(fileId, pData, iOffset) {
    try {
      const file = this.mapIdToFile.get(fileId);

      // On Chrome (at least), passing pData to accessHandle.write() is
      // an error because pData is a Proxy of a Uint8Array. Calling
      // subarray() produces a real Uint8Array and that works.
      const accessHandle = file.accessHandle || file.persistentFile.accessHandle;
      const nBytes = accessHandle.write(pData.subarray(), { at: iOffset });
      if (nBytes !== pData.byteLength) throw new Error("short write");
      return VFS.SQLITE_OK;
    } catch (e) {
      this.lastError = e;
      return VFS.SQLITE_IOERR_WRITE;
    }
  }

  /**
   * @param {number} fileId
   * @param {number} iSize
   * @returns {number}
   */
  jTruncate(fileId, iSize) {
    try {
      const file = this.mapIdToFile.get(fileId);
      const accessHandle = file.accessHandle || file.persistentFile.accessHandle;
      accessHandle.truncate(iSize);
      return VFS.SQLITE_OK;
    } catch (e) {
      this.lastError = e;
      return VFS.SQLITE_IOERR_TRUNCATE;
    }
  }

  /**
   * @param {number} fileId
   * @param {number} flags
   * @returns {number}
   */
  jSync(fileId, flags) {
    try {
      const file = this.mapIdToFile.get(fileId);
      const accessHandle = file.accessHandle || file.persistentFile.accessHandle;
      accessHandle.flush();
      return VFS.SQLITE_OK;
    } catch (e) {
      this.lastError = e;
      return VFS.SQLITE_IOERR_FSYNC;
    }
  }

  /**
   * @param {number} fileId
   * @param {DataView} pSize64
   * @returns {number}
   */
  jFileSize(fileId, pSize64) {
    try {
      const file = this.mapIdToFile.get(fileId);
      const accessHandle = file.accessHandle || file.persistentFile.accessHandle;
      const size = accessHandle.getSize();
      pSize64.setBigInt64(0, BigInt(size), true);
      return VFS.SQLITE_OK;
    } catch (e) {
      this.lastError = e;
      return VFS.SQLITE_IOERR_FSTAT;
    }
  }

  /**
   * @param {number} fileId
   * @param {number} lockType
   * @returns {number}
   */
  jLock(fileId, lockType) {
    const file = this.mapIdToFile.get(fileId);
    if (file.persistentFile.isRequestInProgress) {
      file.persistentFile.isLockBusy = true;
      return VFS.SQLITE_BUSY;
    }

    file.persistentFile.isFileLocked = true;
    if (!file.persistentFile.handleLockReleaser) {
      // Start listening for notifications from other connections.
      // This is before we actually get access handles, but waiting to
      // listen until then allows a race condition where notifications
      // are missed.
      file.persistentFile.handleRequestChannel.onmessage = () => {
        this.log?.(`received notification for ${file.path}`);
        if (file.persistentFile.isFileLocked) {
          // We're still using the access handle, so mark it to be
          // released when we're done.
          file.persistentFile.isHandleRequested = true;
        } else {
          // Release the access handles immediately.
          this.#releaseAccessHandle(file);
        }
        file.persistentFile.handleRequestChannel.onmessage = null;
      };

      this.#requestAccessHandle(file);
      this.log?.("returning SQLITE_BUSY");
      file.persistentFile.isLockBusy = true;
      return VFS.SQLITE_BUSY;
    }
    file.persistentFile.isLockBusy = false;
    return VFS.SQLITE_OK;
  }

  /**
   * @param {number} fileId
   * @param {number} lockType
   * @returns {number}
   */
  jUnlock(fileId, lockType) {
    const file = this.mapIdToFile.get(fileId);
    if (lockType === VFS.SQLITE_LOCK_NONE) {
      // Don't change any state if this unlock is because xLock returned
      // SQLITE_BUSY.
      if (!file.persistentFile.isLockBusy) {
        if (file.persistentFile.isHandleRequested) {
          // Another connection wants the access handle.
          this.#releaseAccessHandle(file);
          // Fix: reset the flag on the persistent file. It was previously
          // assigned to `this.isHandleRequested`, which left the per-file
          // flag permanently true and released the handle on every
          // subsequent unlock.
          file.persistentFile.isHandleRequested = false;
        }
        file.persistentFile.isFileLocked = false;
      }
    }
    return VFS.SQLITE_OK;
  }

  /**
   * @param {number} fileId
   * @param {number} op
   * @param {DataView} pArg
   * @returns {number|Promise<number>}
   */
  jFileControl(fileId, op, pArg) {
    try {
      const file = this.mapIdToFile.get(fileId);
      switch (op) {
        case VFS.SQLITE_FCNTL_PRAGMA:
          const key = extractString(pArg, 4);
          const value = extractString(pArg, 8);
          this.log?.("xFileControl", file.path, "PRAGMA", key, value);
          switch (key.toLowerCase()) {
            case "journal_mode":
              if (value && !["off", "memory", "delete", "wal"].includes(value.toLowerCase())) {
                throw new Error('journal_mode must be "off", "memory", "delete", or "wal"');
              }
              break;
          }
          break;
      }
    } catch (e) {
      this.lastError = e;
      return VFS.SQLITE_IOERR;
    }
    return VFS.SQLITE_NOTFOUND;
  }

  /**
   * @param {Uint8Array} zBuf
   * @returns
   */
  jGetLastError(zBuf) {
    if (this.lastError) {
      console.error(this.lastError);
      const outputArray = zBuf.subarray(0, zBuf.byteLength - 1);
      const { written } = new TextEncoder().encodeInto(this.lastError.message, outputArray);
      zBuf[written] = 0;
    }
    return VFS.SQLITE_OK;
  }

  /**
   * @param {FileSystemFileHandle} fileHandle
   * @returns {Promise<PersistentFile>}
   */
  async #createPersistentFile(fileHandle) {
    const persistentFile = new PersistentFile(fileHandle);
    const root = await navigator.storage.getDirectory();
    const relativePath = await root.resolve(fileHandle);
    const path = `/${relativePath.join("/")}`;
    persistentFile.handleRequestChannel = new BroadcastChannel(`ahp:${path}`);
    this.persistentFiles.set(path, persistentFile);

    const f = await fileHandle.getFile();
    if (f.size) {
      this.accessiblePaths.add(path);
    }
    return persistentFile;
  }

  /**
   * Acquire the Web Lock for a database and open sync access handles for it
   * and its related files. No-op if a request is already in flight.
   * @param {File} file
   */
  #requestAccessHandle(file) {
    console.assert(!file.persistentFile.handleLockReleaser);
    if (!file.persistentFile.isRequestInProgress) {
      file.persistentFile.isRequestInProgress = true;
      this._module.retryOps.push(
        (async () => {
          // Acquire the Web Lock.
          file.persistentFile.handleLockReleaser = await this.#acquireLock(file.persistentFile);

          // Get access handles for the database and releated files in parallel.
          this.log?.(`creating access handles for ${file.path}`);
          await Promise.all(
            DB_RELATED_FILE_SUFFIXES.map(async (suffix) => {
              const persistentFile = this.persistentFiles.get(file.path + suffix);
              if (persistentFile) {
                persistentFile.accessHandle = await persistentFile.fileHandle.createSyncAccessHandle();
              }
            })
          );
          file.persistentFile.isRequestInProgress = false;
        })()
      );
      return this._module.retryOps.at(-1);
    }
    return Promise.resolve();
  }

  /**
   * Close the access handles for a database and its related files, then
   * release the Web Lock so another connection can take them.
   * NOTE(review): forEach with an async callback is not awaited; the body is
   * effectively synchronous (close() is sync), but confirm before changing.
   * @param {File} file
   */
  async #releaseAccessHandle(file) {
    DB_RELATED_FILE_SUFFIXES.forEach(async (suffix) => {
      const persistentFile = this.persistentFiles.get(file.path + suffix);
      if (persistentFile) {
        persistentFile.accessHandle?.close();
        persistentFile.accessHandle = null;
      }
    });
    this.log?.(`access handles closed for ${file.path}`);

    file.persistentFile.handleLockReleaser?.();
    file.persistentFile.handleLockReleaser = null;
    this.log?.(`lock released for ${file.path}`);
  }

  /**
   * @param {PersistentFile} persistentFile
   * @returns {Promise<function>} lock releaser
   */
  #acquireLock(persistentFile) {
    return new Promise((resolve) => {
      // Tell other connections we want the access handle.
      const lockName = persistentFile.handleRequestChannel.name;
      const notify = () => {
        this.log?.(`notifying for ${lockName}`);
        persistentFile.handleRequestChannel.postMessage(null);
      };
      const notifyId = setInterval(notify, LOCK_NOTIFY_INTERVAL);
      setTimeout(notify);

      this.log?.(`lock requested: ${lockName}`);
      navigator.locks.request(lockName, (lock) => {
        // We have the lock. Stop asking other connections for it.
        this.log?.(`lock acquired: ${lockName}`, lock);
        clearInterval(notifyId);
        return new Promise(resolve);
      });
    });
  }
}
// Read a NUL-terminated UTF-8 string referenced by the 32-bit little-endian
// pointer stored at `offset` within `dataView`. Returns null for a NULL
// pointer.
function extractString(dataView, offset) {
  const ptr = dataView.getUint32(offset, true);
  if (!ptr) return null;
  const bytes = new Uint8Array(dataView.buffer, ptr);
  const terminator = bytes.indexOf(0);
  return new TextDecoder().decode(bytes.subarray(0, terminator));
}

View File

@@ -1,222 +0,0 @@
// Copyright 2024 Roy T. Hashimoto. All Rights Reserved.
import * as VFS from './sqlite-constants.js';
export * from './sqlite-constants.js';
const DEFAULT_SECTOR_SIZE = 512;
// Base class for a VFS.
/**
 * Base class for SQLite VFS implementations.
 *
 * Each x* method mirrors an entry point of sqlite3_vfs / sqlite3_io_methods
 * and returns an SQLite status code (or a Promise resolving to one).
 * Defaults are permissive no-op successes, except xOpen (always refuses),
 * xFileControl (opcode not handled), and the fixed-value xSectorSize /
 * xDeviceCharacteristics. Subclasses override only what they implement.
 */
export class Base {
  /** VFS name used when registering with SQLite. */
  name;
  /** Longest pathname, in bytes, accepted by this VFS. */
  mxPathname = 64;
  /** Emscripten module this VFS belongs to. */
  _module;

  /**
   * @param {string} name
   * @param {object} module
   */
  constructor(name, module) {
    this.name = name;
    this._module = module;
  }

  /** Release any resources held by the VFS. @returns {void|Promise<void>} */
  close() {}

  /** @returns {boolean|Promise<boolean>} whether the VFS is ready for use */
  isReady() {
    return true;
  }

  /**
   * Overload in subclasses to indicate which methods are asynchronous.
   * @param {string} methodName
   * @returns {boolean}
   */
  hasAsyncMethod(methodName) {
    return false;
  }

  /** @param {number} pVfs @param {number} zName @param {number} pFile @param {number} flags @param {number} pOutFlags @returns {number|Promise<number>} */
  xOpen(pVfs, zName, pFile, flags, pOutFlags) {
    // A base VFS cannot open anything; subclasses must override.
    return VFS.SQLITE_CANTOPEN;
  }

  /** @param {number} pVfs @param {number} zName @param {number} syncDir @returns {number|Promise<number>} */
  xDelete(pVfs, zName, syncDir) {
    return VFS.SQLITE_OK;
  }

  /** @param {number} pVfs @param {number} zName @param {number} flags @param {number} pResOut @returns {number|Promise<number>} */
  xAccess(pVfs, zName, flags, pResOut) {
    return VFS.SQLITE_OK;
  }

  /** @param {number} pVfs @param {number} zName @param {number} nOut @param {number} zOut @returns {number|Promise<number>} */
  xFullPathname(pVfs, zName, nOut, zOut) {
    return VFS.SQLITE_OK;
  }

  /** @param {number} pVfs @param {number} nBuf @param {number} zBuf @returns {number|Promise<number>} */
  xGetLastError(pVfs, nBuf, zBuf) {
    return VFS.SQLITE_OK;
  }

  /** @param {number} pFile @returns {number|Promise<number>} */
  xClose(pFile) {
    return VFS.SQLITE_OK;
  }

  /** @param {number} pFile @param {number} pData @param {number} iAmt @param {number} iOffsetLo @param {number} iOffsetHi @returns {number|Promise<number>} */
  xRead(pFile, pData, iAmt, iOffsetLo, iOffsetHi) {
    return VFS.SQLITE_OK;
  }

  /** @param {number} pFile @param {number} pData @param {number} iAmt @param {number} iOffsetLo @param {number} iOffsetHi @returns {number|Promise<number>} */
  xWrite(pFile, pData, iAmt, iOffsetLo, iOffsetHi) {
    return VFS.SQLITE_OK;
  }

  /** @param {number} pFile @param {number} sizeLo @param {number} sizeHi @returns {number|Promise<number>} */
  xTruncate(pFile, sizeLo, sizeHi) {
    return VFS.SQLITE_OK;
  }

  /** @param {number} pFile @param {number} flags @returns {number|Promise<number>} */
  xSync(pFile, flags) {
    return VFS.SQLITE_OK;
  }

  /** @param {number} pFile @param {number} pSize @returns {number|Promise<number>} */
  xFileSize(pFile, pSize) {
    return VFS.SQLITE_OK;
  }

  /** @param {number} pFile @param {number} lockType @returns {number|Promise<number>} */
  xLock(pFile, lockType) {
    return VFS.SQLITE_OK;
  }

  /** @param {number} pFile @param {number} lockType @returns {number|Promise<number>} */
  xUnlock(pFile, lockType) {
    return VFS.SQLITE_OK;
  }

  /** @param {number} pFile @param {number} pResOut @returns {number|Promise<number>} */
  xCheckReservedLock(pFile, pResOut) {
    return VFS.SQLITE_OK;
  }

  /** @param {number} pFile @param {number} op @param {number} pArg @returns {number|Promise<number>} */
  xFileControl(pFile, op, pArg) {
    // No file-control opcodes handled by default.
    return VFS.SQLITE_NOTFOUND;
  }

  /** @param {number} pFile @returns {number|Promise<number>} */
  xSectorSize(pFile) {
    return DEFAULT_SECTOR_SIZE;
  }

  /** @param {number} pFile @returns {number|Promise<number>} */
  xDeviceCharacteristics(pFile) {
    return 0;
  }
}
// Bit mask covering every file-type open flag SQLite passes to xOpen.
export const FILE_TYPE_MASK = [
  VFS.SQLITE_OPEN_MAIN_DB,
  VFS.SQLITE_OPEN_MAIN_JOURNAL,
  VFS.SQLITE_OPEN_TEMP_DB,
  VFS.SQLITE_OPEN_TEMP_JOURNAL,
  VFS.SQLITE_OPEN_TRANSIENT_DB,
  VFS.SQLITE_OPEN_SUBJOURNAL,
  VFS.SQLITE_OPEN_SUPER_JOURNAL,
  VFS.SQLITE_OPEN_WAL,
].reduce((mask, flag) => mask | flag, 0);

View File

@@ -1,899 +0,0 @@
// Copyright 2021 Roy T. Hashimoto. All Rights Reserved.
import * as SQLite from "./sqlite-constants.js";
export * from "./sqlite-constants.js";
const MAX_INT64 = 0x7fffffffffffffffn;
const MIN_INT64 = -0x8000000000000000n;
const AsyncFunction = Object.getPrototypeOf(async function () {}).constructor;
/**
 * Error thrown for non-OK SQLite results; `code` carries the numeric
 * SQLite status code that triggered it.
 */
export class SQLiteError extends Error {
  /** @type {number} SQLite result code */
  code;

  /**
   * @param {string} message human-readable error text
   * @param {number} code SQLite result code
   */
  constructor(message, code) {
    super(message);
    this.code = code;
  }
}
const async = true; // shorthand for the { async } option passed to Module.cwrap on suspendable calls
/**
* Builds a Javascript API from the Emscripten module. This API is still
* low-level and closely corresponds to the C API exported by the module,
* but differs in some specifics like throwing exceptions on errors.
* @param {*} Module SQLite Emscripten module
* @returns {SQLiteAPI}
*/
export function Factory(Module) {
/** @type {SQLiteAPI} */ const sqlite3 = {};
Module.retryOps = [];
const sqliteFreeAddress = Module._getSqliteFree();
// Allocate some space for 32-bit returned values.
const tmp = Module._malloc(8);
const tmpPtr = [tmp, tmp + 4];
// Convert a JS string to a C string. sqlite3_malloc is used to allocate
// memory (use sqlite3_free to deallocate).
function createUTF8(s) {
if (typeof s !== "string") return 0;
const utf8 = new TextEncoder().encode(s);
const zts = Module._sqlite3_malloc(utf8.byteLength + 1);
Module.HEAPU8.set(utf8, zts);
Module.HEAPU8[zts + utf8.byteLength] = 0;
return zts;
}
/**
 * Combine two 32-bit halves into one signed 64-bit BigInt.
 * @param {number} lo32 low 32 bits (sign is masked away)
 * @param {number} hi32 high 32 bits (determines the overall sign)
 * @returns {bigint}
 */
function cvt32x2ToBigInt(lo32, hi32) {
  const high = BigInt(hi32) << 32n;
  const low = BigInt(lo32) & 0xffffffffn;
  return high | low;
}
/**
 * Combine two 32-bit halves into a plain number when the result fits in
 * the double-precision safe-integer range, otherwise into a BigInt.
 * @param {number} lo32
 * @param {number} hi32
 * @returns {number|bigint}
 */
const cvt32x2AsSafe = (function () {
  // High-word bounds beyond which the combined value cannot be a safe Number.
  const hiMax = BigInt(Number.MAX_SAFE_INTEGER) >> 32n;
  const hiMin = BigInt(Number.MIN_SAFE_INTEGER) >> 32n;
  return function (lo32, hi32) {
    if (hi32 <= hiMax && hi32 >= hiMin) {
      // Safe range: combine numerically. lo32 arrives as a signed 32-bit
      // value, so its sign bit is peeled off and re-added with unsigned
      // weight.
      return hi32 * 0x100000000 + (lo32 & 0x7fffffff) - (lo32 & 0x80000000);
    }
    // Out of safe range: exact BigInt arithmetic.
    return (BigInt(hi32) << 32n) | (BigInt(lo32) & 0xffffffffn);
  };
})();
// Handles returned by open_v2; used to reject calls on unknown or
// already-closed database pointers.
const databases = new Set();
function verifyDatabase(db) {
  if (!databases.has(db)) {
    throw new SQLiteError("not a database", SQLite.SQLITE_MISUSE);
  }
}
// Maps each prepared-statement handle to its owning database handle, so
// statement errors can be reported with that database's error message.
const mapStmtToDB = new Map();
function verifyStatement(stmt) {
  if (!mapStmtToDB.has(stmt)) {
    throw new SQLiteError("not a statement", SQLite.SQLITE_MISUSE);
  }
}
sqlite3.bind_collection = function (stmt, bindings) {
verifyStatement(stmt);
const isArray = Array.isArray(bindings);
const nBindings = sqlite3.bind_parameter_count(stmt);
for (let i = 1; i <= nBindings; ++i) {
const key = isArray ? i - 1 : sqlite3.bind_parameter_name(stmt, i);
const value = bindings[key];
if (value !== undefined) {
sqlite3.bind(stmt, i, value);
}
}
return SQLite.SQLITE_OK;
};
sqlite3.bind = function (stmt, i, value) {
verifyStatement(stmt);
switch (typeof value) {
case "number":
if (value === (value | 0)) {
return sqlite3.bind_int(stmt, i, value);
} else {
return sqlite3.bind_double(stmt, i, value);
}
case "string":
return sqlite3.bind_text(stmt, i, value);
default:
if (value instanceof Uint8Array || Array.isArray(value)) {
return sqlite3.bind_blob(stmt, i, value);
} else if (value === null) {
return sqlite3.bind_null(stmt, i);
} else if (typeof value === "bigint") {
return sqlite3.bind_int64(stmt, i, value);
} else if (value === undefined) {
// Existing binding (or NULL) will be used.
return SQLite.SQLITE_NOTICE;
} else {
console.warn("unknown binding converted to null", value);
return sqlite3.bind_null(stmt, i);
}
}
};
sqlite3.bind_blob = (function () {
const fname = "sqlite3_bind_blob";
const f = Module.cwrap(fname, ...decl("nnnnn:n"));
return function (stmt, i, value) {
verifyStatement(stmt);
// @ts-ignore
const byteLength = value.byteLength ?? value.length;
const ptr = Module._sqlite3_malloc(byteLength);
Module.HEAPU8.subarray(ptr).set(value);
const result = f(stmt, i, ptr, byteLength, sqliteFreeAddress);
return check(fname, result, mapStmtToDB.get(stmt));
};
})();
sqlite3.bind_parameter_count = (function () {
const fname = "sqlite3_bind_parameter_count";
const f = Module.cwrap(fname, ...decl("n:n"));
return function (stmt) {
verifyStatement(stmt);
const result = f(stmt);
return result;
};
})();
sqlite3.bind_double = (function () {
const fname = "sqlite3_bind_double";
const f = Module.cwrap(fname, ...decl("nnn:n"));
return function (stmt, i, value) {
verifyStatement(stmt);
const result = f(stmt, i, value);
return check(fname, result, mapStmtToDB.get(stmt));
};
})();
sqlite3.bind_int = (function () {
const fname = "sqlite3_bind_int";
const f = Module.cwrap(fname, ...decl("nnn:n"));
return function (stmt, i, value) {
verifyStatement(stmt);
if (value > 0x7fffffff || value < -0x80000000) return SQLite.SQLITE_RANGE;
const result = f(stmt, i, value);
return check(fname, result, mapStmtToDB.get(stmt));
};
})();
sqlite3.bind_int64 = (function () {
const fname = "sqlite3_bind_int64";
const f = Module.cwrap(fname, ...decl("nnnn:n"));
return function (stmt, i, value) {
verifyStatement(stmt);
if (value > MAX_INT64 || value < MIN_INT64) return SQLite.SQLITE_RANGE;
const lo32 = value & 0xffffffffn;
const hi32 = value >> 32n;
const result = f(stmt, i, Number(lo32), Number(hi32));
return check(fname, result, mapStmtToDB.get(stmt));
};
})();
sqlite3.bind_null = (function () {
const fname = "sqlite3_bind_null";
const f = Module.cwrap(fname, ...decl("nn:n"));
return function (stmt, i) {
verifyStatement(stmt);
const result = f(stmt, i);
return check(fname, result, mapStmtToDB.get(stmt));
};
})();
sqlite3.bind_parameter_name = (function () {
const fname = "sqlite3_bind_parameter_name";
const f = Module.cwrap(fname, ...decl("n:s"));
return function (stmt, i) {
verifyStatement(stmt);
const result = f(stmt, i);
return result;
};
})();
sqlite3.bind_text = (function () {
const fname = "sqlite3_bind_text";
const f = Module.cwrap(fname, ...decl("nnnnn:n"));
return function (stmt, i, value) {
verifyStatement(stmt);
const ptr = createUTF8(value);
const result = f(stmt, i, ptr, -1, sqliteFreeAddress);
return check(fname, result, mapStmtToDB.get(stmt));
};
})();
sqlite3.changes = (function () {
const fname = "sqlite3_changes";
const f = Module.cwrap(fname, ...decl("n:n"));
return function (db) {
verifyDatabase(db);
const result = f(db);
return result;
};
})();
sqlite3.clear_bindings = (function () {
const fname = "sqlite3_clear_bindings";
const f = Module.cwrap(fname, ...decl("n:n"));
return function (stmt) {
verifyStatement(stmt);
const result = f(stmt);
return check(fname, result, mapStmtToDB.get(stmt));
};
})();
sqlite3.close = (function () {
const fname = "sqlite3_close";
const f = Module.cwrap(fname, ...decl("n:n"), { async });
return async function (db) {
verifyDatabase(db);
const result = await f(db);
databases.delete(db);
return check(fname, result, db);
};
})();
sqlite3.column = function (stmt, iCol) {
verifyStatement(stmt);
const type = sqlite3.column_type(stmt, iCol);
switch (type) {
case SQLite.SQLITE_BLOB:
return sqlite3.column_blob(stmt, iCol);
case SQLite.SQLITE_FLOAT:
return sqlite3.column_double(stmt, iCol);
case SQLite.SQLITE_INTEGER:
const lo32 = sqlite3.column_int(stmt, iCol);
const hi32 = Module.getTempRet0();
return cvt32x2AsSafe(lo32, hi32);
case SQLite.SQLITE_NULL:
return null;
case SQLite.SQLITE_TEXT:
return sqlite3.column_text(stmt, iCol);
default:
throw new SQLiteError("unknown type", type);
}
};
sqlite3.column_blob = (function () {
const fname = "sqlite3_column_blob";
const f = Module.cwrap(fname, ...decl("nn:n"));
return function (stmt, iCol) {
verifyStatement(stmt);
const nBytes = sqlite3.column_bytes(stmt, iCol);
const address = f(stmt, iCol);
const result = Module.HEAPU8.subarray(address, address + nBytes);
return result;
};
})();
sqlite3.column_bytes = (function () {
const fname = "sqlite3_column_bytes";
const f = Module.cwrap(fname, ...decl("nn:n"));
return function (stmt, iCol) {
verifyStatement(stmt);
const result = f(stmt, iCol);
return result;
};
})();
sqlite3.column_count = (function () {
const fname = "sqlite3_column_count";
const f = Module.cwrap(fname, ...decl("n:n"));
return function (stmt) {
verifyStatement(stmt);
const result = f(stmt);
return result;
};
})();
sqlite3.column_double = (function () {
const fname = "sqlite3_column_double";
const f = Module.cwrap(fname, ...decl("nn:n"));
return function (stmt, iCol) {
verifyStatement(stmt);
const result = f(stmt, iCol);
return result;
};
})();
sqlite3.column_int = (function () {
// Retrieve int64 but use only the lower 32 bits. The upper 32-bits are
// accessible with Module.getTempRet0().
const fname = "sqlite3_column_int64";
const f = Module.cwrap(fname, ...decl("nn:n"));
return function (stmt, iCol) {
verifyStatement(stmt);
const result = f(stmt, iCol);
return result;
};
})();
sqlite3.column_int64 = (function () {
const fname = "sqlite3_column_int64";
const f = Module.cwrap(fname, ...decl("nn:n"));
return function (stmt, iCol) {
verifyStatement(stmt);
const lo32 = f(stmt, iCol);
const hi32 = Module.getTempRet0();
const result = cvt32x2ToBigInt(lo32, hi32);
return result;
};
})();
sqlite3.column_name = (function () {
const fname = "sqlite3_column_name";
const f = Module.cwrap(fname, ...decl("nn:s"));
return function (stmt, iCol) {
verifyStatement(stmt);
const result = f(stmt, iCol);
return result;
};
})();
sqlite3.column_names = function (stmt) {
const columns = [];
const nColumns = sqlite3.column_count(stmt);
for (let i = 0; i < nColumns; ++i) {
columns.push(sqlite3.column_name(stmt, i));
}
return columns;
};
sqlite3.column_text = (function () {
const fname = "sqlite3_column_text";
const f = Module.cwrap(fname, ...decl("nn:s"));
return function (stmt, iCol) {
verifyStatement(stmt);
const result = f(stmt, iCol);
return result;
};
})();
sqlite3.column_type = (function () {
const fname = "sqlite3_column_type";
const f = Module.cwrap(fname, ...decl("nn:n"));
return function (stmt, iCol) {
verifyStatement(stmt);
const result = f(stmt, iCol);
return result;
};
})();
sqlite3.create_function = function (db, zFunctionName, nArg, eTextRep, pApp, xFunc, xStep, xFinal) {
verifyDatabase(db);
// Convert SQLite callback arguments to JavaScript-friendly arguments.
function adapt(f) {
return f instanceof AsyncFunction
? async (ctx, n, values) => f(ctx, Module.HEAP32.subarray(values / 4, values / 4 + n))
: (ctx, n, values) => f(ctx, Module.HEAP32.subarray(values / 4, values / 4 + n));
}
const result = Module.create_function(
db,
zFunctionName,
nArg,
eTextRep,
pApp,
xFunc && adapt(xFunc),
xStep && adapt(xStep),
xFinal
);
return check("sqlite3_create_function", result, db);
};
sqlite3.data_count = (function () {
const fname = "sqlite3_data_count";
const f = Module.cwrap(fname, ...decl("n:n"));
return function (stmt) {
verifyStatement(stmt);
const result = f(stmt);
return result;
};
})();
sqlite3.exec = async function (db, sql, callback) {
for await (const stmt of sqlite3.statements(db, sql)) {
let columns;
while ((await sqlite3.step(stmt)) === SQLite.SQLITE_ROW) {
if (callback) {
columns = columns ?? sqlite3.column_names(stmt);
const row = sqlite3.row(stmt);
await callback(row, columns);
}
}
}
return SQLite.SQLITE_OK;
};
sqlite3.finalize = (function () {
const fname = "sqlite3_finalize";
const f = Module.cwrap(fname, ...decl("n:n"), { async });
return async function (stmt) {
const result = await f(stmt);
mapStmtToDB.delete(stmt);
// Don't throw on error here. Typically the error has already been
// thrown and finalize() is part of the cleanup.
return result;
};
})();
sqlite3.get_autocommit = (function () {
const fname = "sqlite3_get_autocommit";
const f = Module.cwrap(fname, ...decl("n:n"));
return function (db) {
const result = f(db);
return result;
};
})();
sqlite3.libversion = (function () {
const fname = "sqlite3_libversion";
const f = Module.cwrap(fname, ...decl(":s"));
return function () {
const result = f();
return result;
};
})();
sqlite3.libversion_number = (function () {
const fname = "sqlite3_libversion_number";
const f = Module.cwrap(fname, ...decl(":n"));
return function () {
const result = f();
return result;
};
})();
sqlite3.limit = (function () {
const fname = "sqlite3_limit";
const f = Module.cwrap(fname, ...decl("nnn:n"));
return function (db, id, newVal) {
const result = f(db, id, newVal);
return result;
};
})();
sqlite3.open_v2 = (function () {
const fname = "sqlite3_open_v2";
const f = Module.cwrap(fname, ...decl("snnn:n"), { async });
return async function (zFilename, flags, zVfs) {
flags = flags || SQLite.SQLITE_OPEN_CREATE | SQLite.SQLITE_OPEN_READWRITE;
zVfs = createUTF8(zVfs);
try {
// Allow retry operations.
const rc = await retry(() => f(zFilename, tmpPtr[0], flags, zVfs));
const db = Module.getValue(tmpPtr[0], "*");
databases.add(db);
Module.ccall("RegisterExtensionFunctions", "void", ["number"], [db]);
check(fname, rc);
return db;
} finally {
Module._sqlite3_free(zVfs);
}
};
})();
sqlite3.progress_handler = function (db, nProgressOps, handler, userData) {
verifyDatabase(db);
Module.progress_handler(db, nProgressOps, handler, userData);
};
sqlite3.reset = (function () {
const fname = "sqlite3_reset";
const f = Module.cwrap(fname, ...decl("n:n"), { async });
return async function (stmt) {
verifyStatement(stmt);
const result = await f(stmt);
return check(fname, result, mapStmtToDB.get(stmt));
};
})();
sqlite3.result = function (context, value) {
switch (typeof value) {
case "number":
if (value === (value | 0)) {
sqlite3.result_int(context, value);
} else {
sqlite3.result_double(context, value);
}
break;
case "string":
sqlite3.result_text(context, value);
break;
default:
if (value instanceof Uint8Array || Array.isArray(value)) {
sqlite3.result_blob(context, value);
} else if (value === null) {
sqlite3.result_null(context);
} else if (typeof value === "bigint") {
return sqlite3.result_int64(context, value);
} else {
console.warn("unknown result converted to null", value);
sqlite3.result_null(context);
}
break;
}
};
sqlite3.result_blob = (function () {
const fname = "sqlite3_result_blob";
const f = Module.cwrap(fname, ...decl("nnnn:n"));
return function (context, value) {
// @ts-ignore
const byteLength = value.byteLength ?? value.length;
const ptr = Module._sqlite3_malloc(byteLength);
Module.HEAPU8.subarray(ptr).set(value);
f(context, ptr, byteLength, sqliteFreeAddress); // void return
};
})();
sqlite3.result_double = (function () {
const fname = "sqlite3_result_double";
const f = Module.cwrap(fname, ...decl("nn:n"));
return function (context, value) {
f(context, value); // void return
};
})();
sqlite3.result_int = (function () {
const fname = "sqlite3_result_int";
const f = Module.cwrap(fname, ...decl("nn:n"));
return function (context, value) {
f(context, value); // void return
};
})();
sqlite3.result_int64 = (function () {
const fname = "sqlite3_result_int64";
const f = Module.cwrap(fname, ...decl("nnn:n"));
return function (context, value) {
if (value > MAX_INT64 || value < MIN_INT64) return SQLite.SQLITE_RANGE;
const lo32 = value & 0xffffffffn;
const hi32 = value >> 32n;
f(context, Number(lo32), Number(hi32)); // void return
};
})();
sqlite3.result_null = (function () {
const fname = "sqlite3_result_null";
const f = Module.cwrap(fname, ...decl("n:n"));
return function (context) {
f(context); // void return
};
})();
sqlite3.result_text = (function () {
const fname = "sqlite3_result_text";
const f = Module.cwrap(fname, ...decl("nnnn:n"));
return function (context, value) {
const ptr = createUTF8(value);
f(context, ptr, -1, sqliteFreeAddress); // void return
};
})();
sqlite3.row = function (stmt) {
const row = [];
const nColumns = sqlite3.data_count(stmt);
for (let i = 0; i < nColumns; ++i) {
const value = sqlite3.column(stmt, i);
// Copy blob if aliasing volatile WebAssembly memory. This avoids an
// unnecessary copy if users monkey patch column_blob to copy.
// @ts-ignore
row.push(value?.buffer === Module.HEAPU8.buffer ? value.slice() : value);
}
return row;
};
sqlite3.set_authorizer = function (db, xAuth, pApp) {
verifyDatabase(db);
// Convert SQLite callback arguments to JavaScript-friendly arguments.
function cvtArgs(_, iAction, p3, p4, p5, p6) {
return [
_,
iAction,
Module.UTF8ToString(p3),
Module.UTF8ToString(p4),
Module.UTF8ToString(p5),
Module.UTF8ToString(p6),
];
}
function adapt(f) {
return f instanceof AsyncFunction
? async (_, iAction, p3, p4, p5, p6) => f(...cvtArgs(_, iAction, p3, p4, p5, p6))
: (_, iAction, p3, p4, p5, p6) => f(...cvtArgs(_, iAction, p3, p4, p5, p6));
}
const result = Module.set_authorizer(db, adapt(xAuth), pApp);
return check("sqlite3_set_authorizer", result, db);
};
sqlite3.sql = (function () {
const fname = "sqlite3_sql";
const f = Module.cwrap(fname, ...decl("n:s"));
return function (stmt) {
verifyStatement(stmt);
const result = f(stmt);
return result;
};
})();
/**
 * Async generator yielding a prepared-statement handle for each SQL
 * statement in `sql`. Handles are finalized automatically when iteration
 * advances or ends, unless `options.unscoped` is set (caller then owns
 * finalization). `options.flags` is forwarded to sqlite3_prepare_v3.
 */
sqlite3.statements = function (db, sql, options = {}) {
  const prepare = Module.cwrap(
    "sqlite3_prepare_v3",
    "number",
    ["number", "number", "number", "number", "number", "number"],
    { async: true }
  );
  return (async function* () {
    // Cleanup actions, executed LIFO in the finally block below.
    const onFinally = [];
    try {
      // Encode SQL string to UTF-8.
      const utf8 = new TextEncoder().encode(sql);
      // Copy encoded string to WebAssembly memory. The SQLite docs say
      // zero-termination is a minor optimization so add room for that.
      // Also add space for the statement handle and SQL tail pointer.
      const allocSize = utf8.byteLength - (utf8.byteLength % 4) + 12;
      const pzHead = Module._sqlite3_malloc(allocSize);
      const pzEnd = pzHead + utf8.byteLength + 1;
      onFinally.push(() => Module._sqlite3_free(pzHead));
      Module.HEAPU8.set(utf8, pzHead);
      Module.HEAPU8[pzEnd - 1] = 0;
      // Use extra space for the statement handle and SQL tail pointer.
      const pStmt = pzHead + allocSize - 8;
      const pzTail = pzHead + allocSize - 4;
      // Ensure that statement handles are not leaked.
      let stmt;
      function maybeFinalize() {
        if (stmt && !options.unscoped) {
          sqlite3.finalize(stmt);
        }
        stmt = 0;
      }
      onFinally.push(maybeFinalize);
      // Loop over statements. pzTail starts at the head of the SQL text
      // and sqlite3_prepare_v3 advances it past each parsed statement.
      Module.setValue(pzTail, pzHead, "*");
      do {
        // Reclaim resources for the previous iteration.
        maybeFinalize();
        // Call sqlite3_prepare_v3() for the next statement.
        // Allow retry operations.
        const zTail = Module.getValue(pzTail, "*");
        const rc = await retry(() => {
          return prepare(db, zTail, pzEnd - pzTail, options.flags || 0, pStmt, pzTail);
        });
        if (rc !== SQLite.SQLITE_OK) {
          check("sqlite3_prepare_v3", rc, db);
        }
        stmt = Module.getValue(pStmt, "*");
        if (stmt) {
          mapStmtToDB.set(stmt, db);
          yield stmt;
        }
      } while (stmt); // a zero handle means no further statement was parsed
    } finally {
      while (onFinally.length) {
        onFinally.pop()();
      }
    }
  })();
};
sqlite3.step = (function () {
const fname = "sqlite3_step";
const f = Module.cwrap(fname, ...decl("n:n"), { async });
return async function (stmt) {
verifyStatement(stmt);
// Allow retry operations.
const rc = await retry(() => f(stmt));
return check(fname, rc, mapStmtToDB.get(stmt), [SQLite.SQLITE_ROW, SQLite.SQLITE_DONE]);
};
})();
sqlite3.update_hook = function (db, xUpdateHook) {
verifyDatabase(db);
// Convert SQLite callback arguments to JavaScript-friendly arguments.
function cvtArgs(iUpdateType, dbName, tblName, lo32, hi32) {
return [iUpdateType, Module.UTF8ToString(dbName), Module.UTF8ToString(tblName), cvt32x2ToBigInt(lo32, hi32)];
}
function adapt(f) {
return f instanceof AsyncFunction
? async (iUpdateType, dbName, tblName, lo32, hi32) => f(...cvtArgs(iUpdateType, dbName, tblName, lo32, hi32))
: (iUpdateType, dbName, tblName, lo32, hi32) => f(...cvtArgs(iUpdateType, dbName, tblName, lo32, hi32));
}
Module.update_hook(db, adapt(xUpdateHook));
};
sqlite3.value = function (pValue) {
const type = sqlite3.value_type(pValue);
switch (type) {
case SQLite.SQLITE_BLOB:
return sqlite3.value_blob(pValue);
case SQLite.SQLITE_FLOAT:
return sqlite3.value_double(pValue);
case SQLite.SQLITE_INTEGER:
const lo32 = sqlite3.value_int(pValue);
const hi32 = Module.getTempRet0();
return cvt32x2AsSafe(lo32, hi32);
case SQLite.SQLITE_NULL:
return null;
case SQLite.SQLITE_TEXT:
return sqlite3.value_text(pValue);
default:
throw new SQLiteError("unknown type", type);
}
};
sqlite3.value_blob = (function () {
const fname = "sqlite3_value_blob";
const f = Module.cwrap(fname, ...decl("n:n"));
return function (pValue) {
const nBytes = sqlite3.value_bytes(pValue);
const address = f(pValue);
const result = Module.HEAPU8.subarray(address, address + nBytes);
return result;
};
})();
sqlite3.value_bytes = (function () {
const fname = "sqlite3_value_bytes";
const f = Module.cwrap(fname, ...decl("n:n"));
return function (pValue) {
const result = f(pValue);
return result;
};
})();
sqlite3.value_double = (function () {
const fname = "sqlite3_value_double";
const f = Module.cwrap(fname, ...decl("n:n"));
return function (pValue) {
const result = f(pValue);
return result;
};
})();
sqlite3.value_int = (function () {
const fname = "sqlite3_value_int64";
const f = Module.cwrap(fname, ...decl("n:n"));
return function (pValue) {
const result = f(pValue);
return result;
};
})();
sqlite3.value_int64 = (function () {
const fname = "sqlite3_value_int64";
const f = Module.cwrap(fname, ...decl("n:n"));
return function (pValue) {
const lo32 = f(pValue);
const hi32 = Module.getTempRet0();
const result = cvt32x2ToBigInt(lo32, hi32);
return result;
};
})();
sqlite3.value_text = (function () {
const fname = "sqlite3_value_text";
const f = Module.cwrap(fname, ...decl("n:s"));
return function (pValue) {
const result = f(pValue);
return result;
};
})();
sqlite3.value_type = (function () {
const fname = "sqlite3_value_type";
const f = Module.cwrap(fname, ...decl("n:n"));
return function (pValue) {
const result = f(pValue);
return result;
};
})();
sqlite3.vfs_register = function (vfs, makeDefault) {
const result = Module.vfs_register(vfs, makeDefault);
return check("sqlite3_vfs_register", result);
};
/**
 * Throw SQLiteError unless `result` is one of the `allowed` codes.
 * With a db handle the message comes from sqlite3_errmsg; otherwise the
 * failing function's name is used as the message.
 */
function check(fname, result, db = null, allowed = [SQLite.SQLITE_OK]) {
  if (allowed.includes(result)) return result;
  const message = db ? Module.ccall("sqlite3_errmsg", "string", ["number"], [db]) : fname;
  throw new SQLiteError(message, result);
}
// This function is used to automatically retry failed calls that
// have pending retry operations that should allow the retry to
// succeed.
async function retry(f) {
  let rc;
  do {
    // Wait for all pending retry operations to complete. This is
    // normally empty on the first loop iteration.
    if (Module.retryOps.length) {
      await Promise.all(Module.retryOps);
      Module.retryOps = [];
    }
    rc = await f();
    // Retry on failure with new pending retry operations.
    // (rc is truthy only for non-OK codes, since SQLITE_OK === 0.)
  } while (rc && Module.retryOps.length);
  return rc;
}
return sqlite3;
}
// Helper function to use a more compact signature specification.
// "nnn:n" = three number args returning a number; ":s" = no args
// returning a string; ":v" = void return. Characters other than n/s
// in the argument list are ignored.
function decl(s) {
  const typeFor = { n: "number", s: "string", v: null };
  const m = s.match(/([ns@]*):([nsv@])/);
  const result = [];
  // Return type first (omitted entirely for unrecognized codes).
  if (m[2] in typeFor) {
    result.push(typeFor[m[2]]);
  }
  // Then the argument type list.
  const args = [];
  for (const c of m[1]) {
    if (c === "n" || c === "s") {
      args.push(typeFor[c]);
    }
  }
  result.push(args);
  return result;
}

View File

@@ -1,275 +0,0 @@
// Primary result codes.
// https://www.sqlite.org/rescode.html
export const SQLITE_OK = 0;
export const SQLITE_ERROR = 1;
export const SQLITE_INTERNAL = 2;
export const SQLITE_PERM = 3;
export const SQLITE_ABORT = 4;
export const SQLITE_BUSY = 5;
export const SQLITE_LOCKED = 6;
export const SQLITE_NOMEM = 7;
export const SQLITE_READONLY = 8;
export const SQLITE_INTERRUPT = 9;
export const SQLITE_IOERR = 10;
export const SQLITE_CORRUPT = 11;
export const SQLITE_NOTFOUND = 12;
export const SQLITE_FULL = 13;
export const SQLITE_CANTOPEN = 14;
export const SQLITE_PROTOCOL = 15;
export const SQLITE_EMPTY = 16;
export const SQLITE_SCHEMA = 17;
export const SQLITE_TOOBIG = 18;
export const SQLITE_CONSTRAINT = 19;
export const SQLITE_MISMATCH = 20;
export const SQLITE_MISUSE = 21;
export const SQLITE_NOLFS = 22;
export const SQLITE_AUTH = 23;
export const SQLITE_FORMAT = 24;
export const SQLITE_RANGE = 25;
export const SQLITE_NOTADB = 26;
export const SQLITE_NOTICE = 27;
export const SQLITE_WARNING = 28;
export const SQLITE_ROW = 100;
export const SQLITE_DONE = 101;
// Extended error codes.
export const SQLITE_IOERR_ACCESS = 3338;
export const SQLITE_IOERR_CHECKRESERVEDLOCK = 3594;
export const SQLITE_IOERR_CLOSE = 4106;
export const SQLITE_IOERR_DATA = 8202;
export const SQLITE_IOERR_DELETE = 2570;
export const SQLITE_IOERR_DELETE_NOENT = 5898;
export const SQLITE_IOERR_DIR_FSYNC = 1290;
export const SQLITE_IOERR_FSTAT = 1802;
export const SQLITE_IOERR_FSYNC = 1034;
export const SQLITE_IOERR_GETTEMPPATH = 6410;
export const SQLITE_IOERR_LOCK = 3850;
export const SQLITE_IOERR_NOMEM = 3082;
export const SQLITE_IOERR_READ = 266;
export const SQLITE_IOERR_RDLOCK = 2314;
export const SQLITE_IOERR_SEEK = 5642;
export const SQLITE_IOERR_SHORT_READ = 522;
export const SQLITE_IOERR_TRUNCATE = 1546;
export const SQLITE_IOERR_UNLOCK = 2058;
export const SQLITE_IOERR_VNODE = 6922;
export const SQLITE_IOERR_WRITE = 778;
export const SQLITE_IOERR_BEGIN_ATOMIC = 7434;
export const SQLITE_IOERR_COMMIT_ATOMIC = 7690;
export const SQLITE_IOERR_ROLLBACK_ATOMIC = 7946;
// Other extended result codes.
export const SQLITE_CONSTRAINT_CHECK = 275;
export const SQLITE_CONSTRAINT_COMMITHOOK = 531;
export const SQLITE_CONSTRAINT_FOREIGNKEY = 787;
export const SQLITE_CONSTRAINT_FUNCTION = 1043;
export const SQLITE_CONSTRAINT_NOTNULL = 1299;
export const SQLITE_CONSTRAINT_PINNED = 2835;
export const SQLITE_CONSTRAINT_PRIMARYKEY = 1555;
export const SQLITE_CONSTRAINT_ROWID = 2579;
export const SQLITE_CONSTRAINT_TRIGGER = 1811;
export const SQLITE_CONSTRAINT_UNIQUE = 2067;
export const SQLITE_CONSTRAINT_VTAB = 2323;
// Open flags.
// https://www.sqlite.org/c3ref/c_open_autoproxy.html
export const SQLITE_OPEN_READONLY = 0x00000001;
export const SQLITE_OPEN_READWRITE = 0x00000002;
export const SQLITE_OPEN_CREATE = 0x00000004;
export const SQLITE_OPEN_DELETEONCLOSE = 0x00000008;
export const SQLITE_OPEN_EXCLUSIVE = 0x00000010;
export const SQLITE_OPEN_AUTOPROXY = 0x00000020;
export const SQLITE_OPEN_URI = 0x00000040;
export const SQLITE_OPEN_MEMORY = 0x00000080;
export const SQLITE_OPEN_MAIN_DB = 0x00000100;
export const SQLITE_OPEN_TEMP_DB = 0x00000200;
export const SQLITE_OPEN_TRANSIENT_DB = 0x00000400;
export const SQLITE_OPEN_MAIN_JOURNAL = 0x00000800;
export const SQLITE_OPEN_TEMP_JOURNAL = 0x00001000;
export const SQLITE_OPEN_SUBJOURNAL = 0x00002000;
export const SQLITE_OPEN_SUPER_JOURNAL = 0x00004000;
export const SQLITE_OPEN_NOMUTEX = 0x00008000;
export const SQLITE_OPEN_FULLMUTEX = 0x00010000;
export const SQLITE_OPEN_SHAREDCACHE = 0x00020000;
export const SQLITE_OPEN_PRIVATECACHE = 0x00040000;
export const SQLITE_OPEN_WAL = 0x00080000;
export const SQLITE_OPEN_NOFOLLOW = 0x01000000;
// Locking levels.
// https://www.sqlite.org/c3ref/c_lock_exclusive.html
export const SQLITE_LOCK_NONE = 0;
export const SQLITE_LOCK_SHARED = 1;
export const SQLITE_LOCK_RESERVED = 2;
export const SQLITE_LOCK_PENDING = 3;
export const SQLITE_LOCK_EXCLUSIVE = 4;
// Device characteristics.
// https://www.sqlite.org/c3ref/c_iocap_atomic.html
export const SQLITE_IOCAP_ATOMIC = 0x00000001;
export const SQLITE_IOCAP_ATOMIC512 = 0x00000002;
export const SQLITE_IOCAP_ATOMIC1K = 0x00000004;
export const SQLITE_IOCAP_ATOMIC2K = 0x00000008;
export const SQLITE_IOCAP_ATOMIC4K = 0x00000010;
export const SQLITE_IOCAP_ATOMIC8K = 0x00000020;
export const SQLITE_IOCAP_ATOMIC16K = 0x00000040;
export const SQLITE_IOCAP_ATOMIC32K = 0x00000080;
export const SQLITE_IOCAP_ATOMIC64K = 0x00000100;
export const SQLITE_IOCAP_SAFE_APPEND = 0x00000200;
export const SQLITE_IOCAP_SEQUENTIAL = 0x00000400;
export const SQLITE_IOCAP_UNDELETABLE_WHEN_OPEN = 0x00000800;
export const SQLITE_IOCAP_POWERSAFE_OVERWRITE = 0x00001000;
export const SQLITE_IOCAP_IMMUTABLE = 0x00002000;
export const SQLITE_IOCAP_BATCH_ATOMIC = 0x00004000;
// xAccess flags.
// https://www.sqlite.org/c3ref/c_access_exists.html
export const SQLITE_ACCESS_EXISTS = 0;
export const SQLITE_ACCESS_READWRITE = 1;
export const SQLITE_ACCESS_READ = 2;
// File control opcodes
// https://www.sqlite.org/c3ref/c_fcntl_begin_atomic_write.html#sqlitefcntlbeginatomicwrite
export const SQLITE_FCNTL_LOCKSTATE = 1;
export const SQLITE_FCNTL_GET_LOCKPROXYFILE = 2;
export const SQLITE_FCNTL_SET_LOCKPROXYFILE = 3;
export const SQLITE_FCNTL_LAST_ERRNO = 4;
export const SQLITE_FCNTL_SIZE_HINT = 5;
export const SQLITE_FCNTL_CHUNK_SIZE = 6;
export const SQLITE_FCNTL_FILE_POINTER = 7;
export const SQLITE_FCNTL_SYNC_OMITTED = 8;
export const SQLITE_FCNTL_WIN32_AV_RETRY = 9;
export const SQLITE_FCNTL_PERSIST_WAL = 10;
export const SQLITE_FCNTL_OVERWRITE = 11;
export const SQLITE_FCNTL_VFSNAME = 12;
export const SQLITE_FCNTL_POWERSAFE_OVERWRITE = 13;
export const SQLITE_FCNTL_PRAGMA = 14;
export const SQLITE_FCNTL_BUSYHANDLER = 15;
export const SQLITE_FCNTL_TEMPFILENAME = 16;
export const SQLITE_FCNTL_MMAP_SIZE = 18;
export const SQLITE_FCNTL_TRACE = 19;
export const SQLITE_FCNTL_HAS_MOVED = 20;
export const SQLITE_FCNTL_SYNC = 21;
export const SQLITE_FCNTL_COMMIT_PHASETWO = 22;
export const SQLITE_FCNTL_WIN32_SET_HANDLE = 23;
export const SQLITE_FCNTL_WAL_BLOCK = 24;
export const SQLITE_FCNTL_ZIPVFS = 25;
export const SQLITE_FCNTL_RBU = 26;
export const SQLITE_FCNTL_VFS_POINTER = 27;
export const SQLITE_FCNTL_JOURNAL_POINTER = 28;
export const SQLITE_FCNTL_WIN32_GET_HANDLE = 29;
export const SQLITE_FCNTL_PDB = 30;
export const SQLITE_FCNTL_BEGIN_ATOMIC_WRITE = 31;
export const SQLITE_FCNTL_COMMIT_ATOMIC_WRITE = 32;
export const SQLITE_FCNTL_ROLLBACK_ATOMIC_WRITE = 33;
export const SQLITE_FCNTL_LOCK_TIMEOUT = 34;
export const SQLITE_FCNTL_DATA_VERSION = 35;
export const SQLITE_FCNTL_SIZE_LIMIT = 36;
export const SQLITE_FCNTL_CKPT_DONE = 37;
export const SQLITE_FCNTL_RESERVE_BYTES = 38;
export const SQLITE_FCNTL_CKPT_START = 39;
// Fundamental datatypes.
// https://www.sqlite.org/c3ref/c_blob.html
export const SQLITE_INTEGER = 1;
export const SQLITE_FLOAT = 2;
export const SQLITE_TEXT = 3;
export const SQLITE_BLOB = 4;
export const SQLITE_NULL = 5;
// Special destructor behavior.
// https://www.sqlite.org/c3ref/c_static.html
export const SQLITE_STATIC = 0;
export const SQLITE_TRANSIENT = -1;
// Text encodings.
// https://sqlite.org/c3ref/c_any.html
export const SQLITE_UTF8 = 1; /* IMP: R-37514-35566 */
export const SQLITE_UTF16LE = 2; /* IMP: R-03371-37637 */
export const SQLITE_UTF16BE = 3; /* IMP: R-51971-34154 */
export const SQLITE_UTF16 = 4; /* Use native byte order */
// Module constraint ops.
export const SQLITE_INDEX_CONSTRAINT_EQ = 2;
export const SQLITE_INDEX_CONSTRAINT_GT = 4;
export const SQLITE_INDEX_CONSTRAINT_LE = 8;
export const SQLITE_INDEX_CONSTRAINT_LT = 16;
export const SQLITE_INDEX_CONSTRAINT_GE = 32;
export const SQLITE_INDEX_CONSTRAINT_MATCH = 64;
export const SQLITE_INDEX_CONSTRAINT_LIKE = 65;
export const SQLITE_INDEX_CONSTRAINT_GLOB = 66;
export const SQLITE_INDEX_CONSTRAINT_REGEXP = 67;
export const SQLITE_INDEX_CONSTRAINT_NE = 68;
export const SQLITE_INDEX_CONSTRAINT_ISNOT = 69;
export const SQLITE_INDEX_CONSTRAINT_ISNOTNULL = 70;
export const SQLITE_INDEX_CONSTRAINT_ISNULL = 71;
export const SQLITE_INDEX_CONSTRAINT_IS = 72;
export const SQLITE_INDEX_CONSTRAINT_FUNCTION = 150;
export const SQLITE_INDEX_SCAN_UNIQUE = 1; /* Scan visits at most = 1 row */
// Function flags
export const SQLITE_DETERMINISTIC = 0x000000800;
export const SQLITE_DIRECTONLY = 0x000080000;
export const SQLITE_SUBTYPE = 0x000100000;
export const SQLITE_INNOCUOUS = 0x000200000;
// Sync flags
export const SQLITE_SYNC_NORMAL = 0x00002;
export const SQLITE_SYNC_FULL = 0x00003;
export const SQLITE_SYNC_DATAONLY = 0x00010;
// Authorizer action codes
export const SQLITE_CREATE_INDEX = 1;
export const SQLITE_CREATE_TABLE = 2;
export const SQLITE_CREATE_TEMP_INDEX = 3;
export const SQLITE_CREATE_TEMP_TABLE = 4;
export const SQLITE_CREATE_TEMP_TRIGGER = 5;
export const SQLITE_CREATE_TEMP_VIEW = 6;
export const SQLITE_CREATE_TRIGGER = 7;
export const SQLITE_CREATE_VIEW = 8;
export const SQLITE_DELETE = 9;
export const SQLITE_DROP_INDEX = 10;
export const SQLITE_DROP_TABLE = 11;
export const SQLITE_DROP_TEMP_INDEX = 12;
export const SQLITE_DROP_TEMP_TABLE = 13;
export const SQLITE_DROP_TEMP_TRIGGER = 14;
export const SQLITE_DROP_TEMP_VIEW = 15;
export const SQLITE_DROP_TRIGGER = 16;
export const SQLITE_DROP_VIEW = 17;
export const SQLITE_INSERT = 18;
export const SQLITE_PRAGMA = 19;
export const SQLITE_READ = 20;
export const SQLITE_SELECT = 21;
export const SQLITE_TRANSACTION = 22;
export const SQLITE_UPDATE = 23;
export const SQLITE_ATTACH = 24;
export const SQLITE_DETACH = 25;
export const SQLITE_ALTER_TABLE = 26;
export const SQLITE_REINDEX = 27;
export const SQLITE_ANALYZE = 28;
export const SQLITE_CREATE_VTABLE = 29;
export const SQLITE_DROP_VTABLE = 30;
export const SQLITE_FUNCTION = 31;
export const SQLITE_SAVEPOINT = 32;
export const SQLITE_COPY = 0;
export const SQLITE_RECURSIVE = 33;
// Authorizer return codes
export const SQLITE_DENY = 1;
export const SQLITE_IGNORE = 2;
// Limit categories
export const SQLITE_LIMIT_LENGTH = 0;
export const SQLITE_LIMIT_SQL_LENGTH = 1;
export const SQLITE_LIMIT_COLUMN = 2;
export const SQLITE_LIMIT_EXPR_DEPTH = 3;
export const SQLITE_LIMIT_COMPOUND_SELECT = 4;
export const SQLITE_LIMIT_VDBE_OP = 5;
export const SQLITE_LIMIT_FUNCTION_ARG = 6;
export const SQLITE_LIMIT_ATTACHED = 7;
export const SQLITE_LIMIT_LIKE_PATTERN_LENGTH = 8;
export const SQLITE_LIMIT_VARIABLE_NUMBER = 9;
export const SQLITE_LIMIT_TRIGGER_DEPTH = 10;
export const SQLITE_LIMIT_WORKER_THREADS = 11;
export const SQLITE_PREPARE_PERSISTENT = 0x01;
export const SQLITE_PREPARE_NORMALIZED = 0x02;
export const SQLITE_PREPARE_NO_VTAB = 0x04;

View File

@@ -1,62 +0,0 @@
/* eslint-disable no-var */
declare namespace Asyncify {
function handleAsync(f: () => Promise<any>);
}
declare function UTF8ToString(ptr: number): string;
declare function lengthBytesUTF8(s: string): number;
declare function stringToUTF8(s: string, p: number, n: number);
declare function ccall(name: string, returns: string, args: Array<any>, options?: object): any;
declare function getValue(ptr: number, type: string): number;
declare function setValue(ptr: number, value: number, type: string): number;
declare function mergeInto(library: object, methods: object): void;
declare var HEAPU8: Uint8Array;
declare var HEAPU32: Uint32Array;
declare var LibraryManager;
declare var Module;
declare var _vfsAccess;
declare var _vfsCheckReservedLock;
declare var _vfsClose;
declare var _vfsDelete;
declare var _vfsDeviceCharacteristics;
declare var _vfsFileControl;
declare var _vfsFileSize;
declare var _vfsLock;
declare var _vfsOpen;
declare var _vfsRead;
declare var _vfsSectorSize;
declare var _vfsSync;
declare var _vfsTruncate;
declare var _vfsUnlock;
declare var _vfsWrite;
declare var _jsFunc;
declare var _jsStep;
declare var _jsFinal;
declare var _modStruct;
declare var _modCreate;
declare var _modConnect;
declare var _modBestIndex;
declare var _modDisconnect;
declare var _modDestroy;
declare var _modOpen;
declare var _modClose;
declare var _modFilter;
declare var _modNext;
declare var _modEof;
declare var _modColumn;
declare var _modRowid;
declare var _modUpdate;
declare var _modBegin;
declare var _modSync;
declare var _modCommit;
declare var _modRollback;
declare var _modFindFunction;
declare var _modRename;
declare var _jsAuth;
declare var _jsProgress;

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

View File

@@ -12,7 +12,6 @@ import type {
TIssuesResponse,
TIssueSubIssues,
} from "@plane/types";
import { getIssuesShouldFallbackToServer } from "@plane/utils";
// services
import { APIService } from "@/services/api.service";
@@ -78,12 +77,7 @@ export class IssueService extends APIService {
queries?: Partial<Record<TIssueParams, string | boolean>>,
config = {}
): Promise<TIssuesResponse> {
if (getIssuesShouldFallbackToServer(queries) || this.serviceType !== EIssueServiceType.ISSUES) {
return await this.getIssuesFromServer(workspaceSlug, projectId, queries, config);
}
const { persistence } = await import("@/local-db/storage.sqlite");
const response = await persistence.getIssues(workspaceSlug, projectId, queries, config);
return response as TIssuesResponse;
return this.getIssuesFromServer(workspaceSlug, projectId, queries, config);
}
async getDeletedIssues(workspaceSlug: string, projectId: string, queries?: any): Promise<TIssuesResponse> {
@@ -115,11 +109,6 @@ export class IssueService extends APIService {
params: queries,
})
.then(async (response) => {
// skip issue update when the service type is epic
if (response.data && this.serviceType === EIssueServiceType.ISSUES) {
const { updateIssue } = await import("@/local-db/utils/load-issues");
updateIssue({ ...response.data, is_local_update: 1 });
}
// add is_epic flag when the service type is epic
if (response.data && this.serviceType === EIssueServiceType.EPICS) {
response.data.is_epic = true;
@@ -135,13 +124,7 @@ export class IssueService extends APIService {
return this.get(`/api/workspaces/${workspaceSlug}/projects/${projectId}/${this.serviceType}/list/`, {
params: { issues: issueIds.join(",") },
})
.then(async (response) => {
if (response?.data && Array.isArray(response?.data) && this.serviceType === EIssueServiceType.ISSUES) {
const { addIssuesBulk } = await import("@/local-db/utils/load-issues");
addIssuesBulk(response.data);
}
return response?.data;
})
.then(async (response) => response?.data)
.catch((error) => {
throw error?.response?.data;
});
@@ -243,10 +226,6 @@ export class IssueService extends APIService {
}
async deleteIssue(workspaceSlug: string, projectId: string, issuesId: string): Promise<any> {
if (this.serviceType === EIssueServiceType.ISSUES) {
const { deleteIssueFromLocal } = await import("@/local-db/utils/load-issues");
deleteIssueFromLocal(issuesId);
}
return this.delete(`/api/workspaces/${workspaceSlug}/projects/${projectId}/${this.serviceType}/${issuesId}/`)
.then((response) => response?.data)
.catch((error) => {
@@ -353,13 +332,7 @@ export class IssueService extends APIService {
async bulkOperations(workspaceSlug: string, projectId: string, data: TBulkOperationsPayload): Promise<any> {
return this.post(`/api/workspaces/${workspaceSlug}/projects/${projectId}/bulk-operation-issues/`, data)
.then(async (response) => {
if (this.serviceType === EIssueServiceType.ISSUES) {
const { persistence } = await import("@/local-db/storage.sqlite");
persistence.syncIssues(projectId);
}
return response?.data;
})
.then(async (response) => response?.data)
.catch((error) => {
throw error?.response?.data;
});
@@ -373,13 +346,7 @@ export class IssueService extends APIService {
}
): Promise<any> {
return this.delete(`/api/workspaces/${workspaceSlug}/projects/${projectId}/bulk-delete-issues/`, data)
.then(async (response) => {
if (this.serviceType === EIssueServiceType.ISSUES) {
const { persistence } = await import("@/local-db/storage.sqlite");
persistence.syncIssues(projectId);
}
return response?.data;
})
.then(async (response) => response?.data)
.catch((error) => {
throw error?.response?.data;
});
@@ -395,13 +362,7 @@ export class IssueService extends APIService {
archived_at: string;
}> {
return this.post(`/api/workspaces/${workspaceSlug}/projects/${projectId}/bulk-archive-issues/`, data)
.then(async (response) => {
if (this.serviceType === EIssueServiceType.ISSUES) {
const { persistence } = await import("@/local-db/storage.sqlite");
persistence.syncIssues(projectId);
}
return response?.data;
})
.then(async (response) => response?.data)
.catch((error) => {
throw error?.response?.data;
});
@@ -479,11 +440,6 @@ export class IssueService extends APIService {
params: queries,
})
.then(async (response) => {
// skip issue update when the service type is epic
if (response.data && this.serviceType === EIssueServiceType.ISSUES) {
const { updateIssue } = await import("@/local-db/utils/load-issues");
updateIssue({ ...response.data, is_local_update: 1 });
}
// add is_epic flag when the service type is epic
if (response.data && this.serviceType === EIssueServiceType.EPICS) {
response.data.is_epic = true;

View File

@@ -15,7 +15,6 @@ import type { DistributionUpdates } from "@plane/utils";
import { orderCycles, shouldFilterCycle, getDate, updateDistribution } from "@plane/utils";
// helpers
// services
import { syncIssuesWithDeletedCycles } from "@/local-db/utils/load-workspace";
import { CycleService } from "@/services/cycle.service";
import { CycleArchiveService } from "@/services/cycle_archive.service";
import { IssueService } from "@/services/issue";
@@ -618,7 +617,6 @@ export class CycleStore implements ICycleStore {
delete this.cycleMap[cycleId];
delete this.activeCycleIdMap[cycleId];
if (this.rootStore.favorite.entityMap[cycleId]) this.rootStore.favorite.removeFavoriteFromStore(cycleId);
syncIssuesWithDeletedCycles([cycleId]);
});
});

View File

@@ -9,8 +9,6 @@ import type {
} from "@plane/types";
import { EInboxIssueStatus } from "@plane/types";
// helpers
// local db
import { addIssueToPersistanceLayer } from "@/local-db/utils/utils";
// services
import { InboxIssueService } from "@/services/inbox";
import { IssueService } from "@/services/issue";
@@ -109,7 +107,6 @@ export class InboxIssueStore implements IInboxIssueStore {
if (status === EInboxIssueStatus.ACCEPTED) {
const updatedIssue = { ...this.issue, ...inboxIssue.issue };
this.store.issue.issues.addIssue([updatedIssue]);
await addIssueToPersistanceLayer(updatedIssue);
}
} catch {
runInAction(() => set(this, "status", previousData.status));

View File

@@ -243,7 +243,7 @@ export class CycleIssuesFilter extends IssueFilterHelperStore implements ICycleI
});
if (this.getShouldClearIssues(updatedDisplayFilters)) {
this.rootIssueStore.cycleIssues.clear(true, true); // clear issues for local store when some filters like layout changes
this.rootIssueStore.cycleIssues.clear(true); // clear issues for local store when some filters like layout changes
}
if (this.getShouldReFetchIssues(updatedDisplayFilters)) {

View File

@@ -15,7 +15,6 @@ import type {
import { getDistributionPathsPostUpdate } from "@plane/utils";
//local
import { storage } from "@/lib/local-storage";
import { persistence } from "@/local-db/storage.sqlite";
import type { IBaseIssuesStore } from "../helpers/base-issues.store";
import { BaseIssuesStore } from "../helpers/base-issues.store";
//
@@ -190,8 +189,7 @@ export class CycleIssues extends BaseIssuesStore implements ICycleIssues {
// set loader and clear store
runInAction(() => {
this.setLoader(loadType);
this.clear(!isExistingPaginationOptions, false); // clear while fetching from server.
if (!this.groupBy) this.clear(!isExistingPaginationOptions, true); // clear while using local to have the no load effect.
this.clear(!isExistingPaginationOptions); // clear while fetching from server.
});
// get params from pagination options
@@ -315,7 +313,6 @@ export class CycleIssues extends BaseIssuesStore implements ICycleIssues {
);
// call fetch issues
if (this.paginationOptions) {
await persistence.syncIssues(projectId.toString());
await this.fetchIssues(workspaceSlug, projectId, "mutation", this.paginationOptions, cycleId);
}

View File

@@ -23,9 +23,7 @@ import type {
import { EIssueServiceType, EIssueLayoutTypes } from "@plane/types";
// helpers
import { convertToISODateString } from "@plane/utils";
// local-db
import { SPECIAL_ORDER_BY } from "@/local-db/utils/query-constructor";
import { updatePersistentLayer } from "@/local-db/utils/utils";
// plane web imports
import { workItemSortWithOrderByExtended } from "@/plane-web/store/issue/helpers/base-issue.store";
// services
import { CycleService } from "@/services/cycle.service";
@@ -60,7 +58,7 @@ export interface IBaseIssuesStore {
//actions
removeIssue: (workspaceSlug: string, projectId: string, issueId: string) => Promise<void>;
clear(shouldClearPaginationOptions?: boolean, clearForLocal?: boolean): void;
clear(shouldClearPaginationOptions?: boolean): void;
// helper methods
getIssueIds: (groupId?: string, subGroupId?: string) => string[] | undefined;
issuesSortWithOrderBy(issueIds: string[], key: Partial<TIssueOrderByOptions>): string[];
@@ -281,13 +279,7 @@ export abstract class BaseIssuesStore implements IBaseIssuesStore {
const orderBy = displayFilters.order_by;
// Temporary code to fix no load order by
if (
this.rootIssueStore.rootStore.user.localDBEnabled &&
this.rootIssueStore.rootStore.router.projectId &&
layout !== EIssueLayoutTypes.SPREADSHEET &&
orderBy &&
Object.keys(SPECIAL_ORDER_BY).includes(orderBy)
) {
if (this.rootIssueStore.rootStore.router.projectId && layout !== EIssueLayoutTypes.SPREADSHEET && orderBy) {
return "sort_order";
}
@@ -483,7 +475,7 @@ export abstract class BaseIssuesStore implements IBaseIssuesStore {
// Update all the GroupIds to this Store's groupedIssueIds and update Individual group issue counts
runInAction(() => {
this.clear(shouldClearPaginationOptions, true);
this.clear(shouldClearPaginationOptions);
this.updateGroupedIssueIds(groupedIssues, groupedIssueCount);
this.loader[getGroupKey()] = undefined;
});
@@ -549,8 +541,6 @@ export abstract class BaseIssuesStore implements IBaseIssuesStore {
// If shouldUpdateList is true, call fetchParentStats
shouldUpdateList && (await this.fetchParentStats(workspaceSlug, projectId));
updatePersistentLayer(response.id);
return response;
}
@@ -1162,11 +1152,7 @@ export abstract class BaseIssuesStore implements IBaseIssuesStore {
/**
* Method called to clear out the current store
*/
clear(shouldClearPaginationOptions = true, clearForLocal = false) {
if (
(this.rootIssueStore.rootStore.user?.localDBEnabled && clearForLocal) ||
(!this.rootIssueStore.rootStore.user?.localDBEnabled && !clearForLocal)
) {
clear(shouldClearPaginationOptions = true) {
runInAction(() => {
this.groupedIssueIds = undefined;
this.issuePaginationData = {};
@@ -1178,7 +1164,6 @@ export abstract class BaseIssuesStore implements IBaseIssuesStore {
this.controller.abort();
this.controller = new AbortController();
}
}
/**
* Method called to add issue id to list.

View File

@@ -3,8 +3,6 @@ import { computedFn } from "mobx-utils";
// types
import type { TIssue, TIssueServiceType } from "@plane/types";
import { EIssueServiceType } from "@plane/types";
// local
import { persistence } from "@/local-db/storage.sqlite";
// services
import { IssueArchiveService, WorkspaceDraftService, IssueService } from "@/services/issue";
// types
@@ -32,7 +30,6 @@ export interface IIssueStoreActions {
export interface IIssueStore extends IIssueStoreActions {
getIsFetchingIssueDetails: (issueId: string | undefined) => boolean;
getIsLocalDBIssueDescription: (issueId: string | undefined) => boolean;
// helper methods
getIssueById: (issueId: string) => TIssue | undefined;
getIssueIdByIdentifier: (issueIdentifier: string) => string | undefined;
@@ -40,7 +37,6 @@ export interface IIssueStore extends IIssueStoreActions {
export class IssueStore implements IIssueStore {
fetchingIssueDetails: string | undefined = undefined;
localDBIssueDescription: string | undefined = undefined;
// root store
rootIssueDetailStore: IIssueDetail;
// services
@@ -53,7 +49,6 @@ export class IssueStore implements IIssueStore {
constructor(rootStore: IIssueDetail, serviceType: TIssueServiceType) {
makeObservable(this, {
fetchingIssueDetails: observable.ref,
localDBIssueDescription: observable.ref,
});
// root store
this.rootIssueDetailStore = rootStore;
@@ -71,12 +66,6 @@ export class IssueStore implements IIssueStore {
return this.fetchingIssueDetails === issueId;
});
getIsLocalDBIssueDescription = computedFn((issueId: string | undefined) => {
if (!issueId) return false;
return this.localDBIssueDescription === issueId;
});
// helper methods
getIssueById = computedFn((issueId: string) => {
if (!issueId) return undefined;
@@ -94,26 +83,12 @@ export class IssueStore implements IIssueStore {
expand: "issue_reactions,issue_attachments,issue_link,parent",
};
let issue: TIssue | undefined;
// fetch issue from local db
if (this.serviceType === EIssueServiceType.ISSUES) {
issue = await persistence.getIssue(issueId);
}
this.fetchingIssueDetails = issueId;
if (issue) {
this.addIssueToStore(issue);
this.localDBIssueDescription = issueId;
}
issue = await this.issueService.retrieve(workspaceSlug, projectId, issueId, query);
const issue = await this.issueService.retrieve(workspaceSlug, projectId, issueId, query);
if (!issue) throw new Error("Work item not found");
const issuePayload = this.addIssueToStore(issue);
this.localDBIssueDescription = undefined;
this.rootIssueDetailStore.rootIssueStore.issues.addIssue([issuePayload]);

View File

@@ -11,9 +11,7 @@ import type {
TIssueServiceType,
TLoader,
} from "@plane/types";
import { EIssueServiceType } from "@plane/types";
// services
import { updatePersistentLayer } from "@/local-db/utils/utils";
import { IssueService } from "@/services/issue";
// store
import type { IIssueDetail } from "./root.store";
@@ -198,10 +196,6 @@ export class IssueSubIssuesStore implements IIssueSubIssuesStore {
this.subIssues[parentIssueId].length
);
if (this.serviceType === EIssueServiceType.ISSUES) {
updatePersistentLayer([parentIssueId, ...issueIds]);
}
return;
};
@@ -298,10 +292,6 @@ export class IssueSubIssuesStore implements IIssueSubIssuesStore {
);
});
if (this.serviceType === EIssueServiceType.ISSUES) {
updatePersistentLayer([parentIssueId]);
}
return;
};
@@ -335,10 +325,6 @@ export class IssueSubIssuesStore implements IIssueSubIssuesStore {
);
});
if (this.serviceType === EIssueServiceType.ISSUES) {
updatePersistentLayer([parentIssueId]);
}
return;
};

View File

@@ -1,4 +1,4 @@
import { clone, set, update } from "lodash-es";
import { set, update } from "lodash-es";
import { action, makeObservable, observable, runInAction } from "mobx";
import { computedFn } from "mobx-utils";
// types
@@ -7,8 +7,6 @@ import type { TIssue } from "@plane/types";
import { getCurrentDateTimeInISO } from "@plane/utils";
import { rootStore } from "@/lib/store-context";
// services
import { deleteIssueFromLocal } from "@/local-db/utils/load-issues";
import { updatePersistentLayer } from "@/local-db/utils/utils";
import { IssueService } from "@/services/issue";
export type IIssueStore = {
@@ -103,17 +101,12 @@ export class IssueStore implements IIssueStore {
*/
updateIssue = (issueId: string, issue: Partial<TIssue>) => {
if (!issue || !issueId || !this.issuesMap[issueId]) return;
const issueBeforeUpdate = clone(this.issuesMap[issueId]);
runInAction(() => {
set(this.issuesMap, [issueId, "updated_at"], getCurrentDateTimeInISO());
Object.keys(issue).forEach((key) => {
set(this.issuesMap, [issueId, key], issue[key as keyof TIssue]);
});
});
if (!issueBeforeUpdate.is_epic) {
updatePersistentLayer(issueId);
}
};
/**
@@ -126,7 +119,6 @@ export class IssueStore implements IIssueStore {
runInAction(() => {
delete this.issuesMap[issueId];
});
deleteIssueFromLocal(issueId);
};
// helper methods

View File

@@ -248,7 +248,7 @@ export class ModuleIssuesFilter extends IssueFilterHelperStore implements IModul
});
if (this.getShouldClearIssues(updatedDisplayFilters)) {
this.rootIssueStore.moduleIssues.clear(true, true); // clear issues for local store when some filters like layout changes
this.rootIssueStore.moduleIssues.clear(true); // clear issues for local store when some filters like layout changes
}
if (this.getShouldReFetchIssues(updatedDisplayFilters)) {

View File

@@ -137,8 +137,7 @@ export class ModuleIssues extends BaseIssuesStore implements IModuleIssues {
// set loader and clear store
runInAction(() => {
this.setLoader(loadType);
this.clear(!isExistingPaginationOptions, false); // clear while fetching from server.
if (!this.groupBy) this.clear(!isExistingPaginationOptions, true); // clear while using local to have the no load effect.
this.clear(!isExistingPaginationOptions); // clear while fetching from server.
});
// get params from pagination options

View File

@@ -257,7 +257,7 @@ export class ProjectViewIssuesFilter extends IssueFilterHelperStore implements I
});
if (this.getShouldClearIssues(updatedDisplayFilters)) {
this.rootIssueStore.projectIssues.clear(true, true); // clear issues for local store when some filters like layout changes
this.rootIssueStore.projectIssues.clear(true); // clear issues for local store when some filters like layout changes
}
if (this.getShouldReFetchIssues(updatedDisplayFilters)) {

View File

@@ -94,8 +94,7 @@ export class ProjectViewIssues extends BaseIssuesStore implements IProjectViewIs
// set loader and clear store
runInAction(() => {
this.setLoader(loadType);
this.clear(!isExistingPaginationOptions, false); // clear while fetching from server.
if (!this.groupBy) this.clear(!isExistingPaginationOptions, true); // clear while using local to have the no load effect.
this.clear(!isExistingPaginationOptions); // clear while fetching from server.
});
// get params from pagination options

View File

@@ -231,7 +231,7 @@ export class ProjectIssuesFilter extends IssueFilterHelperStore implements IProj
});
if (this.getShouldClearIssues(updatedDisplayFilters)) {
this.rootIssueStore.projectIssues.clear(true, true); // clear issues for local store when some filters like layout changes
this.rootIssueStore.projectIssues.clear(true); // clear issues for local store when some filters like layout changes
}
if (this.getShouldReFetchIssues(updatedDisplayFilters)) {

View File

@@ -102,8 +102,7 @@ export class ProjectIssues extends BaseIssuesStore implements IProjectIssues {
// set loader and clear store
runInAction(() => {
this.setLoader(loadType);
this.clear(!isExistingPaginationOptions, false); // clear while fetching from server.
if (!this.groupBy) this.clear(!isExistingPaginationOptions, true); // clear while using local to have the no load effect.
this.clear(!isExistingPaginationOptions); // clear while fetching from server.
});
// get params from pagination options

View File

@@ -17,8 +17,6 @@ import type {
TBulkOperationsPayload,
} from "@plane/types";
import { getCurrentDateTimeInISO, convertToISODateString } from "@plane/utils";
// local-db
import { addIssueToPersistanceLayer } from "@/local-db/utils/utils";
// services
import workspaceDraftService from "@/services/issue/workspace_draft.service";
// types
@@ -350,9 +348,6 @@ export class WorkspaceDraftIssues implements IWorkspaceDraftIssues {
});
}
// sync issue to local db
addIssueToPersistanceLayer({ ...payload, ...response });
// Update draft issue count in workspaceUserInfo
this.updateWorkspaceUserDraftIssueCount(workspaceSlug, -1);
});

View File

@@ -6,7 +6,6 @@ import type { IIssueLabel, IIssueLabelTree } from "@plane/types";
// helpers
import { buildTree } from "@plane/utils";
// services
import { syncIssuesWithDeletedLabels } from "@/local-db/utils/load-workspace";
import { IssueLabelService } from "@/services/issue";
// store
import type { CoreRootStore } from "./root.store";
@@ -299,7 +298,6 @@ export class LabelStore implements ILabelStore {
runInAction(() => {
delete this.labelMap[labelId];
});
syncIssuesWithDeletedLabels([labelId]);
});
};
}

View File

@@ -7,7 +7,6 @@ import type { DistributionUpdates } from "@plane/utils";
import { updateDistribution, orderModules, shouldFilterModule } from "@plane/utils";
// helpers
// services
import { syncIssuesWithDeletedModules } from "@/local-db/utils/load-workspace";
import { ModuleService } from "@/services/module.service";
import { ModuleArchiveService } from "@/services/module_archive.service";
import { ProjectService } from "@/services/project";
@@ -453,7 +452,6 @@ export class ModulesStore implements IModuleStore {
runInAction(() => {
delete this.moduleMap[moduleId];
if (this.rootStore.favorite.entityMap[moduleId]) this.rootStore.favorite.removeFavoriteFromStore(moduleId);
syncIssuesWithDeletedModules([moduleId]);
});
});
};

View File

@@ -7,7 +7,6 @@ import type { IState } from "@plane/types";
// helpers
import { sortStates } from "@plane/utils";
// plane web
import { syncIssuesWithDeletedStates } from "@/local-db/utils/load-workspace";
import { ProjectStateService } from "@/plane-web/services/project/project-state.service";
import type { RootStore } from "@/plane-web/store/root.store";
@@ -248,7 +247,6 @@ export class StateStore implements IStateStore {
await this.stateService.deleteState(workspaceSlug, projectId, stateId).then(() => {
runInAction(() => {
delete this.stateMap[stateId];
syncIssuesWithDeletedStates([stateId]);
});
});
};

View File

@@ -3,8 +3,6 @@ import { action, makeObservable, observable, runInAction, computed } from "mobx"
// plane imports
import { EUserPermissions, API_BASE_URL } from "@plane/constants";
import type { IUser, TUserPermissions } from "@plane/types";
// local
import { persistence } from "@/local-db/storage.sqlite";
// plane web imports
import type { RootStore } from "@/plane-web/store/root.store";
import type { IUserPermissionStore } from "@/plane-web/store/user/permission.store";
@@ -48,7 +46,6 @@ export interface IUserStore {
reset: () => void;
signOut: () => Promise<void>;
// computed
localDBEnabled: boolean;
canPerformAnyCreateAction: boolean;
projectsWithCreatePermissions: { [projectId: string]: number } | null;
}
@@ -99,8 +96,6 @@ export class UserStore implements IUserStore {
// computed
canPerformAnyCreateAction: computed,
projectsWithCreatePermissions: computed,
localDBEnabled: computed,
});
}
@@ -253,7 +248,6 @@ export class UserStore implements IUserStore {
*/
signOut = async (): Promise<void> => {
await this.authService.signOut(API_BASE_URL);
await persistence.clearStorage(true);
this.store.resetOnSignOut();
};
@@ -296,8 +290,4 @@ export class UserStore implements IUserStore {
const filteredProjects = this.fetchProjectsWithCreatePermissions();
return filteredProjects ? Object.keys(filteredProjects).length > 0 : false;
}
get localDBEnabled() {
return this.userSettings.canUseLocalDB;
}
}

View File

@@ -14,7 +14,6 @@ export interface IUserSettingsStore {
isLoading: boolean;
error: TError | undefined;
data: IUserSettings;
canUseLocalDB: boolean;
sidebarCollapsed: boolean;
isScrolled: boolean;
// actions
@@ -42,7 +41,6 @@ export class UserSettingsStore implements IUserSettingsStore {
invites: undefined,
},
};
canUseLocalDB: boolean = false;
// services
userService: UserService;
@@ -52,7 +50,6 @@ export class UserSettingsStore implements IUserSettingsStore {
isLoading: observable.ref,
error: observable,
data: observable,
canUseLocalDB: observable.ref,
sidebarCollapsed: observable.ref,
isScrolled: observable.ref,
// actions

View File

@@ -266,7 +266,6 @@ export const getComputedDisplayFilters = (
defaultValues?: IIssueDisplayFilterOptions
): IIssueDisplayFilterOptions => {
const filters = !isEmpty(displayFilters) ? displayFilters : defaultValues;
return {
calendar: {
show_weekends: filters?.calendar?.show_weekends || false,
@@ -307,20 +306,6 @@ export const getComputedDisplayProperties = (
issue_type: displayProperties?.issue_type ?? true,
});
/**
* This is to check if the issues list api should fall back to server or use local db
* @param queries
* @returns
*/
export const getIssuesShouldFallbackToServer = (queries: any) => {
// If there is expand query and is not grouped then fallback to server
if (!isEmpty(queries.expand as string) && !queries.group_by) return true;
// If query has mentions then fallback to server
if (!isEmpty(queries.mentions)) return true;
return false;
};
export const generateWorkItemLink = ({
workspaceSlug,
projectId,
@@ -339,8 +324,8 @@ export const generateWorkItemLink = ({
isEpic?: boolean;
}): string => {
const archiveIssueLink = `/${workspaceSlug}/projects/${projectId}/archives/issues/${issueId}`;
const epicLink = `/${workspaceSlug}/projects/${projectId}/epics/${issueId}`;
const workItemLink = `/${workspaceSlug}/browse/${projectIdentifier}-${sequenceId}/`;
const epicLink = workItemLink;
return isArchived ? archiveIssueLink : isEpic ? epicLink : workItemLink;
};