import csv: add batch snackbar

Author: Sidney Alcantara
Date: 2022-06-08 11:43:08 +10:00
Parent: fdb839c3b0
Commit: 6886893233
5 changed files with 89 additions and 55 deletions

src/atoms/tableScope/rowActions.ts

@@ -21,7 +21,7 @@ import {
   _bulkWriteDbAtom,
 } from "./table";
-import { TableRow, TableRowRef } from "@src/types/table";
+import { TableRow, BulkWriteFunction } from "@src/types/table";
 import {
   rowyUser,
   generateId,
@@ -213,10 +213,11 @@ export const deleteRowAtom = atom(
 export interface IBulkAddRowsOptions {
   rows: Partial<TableRow>[];
   collection: string;
+  onBatchCommit?: Parameters<BulkWriteFunction>[1];
 }
 export const bulkAddRowsAtom = atom(
   null,
-  async (get, _, { rows, collection }: IBulkAddRowsOptions) => {
+  async (get, _, { rows, collection, onBatchCommit }: IBulkAddRowsOptions) => {
     const bulkWriteDb = get(_bulkWriteDbAtom);
     if (!bulkWriteDb) throw new Error("Cannot write to database");
     const tableSettings = get(tableSettingsAtom);
@@ -254,9 +255,9 @@ export const bulkAddRowsAtom = atom(
     }));

     // Write to db
-    await bulkWriteDb(operations);
+    await bulkWriteDb(operations, onBatchCommit);

-    // Write an audit entry for each row
+    // TODO: Write an audit entry for each row
     // if (auditChange) {
     //   const auditChangePromises: Promise<void>[] = [];
     //   for (const operation of operations) {
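A note on the indexed-access type used above: `Parameters<BulkWriteFunction>[1]` ties the option's type to the second parameter of `BulkWriteFunction` (updated in src/types/table.ts at the end of this commit), so the two cannot drift apart. A minimal sketch of what it resolves to; the alias `OnBatchCommit` is invented here for illustration:

import type { BulkWriteFunction } from "@src/types/table";

// Parameters<F> yields a tuple of F's parameter types; indexing with [1]
// selects the second parameter, i.e. the optional per-batch callback.
type OnBatchCommit = Parameters<BulkWriteFunction>[1];
// equivalent to: ((batchNumber: number) => void) | undefined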

src/components/SnackbarProgress.tsx

@@ -11,11 +11,13 @@ export interface ISnackbarProgressRef {
 export interface ISnackbarProgressProps {
   target?: number;
   stateRef: MutableRefObject<ISnackbarProgressRef | undefined>;
+  label?: React.ReactNode;
 }

 export default function SnackbarProgress({
   target: targetProp = 100,
   stateRef,
+  label,
 }: ISnackbarProgressProps) {
   const [progress, setProgress] = useState(0);
   const [target, setTarget] = useState(targetProp);
@@ -31,14 +33,25 @@ export default function SnackbarProgress({
     >
       <span>
         {progress}/{target}
+        {label}
       </span>
-      <CircularProgressOptical
-        value={(progress / target) * 100}
-        variant="determinate"
-        size={24}
-        color="inherit"
-      />
+      <div style={{ position: "relative", width: 24, height: 24 }}>
+        <CircularProgressOptical
+          value={(progress / target) * 100}
+          variant="determinate"
+          size={24}
+          color="inherit"
+          style={{ position: "absolute", top: 0, left: 0 }}
+        />
+        <CircularProgressOptical
+          disableShrink
+          size={24}
+          color="inherit"
+          style={{ position: "absolute", top: 0, left: 0 }}
+          sx={{ color: "primary.contrastText", opacity: 0.33 }}
+        />
+      </div>
     </Stack>
   );
 }
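In sum, `SnackbarProgress` gains an optional `label` rendered after the count, and a second, indeterminate ring stacked at 33% opacity behind the determinate one, presumably so the spinner stays visibly animated between batch commits. A minimal usage sketch under the same pattern ImportCsvWizard uses below (the component name `UploadStatus` is hypothetical): the caller renders the component once and pushes updates through the imperative `stateRef`, which suits a persistent notistack snackbar whose `action` is rendered only when enqueued:

import { useRef } from "react";
import SnackbarProgress, {
  ISnackbarProgressRef,
} from "@src/components/SnackbarProgress";

// Hypothetical caller: render the component once (e.g. as a notistack
// snackbar `action`), then push updates through the imperative ref.
function UploadStatus() {
  const progressRef = useRef<ISnackbarProgressRef>();

  const action = (
    <SnackbarProgress stateRef={progressRef} target={10} label=" batches" />
  );

  // Later, from async work:
  // progressRef.current?.setProgress(3); // renders "3/10 batches"

  return action;
}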

src/components/TableModals/ImportCsvWizard/index.tsx

@@ -1,4 +1,4 @@
-import { useState, useMemo, useCallback } from "react";
+import { useState, useMemo, useCallback, useRef } from "react";
 import useMemoValue from "use-memo-value";
 import { useAtom, useSetAtom } from "jotai";
 import { useSnackbar } from "notistack";
@@ -19,6 +19,9 @@ import Step1Columns from "./Step1Columns";
 import Step2NewColumns from "./Step2NewColumns";
 import Step3Preview from "./Step3Preview";
 import CircularProgressOptical from "@src/components/CircularProgressOptical";
+import SnackbarProgress, {
+  ISnackbarProgressRef,
+} from "@src/components/SnackbarProgress";
 import {
   tableScope,
@@ -55,6 +58,7 @@ export default function ImportCsvWizard({ onClose }: ITableModalProps) {
   const { enqueueSnackbar, closeSnackbar } = useSnackbar();
   const theme = useTheme();
   const isXs = useMediaQuery(theme.breakpoints.down("sm"));
+  const snackbarProgressRef = useRef<ISnackbarProgressRef>();

   const columns = useMemoValue(tableSchema.columns ?? {}, isEqual);
@@ -96,13 +100,23 @@ export default function ImportCsvWizard({ onClose }: ITableModalProps) {
   const handleFinish = async () => {
     if (!parsedRows) return;
     console.time("importCsv");
+    snackbarProgressRef.current?.setProgress(0);
     const loadingSnackbar = enqueueSnackbar(
-      `Importing ${parsedRows.length} rows. This may take a while…`,
+      `Importing ${Number(
+        parsedRows.length
+      ).toLocaleString()} rows. This might take a while.`,
       {
         persist: true,
-        action: <CircularProgressOptical color="inherit" size={24} />,
+        action: (
+          <SnackbarProgress
+            stateRef={snackbarProgressRef}
+            target={Math.ceil(parsedRows.length / 500)}
+            label=" batches"
+          />
+        ),
       }
     );
     // Run add column & batch write at the same time
     const promises: Promise<void>[] = [];
     try {
@@ -111,9 +125,15 @@ export default function ImportCsvWizard({ onClose }: ITableModalProps) {
         promises.push(addColumn({ config: col }));
       promises.push(
-        bulkAddRows({ rows: parsedRows, collection: tableSettings.collection })
+        bulkAddRows({
+          rows: parsedRows,
+          collection: tableSettings.collection,
+          onBatchCommit: (batchNumber: number) =>
+            snackbarProgressRef.current?.setProgress(batchNumber),
+        })
       );
+      onClose();
       await Promise.all(promises);
       logEvent(analytics, "import_success", { type: importType });
       closeSnackbar(loadingSnackbar);
@@ -121,8 +141,6 @@ export default function ImportCsvWizard({ onClose }: ITableModalProps) {
       enqueueSnackbar((e as Error).message, { variant: "error" });
     } finally {
       closeSnackbar(loadingSnackbar);
-      // Close wizard
-      onClose();
     }
     console.timeEnd("importCsv");
   };
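The `target` handed to the snackbar mirrors the batch size used by the Firestore writer in the next file: batches hold at most 500 operations, so the batch count is the row count divided by 500, rounded up, and `onBatchCommit` then reports 1, 2, and so on up to that target. A worked example (`BATCH_SIZE` is named here for illustration; the diff inlines the constant 500):

const BATCH_SIZE = 500; // Firestore's per-batch operation limit
const batches = Math.ceil(1234 / BATCH_SIZE); // 1,234 rows → 3 batches

Note also that `onClose()` moves from the `finally` block to right after the writes are queued, so the wizard closes immediately while the import continues in the persistent snackbar.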

src/sources/TableSourceFirestore.tsx

@@ -25,7 +25,7 @@ import {
   tableNextPageAtom,
   auditChangeAtom,
 } from "@src/atoms/tableScope";
-import { BulkWriteOperation, TableRow } from "@src/types/table";
+import { BulkWriteFunction } from "@src/types/table";
 import { firebaseDbAtom } from "./ProjectSourceFirebase";
 import useFirestoreDocWithAtom from "@src/hooks/useFirestoreDocWithAtom";
 import useFirestoreCollectionWithAtom from "@src/hooks/useFirestoreCollectionWithAtom";
@@ -148,46 +148,47 @@ const TableSourceFirestore = memo(function TableSourceFirestore() {
   const setBulkWriteDb = useSetAtom(_bulkWriteDbAtom, tableScope);
   useEffect(() => {
     setBulkWriteDb(
-      () => async (operations: BulkWriteOperation<Partial<TableRow>>[]) => {
-        // Chunk operations into batches of 500 (Firestore limit is 500)
-        const operationsChunked = chunk(operations, 500);
-        // Store array of promises so we can run them all at once
-        const promises: Promise<void>[] = [];
-        // Loop through chunks of 500
-        for (const operationsChunk of operationsChunked) {
-          // Create Firestore batch transaction
-          const batch = writeBatch(firebaseDb);
-          // Loop through operations and write to batch
-          for (const operation of operationsChunk) {
-            // New document
-            if (operation.type === "add") {
-              batch.set(doc(firebaseDb, operation.path), operation.data);
-            }
-            // Update existing document and merge values and delete fields
-            else if (operation.type === "update") {
-              const updateToDb = { ...operation.data };
-              if (Array.isArray(operation.deleteFields)) {
-                for (const field of operation.deleteFields) {
-                  set(updateToDb as any, field, deleteField());
-                }
-              }
-              batch.set(doc(firebaseDb, operation.path), operation.data, {
-                merge: true,
-              });
-            }
-            // Delete existing documents
-            else if (operation.type === "delete") {
-              batch.delete(doc(firebaseDb, operation.path));
-            }
-          }
-          // Add to promises array
-          // promises.push(
-          await batch.commit().then(() => console.log("Batch committed"));
-          // );
-        }
-        // Return promise that waits for all promises to resolve
-        return Promise.all(promises);
-      }
+      () =>
+        async (
+          operations: Parameters<BulkWriteFunction>[0],
+          onBatchCommit: Parameters<BulkWriteFunction>[1]
+        ) => {
+          // Chunk operations into batches of 500 (Firestore limit is 500)
+          const operationsChunked = chunk(operations, 500);
+          // Loop through chunks of 500, then commit the batch sequentially
+          for (const [index, operationsChunk] of operationsChunked.entries()) {
+            // Create Firestore batch transaction
+            const batch = writeBatch(firebaseDb);
+            // Loop through operations and write to batch
+            for (const operation of operationsChunk) {
+              // New document
+              if (operation.type === "add") {
+                batch.set(doc(firebaseDb, operation.path), operation.data);
+              }
+              // Update existing document and merge values and delete fields
+              else if (operation.type === "update") {
+                const updateToDb = { ...operation.data };
+                if (Array.isArray(operation.deleteFields)) {
+                  for (const field of operation.deleteFields) {
+                    set(updateToDb as any, field, deleteField());
+                  }
+                }
+                batch.set(doc(firebaseDb, operation.path), operation.data, {
+                  merge: true,
+                });
+              }
+              // Delete existing documents
+              else if (operation.type === "delete") {
+                batch.delete(doc(firebaseDb, operation.path));
+              }
+            }
+            // Commit batch and wait for it to finish before continuing
+            // to prevent Firestore rate limits
+            await batch.commit().then(() => console.log("Batch committed"));
+            if (onBatchCommit) onBatchCommit(index + 1);
+          }
+        }
     );
     return () => setBulkWriteDb(undefined);
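Stripped of the Firestore details, the control-flow change is this: the old code still carried a `promises` array from an earlier parallel design (its `promises.push` already commented out), while the new code awaits each `batch.commit()` before starting the next and reports a 1-based batch number. A standalone sketch of that pattern, with generic names invented here:

// Generic shape of the new sequential-commit loop (names are illustrative):
async function commitSequentially<T>(
  chunks: T[][],
  commitChunk: (chunk: T[]) => Promise<void>,
  onBatchCommit?: (batchNumber: number) => void
): Promise<void> {
  for (const [index, chunk] of chunks.entries()) {
    // Awaiting here serializes the batches, trading throughput for
    // staying under write rate limits (per the comment in the diff).
    await commitChunk(chunk);
    onBatchCommit?.(index + 1); // report 1-based batch number
  }
}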

src/types/table.ts

@@ -48,8 +48,9 @@ export type BulkWriteOperation<T> =
  * @returns Promise
  */
 export type BulkWriteFunction<T = Partial<TableRow>> = (
-  operations: BulkWriteOperation<T>[]
-) => Promise<void[]>;
+  operations: BulkWriteOperation<T>[],
+  onBatchCommit?: (batchNumber: number) => void
+) => Promise<void>;

 /**
  * Store the next page state to know if its loading and if its available
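Restating the updated contract: the return type narrows from `Promise<void[]>` (a leftover of the parallel `Promise.all` design) to `Promise<void>`, and implementations may call the optional `onBatchCommit` once per committed batch. A hypothetical in-memory implementation, e.g. as a test double:

import type { BulkWriteFunction } from "@src/types/table";

// Test double: "commits" nothing but reports batch progress the same way
// the Firestore implementation does. BATCH_SIZE mirrors Firestore's limit.
const fakeBulkWrite: BulkWriteFunction = async (operations, onBatchCommit) => {
  const BATCH_SIZE = 500;
  const batchCount = Math.ceil(operations.length / BATCH_SIZE);
  for (let batch = 1; batch <= batchCount; batch++) {
    onBatchCommit?.(batch);
  }
};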