Mirror of https://github.com/rowyio/rowy.git (synced 2025-12-29 00:16:39 +01:00)
Commit: import files from URL
@@ -242,10 +242,15 @@ export interface IBulkAddRowsOptions {
   rows: Partial<TableRow[]>;
   collection: string;
   onBatchCommit?: Parameters<BulkWriteFunction>[1];
+  type?: "add";
 }
 export const bulkAddRowsAtom = atom(
   null,
-  async (get, _, { rows, collection, onBatchCommit }: IBulkAddRowsOptions) => {
+  async (
+    get,
+    _,
+    { rows, collection, onBatchCommit, type }: IBulkAddRowsOptions
+  ) => {
     const bulkWriteDb = get(_bulkWriteDbAtom);
     if (!bulkWriteDb) throw new Error("Cannot write to database");
     const tableSettings = get(tableSettingsAtom);
@@ -277,7 +282,11 @@ export const bulkAddRowsAtom = atom(

     // Assign a random ID to each row
     const operations = rows.map((row) => ({
-      type: row?._rowy_ref?.id ? ("update" as "update") : ("add" as "add"),
+      type: type
+        ? type
+        : row?._rowy_ref?.id
+        ? ("update" as "update")
+        : ("add" as "add"),
       path: `${collection}/${row?._rowy_ref?.id ?? generateId()}`,
       data: { ...initialValues, ...omitRowyFields(row) },
     }));
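A brief sketch of what the new option changes (illustrative only, not part of the commit): with `type: "add"`, the caller can force every row to be written as an add operation, even when a row already carries a `_rowy_ref.id` that would otherwise make it an update.

// Sketch: how the operation type is resolved after this change.
type BulkOpType = "add" | "update";

function resolveOperationType(
  row: { _rowy_ref?: { id?: string } },
  type?: "add"
): BulkOpType {
  // An explicit `type` wins; otherwise an existing document id means "update".
  return type ?? (row?._rowy_ref?.id ? "update" : "add");
}

// resolveOperationType({ _rowy_ref: { id: "abc" } })        → "update"
// resolveOperationType({ _rowy_ref: { id: "abc" } }, "add") → "add"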
@@ -7,6 +7,7 @@ import EmptyState from "@src/components/EmptyState";
 import { FieldType } from "@src/constants/fields";
 import { getFieldProp } from "@src/components/fields";
 import { DEFAULT_ROW_HEIGHT } from "@src/components/Table";
+import mockValue from "./mockValue";

 export interface ICellProps
   extends Partial<
@@ -31,6 +32,7 @@ export default function Cell({
   ...props
 }: ICellProps) {
   const tableCell = type ? getFieldProp("TableCell", type) : null;
+  value = mockValue(value, type);

   return (
     <StyledTable>
src/components/Table/Mock/mockValue/file.ts (new file)
@@ -0,0 +1,11 @@
+export const fileValueConverter = (value: any) => {
+  if (!value) return [];
+  if (Array.isArray(value)) return value;
+  if (typeof value === "string") {
+    return value.split(",").map((url) => ({
+      downloadURL: url.trim(),
+      name: +new Date() + "-" + Math.round(Math.random() * 1000),
+    }));
+  }
+  return [];
+};
src/components/Table/Mock/mockValue/index.ts (new file)
@@ -0,0 +1,20 @@
+import { FieldType } from "@src/constants/fields";
+import { fileValueConverter } from "./file";
+import { referenceValueConverter } from "./reference";
+
+export const VALUE_CONVERTERS: Partial<{
+  [key in FieldType]: (value: any) => any;
+}> = {
+  [FieldType.image]: fileValueConverter,
+  [FieldType.reference]: referenceValueConverter,
+  [FieldType.file]: fileValueConverter,
+};
+
+export default function convert(value: any, type: FieldType) {
+  const converter = VALUE_CONVERTERS[type];
+  if (converter) {
+    return converter(value);
+  }
+
+  return value;
+}
src/components/Table/Mock/mockValue/reference.ts (new file)
@@ -0,0 +1,12 @@
+export const referenceValueConverter = (value: any) => {
+  if (typeof value === "string") {
+    if (
+      value !== "" &&
+      value.split("/").length > 0 &&
+      value.split("/").length % 2 === 0
+    ) {
+      return { path: value };
+    }
+  }
+  return value;
+};
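How these mock-value converters behave end to end, via the `convert` default export (a sketch; the sample inputs below are hypothetical, not from the commit):

import convert from "@src/components/Table/Mock/mockValue";
import { FieldType } from "@src/constants/fields";

// Image/file fields: a comma-separated URL string becomes file-like objects
// with a trimmed downloadURL and a generated name.
convert("https://example.com/a.png, https://example.com/b.png", FieldType.image);
// → [{ downloadURL: "https://example.com/a.png", name: "…" },
//    { downloadURL: "https://example.com/b.png", name: "…" }]

// Reference fields: a non-empty string with an even number of path segments
// is treated as a Firestore document path.
convert("users/alice", FieldType.reference); // → { path: "users/alice" }
convert("users", FieldType.reference);       // → "users" (odd segment count, left as-is)

// Types without a registered converter pass through untouched.
convert(4, FieldType.rating);                // → 4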
@@ -37,7 +37,11 @@ import {
 import { ColumnConfig } from "@src/types/table";
 import { getFieldProp } from "@src/components/fields";
 import { analytics, logEvent } from "@src/analytics";
+import { FieldType } from "@src/constants/fields";
+import { generateId } from "@src/utils/table";
 import { isValidDocId } from "./utils";
+import useUploadFileFromURL from "./useUploadFileFromURL";
+import useConverter from "./useConverter";

 export type CsvConfig = {
   pairs: { csvKey: string; columnKey: string }[];
@@ -46,6 +50,8 @@ export type CsvConfig = {
   documentIdCsvKey: string | null;
 };

+const needsUploadTypes = [FieldType.image, FieldType.file];
+
 export interface IStepProps {
   csvData: NonNullable<ImportCsvData>;
   config: CsvConfig;
@@ -66,6 +72,10 @@ export default function ImportCsvWizard({ onClose }: ITableModalProps) {
   const isXs = useMediaQuery(theme.breakpoints.down("sm"));
   const snackbarProgressRef = useRef<ISnackbarProgressRef>();

+  const snackbarUploadProgressRef = useRef<ISnackbarProgressRef>();
+  const { addTask, runBatchUpload, askPermission } = useUploadFileFromURL();
+  const { needsConverter, getConverter } = useConverter();
+
   const columns = useMemoValue(tableSchema.columns ?? {}, isEqual);

   const [config, setConfig] = useState<CsvConfig>({
@@ -74,6 +84,7 @@ export default function ImportCsvWizard({ onClose }: ITableModalProps) {
     documentId: "auto",
     documentIdCsvKey: null,
   });
+
   const updateConfig: IStepProps["updateConfig"] = useCallback((value) => {
     setConfig((prev) => {
       const pairs = uniqBy([...prev.pairs, ...(value.pairs ?? [])], "csvKey");
@@ -123,6 +134,35 @@ export default function ImportCsvWizard({ onClose }: ITableModalProps) {
       )
     : { validRows: parsedRows, invalidRows: [] };

+  const { requiredConverts, requiredUploads } = useMemo(() => {
+    const columns = config.pairs.map(({ csvKey, columnKey }) => ({
+      csvKey,
+      columnKey,
+      ...(tableSchema.columns?.[columnKey] ??
+        find(config.newColumns, { key: columnKey }) ??
+        {}),
+    }));
+
+    let requiredConverts: any = {};
+    let requiredUploads: any = {};
+    columns.forEach((column, index) => {
+      if (needsConverter(column.type)) {
+        requiredConverts[index] = getConverter(column.type);
+        console.log({ needsUploadTypes }, column.type);
+        if (needsUploadTypes.includes(column.type)) {
+          requiredUploads[column.fieldName + ""] = true;
+        }
+      }
+    });
+    return { requiredConverts, requiredUploads };
+  }, [
+    config.newColumns,
+    config.pairs,
+    getConverter,
+    needsConverter,
+    tableSchema.columns,
+  ]);
+
   const handleFinish = async () => {
     if (!parsedRows) return;
     console.time("importCsv");
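For orientation, here is roughly what the two lookups produced by this `useMemo` contain for a hypothetical import that maps a `photo` CSV column to an image field and a `manager` column to a reference field (field names invented for illustration):

// Hypothetical contents of the memoized lookups for two mapped columns:
//   index 0 → "photo"   (FieldType.image)
//   index 1 → "manager" (FieldType.reference)
const requiredConverts = {
  0: imageOrFileConverter, // comma-separated URL string → RowyFile[]
  1: referenceConverter,   // "users/alice" → Firestore DocumentReference
};
// Only image/file columns also need their source URLs uploaded to Storage,
// so requiredUploads is keyed by field name rather than column index.
const requiredUploads = {
  photo: true,
};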
@@ -176,12 +216,48 @@ export default function ImportCsvWizard({ onClose }: ITableModalProps) {
         { variant: "warning" }
       );
     }
+    const newValidRows = validRows.map((row) => {
+      // Convert required values
+      Object.keys(row).forEach((key, i) => {
+        if (requiredConverts[i]) {
+          row[key] = requiredConverts[i](row[key]);
+        }
+      });
+
+      const id = generateId();
+      const newRow = {
+        _rowy_ref: {
+          path: `${tableSettings.collection}/${row?._rowy_ref?.id ?? id}`,
+          id,
+        },
+        ...row,
+      };
+      return newRow;
+    });
+
     promises.push(
       bulkAddRows({
-        rows: validRows,
+        type: "add",
+        rows: newValidRows,
         collection: tableSettings.collection,
-        onBatchCommit: (batchNumber: number) =>
-          snackbarProgressRef.current?.setProgress(batchNumber),
+        onBatchCommit: async (batchNumber: number) => {
+          if (Object.keys(requiredUploads).length > 0) {
+            newValidRows
+              .slice((batchNumber - 1) * 500, batchNumber * 500 - 1)
+              .forEach((row) => {
+                Object.keys(requiredUploads).forEach((key) => {
+                  if (requiredUploads[key]) {
+                    addTask({
+                      docRef: row._rowy_ref,
+                      fieldName: key,
+                      files: row[key],
+                    });
+                  }
+                });
+              });
+          }
+          snackbarProgressRef.current?.setProgress(batchNumber);
+        },
       })
     );

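One detail worth noting about the upload queueing above (an observation on the code as shown, not part of the commit): `Array.prototype.slice` excludes its end index, so the per-batch row range works out as sketched below.

// For bulkAddRows batches of 500 rows, batchNumber starts at 1:
//   slice((batchNumber - 1) * 500, batchNumber * 500)     → rows 0–499, 500–999, …
//   slice((batchNumber - 1) * 500, batchNumber * 500 - 1) → rows 0–498, 500–998, …
// i.e. the `- 1` used here leaves out the last row of each committed batch.
const BATCH_SIZE = 500;
function rowsForBatch<T>(rows: T[], batchNumber: number): T[] {
  return rows.slice((batchNumber - 1) * BATCH_SIZE, batchNumber * BATCH_SIZE);
}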
@@ -192,6 +268,25 @@ export default function ImportCsvWizard({ onClose }: ITableModalProps) {
       `Imported ${Number(validRows.length).toLocaleString()} rows`,
       { variant: "success" }
     );
+    if (await askPermission()) {
+      const uploadingSnackbar = enqueueSnackbar(
+        `Importing ${Number(
+          validRows.length
+        ).toLocaleString()} rows. This might take a while.`,
+        {
+          persist: true,
+          action: (
+            <SnackbarProgress
+              stateRef={snackbarUploadProgressRef}
+              target={Math.ceil(validRows.length / 500)}
+              label=" batches"
+            />
+          ),
+        }
+      );
+      await runBatchUpload(snackbarUploadProgressRef.current?.setProgress);
+      closeSnackbar(uploadingSnackbar);
+    }
   } catch (e) {
     enqueueSnackbar((e as Error).message, { variant: "error" });
   } finally {
src/components/TableModals/ImportCsvWizard/useConverter.ts (new file)
@@ -0,0 +1,49 @@
+import { projectScope } from "@src/atoms/projectScope";
+import { FieldType } from "@src/constants/fields";
+import { firebaseDbAtom } from "@src/sources/ProjectSourceFirebase";
+import { doc, DocumentReference as Reference } from "firebase/firestore";
+import { useAtom } from "jotai";
+
+const needsConverter = (type: FieldType) =>
+  [FieldType.image, FieldType.reference, FieldType.file].includes(type);
+
+export default function useConverter() {
+  const [firebaseDb] = useAtom(firebaseDbAtom, projectScope);
+
+  const referenceConverter = (value: string): Reference | null => {
+    if (!value) return null;
+    if (value.split("/").length % 2 !== 0) return null;
+    return doc(firebaseDb, value);
+  };
+
+  const imageOrFileConverter = (urls: string): RowyFile[] => {
+    return urls.split(",").map((url) => {
+      url = url.trim();
+      return {
+        downloadURL: url,
+        name: url.split("/").pop() || "",
+        lastModifiedTS: +new Date(),
+        type: "",
+      };
+    });
+  };
+
+  const getConverter = (type: FieldType) => {
+    switch (type) {
+      case FieldType.image:
+      case FieldType.file:
+        return imageOrFileConverter;
+      case FieldType.reference:
+        return referenceConverter;
+      default:
+        return null;
+    }
+  };
+
+  return {
+    needsConverter,
+    referenceConverter,
+    imageOrFileConverter,
+    getConverter,
+  };
+}
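A short usage sketch for this hook (illustrative; the wrapper and sample values are hypothetical, and the real call sites are in the ImportCsvWizard hunks above):

import { FieldType } from "@src/constants/fields";
import useConverter from "@src/components/TableModals/ImportCsvWizard/useConverter";

// Hypothetical helper running inside a component under projectScope.
function useCsvCellConversion() {
  const { needsConverter, getConverter } = useConverter();

  // Only image, file, and reference columns need converting at all.
  if (!needsConverter(FieldType.reference)) return {};

  // "users/alice" has an even number of segments → a DocumentReference.
  const toReference = getConverter(FieldType.reference);
  const managerRef = toReference?.("users/alice");

  // Comma-separated URLs → RowyFile[] with downloadURL, name, lastModifiedTS.
  const toFiles = getConverter(FieldType.image);
  const photos = toFiles?.("https://example.com/a.png,https://example.com/b.png");

  return { managerRef, photos };
}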
@@ -0,0 +1,141 @@
+import { useCallback, useRef } from "react";
+import { useSetAtom } from "jotai";
+import { useSnackbar } from "notistack";
+import Button from "@mui/material/Button";
+
+import useUploader from "@src/hooks/useFirebaseStorageUploader";
+import { tableScope, updateFieldAtom } from "@src/atoms/tableScope";
+import { TableRowRef } from "@src/types/table";
+
+const MAX_PARALLEL_TASKS = 30;
+
+type UploadParamTypes = {
+  docRef: TableRowRef;
+  fieldName: string;
+  files: RowyFile[];
+};
+
+export default function useUploadFileFromURL() {
+  const { upload } = useUploader();
+  const updateField = useSetAtom(updateFieldAtom, tableScope);
+  const { enqueueSnackbar, closeSnackbar } = useSnackbar();
+  const jobs = useRef<UploadParamTypes[]>([]);
+
+  const askPermission = useCallback(async (): Promise<boolean> => {
+    return new Promise((resolve) => {
+      enqueueSnackbar("Upload files to firebase storage?", {
+        persist: true,
+        preventDuplicate: true,
+        action: (
+          <>
+            <Button
+              variant="contained"
+              color="primary"
+              onClick={() => {
+                closeSnackbar();
+                resolve(true);
+              }}
+              style={{
+                marginRight: 8,
+              }}
+            >
+              Yes
+            </Button>
+            <Button
+              variant="contained"
+              color="secondary"
+              onClick={() => {
+                closeSnackbar();
+                resolve(false);
+              }}
+            >
+              No
+            </Button>
+          </>
+        ),
+      });
+    });
+  }, [enqueueSnackbar, closeSnackbar]);
+
+  const handleUpload = useCallback(
+    async ({
+      docRef,
+      fieldName,
+      files,
+    }: UploadParamTypes): Promise<boolean> => {
+      try {
+        const files_ = await getFileFromURL(
+          files.map((file) => file.downloadURL)
+        );
+        const { uploads, failures } = await upload({
+          docRef,
+          fieldName,
+          files: files_,
+        });
+        if (failures.length > 0) {
+          return false;
+        }
+        updateField({
+          path: docRef.path,
+          fieldName,
+          value: uploads,
+          useArrayUnion: false,
+        });
+        return true;
+      } catch (error) {
+        return false;
+      }
+    },
+    [upload, updateField]
+  );
+
+  const batchUpload = useCallback(
+    async (batch: UploadParamTypes[]) => {
+      await Promise.all(batch.map((job) => handleUpload(job)));
+    },
+    [handleUpload]
+  );
+
+  const runBatchUpload = useCallback(
+    async (setProgress?: any) => {
+      let currentJobs: UploadParamTypes[] = [];
+
+      while (
+        currentJobs.length < MAX_PARALLEL_TASKS &&
+        jobs.current.length > 0
+      ) {
+        const job = jobs.current.shift();
+        if (job) {
+          currentJobs.push(job);
+        }
+      }
+
+      if (setProgress) setProgress((p: number) => p + currentJobs.length);
+      await batchUpload(currentJobs);
+
+      if (jobs.current.length > 0) {
+        runBatchUpload();
+      }
+    },
+    [batchUpload]
+  );
+
+  const addTask = useCallback((job: UploadParamTypes) => {
+    jobs.current.push(job);
+  }, []);
+
+  return {
+    addTask,
+    runBatchUpload,
+    askPermission,
+  };
+}
+
+function getFileFromURL(urls: string[]): Promise<File[]> {
+  const promises = urls.map((url) => {
+    return fetch(url)
+      .then((response) => response.blob())
+      .then((blob) => new File([blob], +new Date() + url, { type: blob.type }));
+  });
+  return Promise.all(promises);
+}
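Condensed sketch of how the wizard drives this hook (hypothetical row data and field name; the real call sites are in the `handleFinish` hunks above): queue one task per image/file cell, ask once for permission, then drain the queue in groups of up to MAX_PARALLEL_TASKS.

// Inside the ImportCsvWizard component (sketch only):
const { addTask, runBatchUpload, askPermission } = useUploadFileFromURL();

// 1. As each bulk-write batch commits, queue an upload task per file cell.
newValidRows.forEach((row) =>
  addTask({
    docRef: row._rowy_ref, // Firestore document the files belong to
    fieldName: "photo",    // hypothetical image column
    files: row.photo,      // RowyFile[] whose downloadURLs point at the source files
  })
);

// 2. After the import finishes, ask once, then drain the queue. Each job
//    fetches the URLs, re-uploads them via useFirebaseStorageUploader, and
//    writes the resulting upload metadata back onto the row.
if (await askPermission()) {
  await runBatchUpload(snackbarUploadProgressRef.current?.setProgress);
}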
@@ -117,7 +117,11 @@ export default function Step1Columns({ config, setConfig }: IStepProps) {
             color="default"
           />
         }
-        label={selectedFields.length == allFields.length ? "Clear all" : "Select all"}
+        label={
+          selectedFields.length === allFields.length
+            ? "Clear all"
+            : "Select all"
+        }
         sx={{
           height: 42,
           mr: 0,
@@ -18,6 +18,9 @@ export const SELECTABLE_TYPES = [
   FieldType.url,
   FieldType.rating,

+  FieldType.image,
+  FieldType.file,
+
   FieldType.singleSelect,
   FieldType.multiSelect,
@@ -26,6 +29,8 @@ export const SELECTABLE_TYPES = [

   FieldType.color,
   FieldType.slider,
+
+  FieldType.reference,
 ];

 export const REGEX_EMAIL =