Merge branch 'develop' into feature/theme-customisation

* develop: (36 commits)
  update ReExecute to handle >500 documents
  Bump tar from 4.4.13 to 4.4.15 in /cloud_functions/functions
  update wikiLinks
  only allow required value if default value is undefined
  updated field settings
  Bump color-string from 1.5.4 to 1.6.0 in /cloud_functions/functions
  analytics column events test
  moved ft_build to FunctionsBuilder repo
  fix analytics
  redirect missing cloud run url to home/settings
  update ft_actions dependancies
  remove second deploy button
  Google Cloud Shell one click deploy UI
  upgrade dependencies
  fix rowHeight not updating
  SideDrawer Form: fix react-hook-form state calls
  fix dynamic value type not saving, add utilFns
  fix date field
  ConnectTable: add snapshotFields config
  .
  ...
This commit is contained in:
Sidney Alcantara
2021-08-06 21:42:05 +10:00
97 changed files with 1572 additions and 10665 deletions

55
.github/workflows/tweet_release.yml vendored Normal file
View File

@@ -0,0 +1,55 @@
# This is a basic workflow to help you get started with Actions
name: tweet_releases

# Controls when the workflow will run
on:
  # Triggers the workflow on push or pull request events but only for the master branch
  push:
    branches: [master]
  pull_request:
    branches: [master]
  release:
    types:
      - published
  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
  # This workflow contains a single job called "build".
  # BUG FIX: the job block was duplicated — once orphaned above `jobs:`
  # (where it is invalid) and once below it. Keep a single copy under `jobs:`.
  build:
    name: Tweet
    runs-on: ubuntu-latest
    steps:
      - name: Post Twitter Status
        uses: julb/action-post-twitter-status@v1
        env:
          twitter_app_consumer_api_key: ${{ secrets.TWITTER_API_KEY }}
          twitter_app_consumer_api_secret_key:
            ${{ secrets.TWITTER_API_SECRET_KEY }}
          twitter_app_access_token: ${{ secrets.TWITTER_ACCESS_TOKEN }}
          twitter_app_access_token_secret:
            ${{ secrets.TWITTER_ACCESS_TOKEN_SECRET }}
        with:
          message:
            "Testing github action for tweeting new releases and updates to
            Firetable project"

138
README.md
View File

@@ -1,13 +1,34 @@
[![Firetable: Combine the power of Firestore with the simplicity of spreadsheets](https://firetable.io/social-image.jpg)](https://firetable.io)
<h1 align="center">
Firetable
</h1>
<h3 align="center">
Firetable is an open-source platform to unlock the full potential of Google Firestore
</h3>
<p align="center">
With Firetable, non-technical team members can access Firestore data with a permission controlled intuitive UI while developers can build fast on the GCP stack (manage their data models, create and deploy cloud functions, connect to third party apps via webhooks and APIs...and more!) 🚀
</p>
<!-- [![Firetable: Combine the power of Firestore with the simplicity of spreadsheets](https://firetable.io/social-image.jpg)](https://firetable.io) -->
<h3 align="center">
<a href="https://firetable.io">Website</a>
<span> · </span>
<a href="https://discord.gg/B8yAD5PDX4">Discord</a>
<span> · </span>
<a href="https://twitter.com/firetableio">Twitter</a>
</h3>
<p align="center">
<a aria-label="License" href="https://github.com/AntlerVC/firetable/blob/master/LICENSE">
<img src="https://badgen.net/github/license/AntlerVC/firetable" />
<a aria-label="License" href="https://github.com/FiretableProject/firetable/blob/master/LICENSE">
<img src="https://badgen.net/github/license/FiretableProject/firetable" />
</a>
<a aria-label="Commits" href="https://github.com/AntlerVC/firetable/commits/develop">
<img src="https://badgen.net/github/last-commit/AntlerVC/firetable/develop" />
<a aria-label="Commits" href="https://github.com/FiretableProject/firetable/commits/develop">
<img src="https://badgen.net/github/last-commit/FiretableProject/firetable/develop" />
</a>
<a aria-label="Firetable CLI" href="https://npmjs.com/package/firetable">
@@ -16,26 +37,54 @@
</p>
<p align="center">
<a href="https://try.firetable.io" >
<img align="center" alt="Firetable demo GIF" src="https://firetable.io/demo.gif" width="100%">
</a>
</p>
<br/>
<h3 align="center">
<a href="https://try.firetable.io">Live Demo</a>
<span> · </span>
<a href="https://github.com/FiretableProject/firetable/wiki/Getting-Started">Get started</a>
<span> · </span>
<a href="https://github.com/FiretableProject/firetable/issues/new?assignees=&labels=&template=feature_request.md&title=">Feature request</a>
<span> · </span>
<a href="https://github.com/FiretableProject/firetable/issues/new?assignees=&labels=&template=bug_report.md&title=">Report a bug</a>
<span> · </span>
<a href="https://discord.gg/B8yAD5PDX4">Discussions</a>
</a>
</h3>
<br/>
# Features
- Spreadsheet interface for viewing Firestore collections, documents, and
subcollections.
- **Intuitive spreadsheet like interface for Firestore.** With Firetable UI you
can view and directly edit the data in Firestore but not just that, you can
completely manage your existing data model or create from scratch.
- Add, edit, and delete rows
- Sort and filter by row values
- CRUD operations on your data - Add, edit, and delete rows
- Create table collections, add column fields
- Sort and filter data
- Resize and rename columns
- 27 different field types. [Read more](https://github.com/AntlerVC/firetable/wiki/Field-Types)
- **Rich set of data fields.**
[29+ field types](https://github.com/FiretableProject/firetable/wiki/Field-Types)
supported and growing
- Basic types: Short Text, Long Text, Email, Phone, URL…
- Custom UI pickers: Date, Checkbox, Single Select, Multi Select…
- Uploaders: Image, File
- Rich Editors: JSON, Code, Rich Text (HTML)
- Powerful access controls with custom user roles.
[Read more](https://github.com/AntlerVC/firetable/wiki/Role-Based-Security-Rules)
- **Permission controlled views.** You can customize who sees what data and what
action they can take with powerful set of access controls based on custom user
roles.
[Read more](https://github.com/FiretableProject/firetable/wiki/Role-Based-Security-Rules)
- **Bulk data actions.** You can import and export your Firestore data from/to
CSV files.
- Supercharge your database with your own scripts.
- **Supercharge your database with your own scripts.**
- Action field: trigger any Cloud Function
- Derivative field: populate cell with value derived from the rest of the
@@ -43,7 +92,7 @@
- Aggregate field: populate cell with value aggregated from the rows
sub-table
- Integrations with external services.
- **Integrations with external services.**
- Connect Table uses Algolia to get a snapshot of another table's row values
- Connect Service uses any HTTP endpoint to get a cell value
@@ -51,30 +100,22 @@
<p align="center">
<a href="https://firebase.google.com/products/firestore">
<img src="https://github.com/AntlerVC/firetable/wiki/images/firebase/firestore.png" alt="Cloud Firestore" width="32%" />
<img src="https://github.com/FiretableProject/firetable/wiki/images/firebase/firestore.png" alt="Cloud Firestore" width="19%" />
</a>
<a href="https://firebase.google.com/products/auth">
<img src="https://github.com/AntlerVC/firetable/wiki/images/firebase/authentication.png" alt="Firebase Authentication" width="32%" />
<img src="https://github.com/FiretableProject/firetable/wiki/images/firebase/authentication.png" alt="Firebase Authentication" width="19%" />
</a>
<a href="https://firebase.google.com/products/functions">
<img src="https://github.com/AntlerVC/firetable/wiki/images/firebase/functions.png" alt="Firebase Functions" width="32%" />
<img src="https://github.com/FiretableProject/firetable/wiki/images/firebase/functions.png" alt="Firebase Functions" width="19%" />
</a>
<a href="https://firebase.google.com/products/hosting">
<img src="https://github.com/AntlerVC/firetable/wiki/images/firebase/hosting.png" alt="Firebase Hosting" width="32%" />
<img src="https://github.com/FiretableProject/firetable/wiki/images/firebase/hosting.png" alt="Firebase Hosting" width="19%" />
</a>
<a href="https://firebase.google.com/products/storage">
<img src="https://github.com/AntlerVC/firetable/wiki/images/firebase/storage.png" alt="Firebase Storage" width="32%" />
<img src="https://github.com/FiretableProject/firetable/wiki/images/firebase/storage.png" alt="Firebase Storage" width="19%" />
</a>
</p>
## [Live demo →](https://try.firetable.io)
<a href="https://try.firetable.io">
<img align="center" alt="Firetable demo GIF" src="https://firetable.io/demo.gif">
</a>
<br />
# Getting started
To set up Firetable, you must be comfortable with working with the command line.
@@ -85,48 +126,45 @@ packages globally.
We've created the Firetable CLI to automate the steps required to set up
Firetable.
### [**Read the getting started guide →**](https://github.com/AntlerVC/firetable/wiki/Getting-Started)
<br />
### [**Read the getting started guide →**](https://github.com/FiretableProject/firetable/wiki/Getting-Started)
# Documentation
We're still working on improving our documentation and writing more
beginner-friendly guides.
[**Documentation on GitHub Wiki →**](https://github.com/AntlerVC/firetable/wiki)
<br />
[**Documentation on GitHub Wiki →**](https://github.com/FiretableProject/firetable/wiki)
# Issues
[![Open issues](https://badgen.net/github/open-issues/AntlerVC/firetable)](https://github.com/antlervc/firetable/issues)
[![Open issues](https://badgen.net/github/open-issues/FiretableProject/firetable)](https://github.com/antlervc/firetable/issues)
[Create issues and bug reports here.](https://github.com/antlervc/firetable/issues)
Make sure to provide console log outputs and screenshots!
# Roadmap and feature requests
- [Roadmap](https://github.com/AntlerVC/firetable/wiki/Roadmap)
- [View our ideas and feature requests](https://github.com/AntlerVC/firetable/projects/1)
<br />
- [Roadmap](https://github.com/FiretableProject/firetable/wiki/Roadmap)
- [View our ideas and feature requests](https://github.com/FiretableProject/firetable/projects/1)
---
<br />
<!-- # Join Our Community
# About Antler Engineering
<a href="https://discord.gg/B8yAD5PDX4">
<img
src="https://invidget.switchblade.xyz/B8yAD5PDX4"
alt="Join Firetable Open Source Project"
>
</a>
<br> -->
<img src="https://firebasestorage.googleapis.com/v0/b/antler-vc.appspot.com/o/antler-logo.svg?alt=media&token=34db0e2e-1d24-4995-9efa-8bf209c55613" align="right" width="200" height="48" />
# Contributing 🙌
Firetable is created and being actively developed by
[Antler Engineering](https://twitter.com/AntlerEng).
We welcome any contributions from the open source community.
At [Antler](https://antler.co), we identify and invest in exceptional people.
We're a global startup generator and early-stage VC firm that builds
groundbreaking technology companies.
[Apply now](<https://www.antler.co/apply?utm_source=Firetable&utm_medium=website&utm_campaign=Thu%20Apr%2016%202020%2018:00:00%20GMT%2B0200%20(CEST)&utm_content=TechTracking>)
to be part of a global cohort of tech founders.
- **[Good First Issues](https://github.com/FiretableProject/firetable/projects/3)**
project is a good place to start for contributing to Firetable.
- For new feature or enhancement, please create an issue first or send us a
message on **[Discord](https://discord.gg/B8yAD5PDX4)** so that we can discuss
and guide you as needed ahead of you starting the work.

View File

@@ -47,7 +47,7 @@ export const scheduledFirestoreImport = functions.pubsub
const firestoreBackup = (collectionIds: string[] = []) => {
const projectId = process.env.GCP_PROJECT || process.env.GCLOUD_PROJECT;
const databaseName = client.databasePath(projectId, "(default)");
const databaseName = client.databasePath(projectId ?? "", "(default)");
const date = new Date();
const backupFolder = `${date.getUTCFullYear()}-${

View File

@@ -1464,9 +1464,9 @@ color-name@^1.0.0, color-name@~1.1.4:
integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==
color-string@^1.5.2, color-string@^1.5.4:
version "1.5.4"
resolved "https://registry.yarnpkg.com/color-string/-/color-string-1.5.4.tgz#dd51cd25cfee953d138fe4002372cc3d0e504cb6"
integrity sha512-57yF5yt8Xa3czSEW1jfQDE79Idk0+AkN/4KWad6tbdxUmAs3MvjxlWSWD4deYytcRfoZ9nhKyFl1kj5tBvidbw==
version "1.6.0"
resolved "https://registry.yarnpkg.com/color-string/-/color-string-1.6.0.tgz#c3915f61fe267672cb7e1e064c9d692219f6c312"
integrity sha512-c/hGS+kRWJutUBEngKKmk4iH3sD59MBkoxVapS/0wgpCz2u7XsNloxknyvBhzwEs1IbV36D9PwqLPJ2DTu3vMA==
dependencies:
color-name "^1.0.0"
simple-swizzle "^0.2.2"
@@ -5977,9 +5977,9 @@ tar-stream@^2.1.0, tar-stream@^2.1.4:
readable-stream "^3.1.1"
tar@^4.3.0:
version "4.4.13"
resolved "https://registry.yarnpkg.com/tar/-/tar-4.4.13.tgz#43b364bc52888d555298637b10d60790254ab525"
integrity sha512-w2VwSrBoHa5BsSyH+KxEqeQBAllHhccyMFVHtGtdMpF4W7IRWfZjFiQceJPChOeTsSDVUpER2T8FA93pr0L+QA==
version "4.4.15"
resolved "https://registry.yarnpkg.com/tar/-/tar-4.4.15.tgz#3caced4f39ebd46ddda4d6203d48493a919697f8"
integrity sha512-ItbufpujXkry7bHH9NpQyTXPbJ72iTlXgkBAYsAjDXk3Ds8t/3NfO5P4xZGy7u+sYuQUbimgzswX4uQIEeNVOA==
dependencies:
chownr "^1.1.1"
fs-minipass "^1.2.5"

View File

@@ -14,8 +14,9 @@
},
"main": "lib/index.js",
"dependencies": {
"firebase-admin": "^9.10.0",
"firebase-functions": "^3.14.1"
"firebase-admin": "^9.11.0",
"firebase-functions": "^3.14.1",
"lodash": "^4.17.21"
},
"devDependencies": {
"firebase-tools": "^8.7.0",

File diff suppressed because it is too large Load Diff

4
ft_actions/yarn.lock Normal file
View File

@@ -0,0 +1,4 @@
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1

View File

@@ -1,2 +0,0 @@
antler*.json
.gitignore

1
ft_build/.gitignore vendored
View File

@@ -1 +0,0 @@
build/

View File

@@ -1,20 +0,0 @@
# Use the official lightweight Node.js image.
# https://hub.docker.com/_/node
FROM node:14-slim

# Create and change to the app directory.
WORKDIR /workdir

# Copy local code to the container image.
# If you've done yarn install locally, node_modules will be copied to
# docker work directory to save time to perform the same actions again.
COPY . ./

# Install any dependencies missing from the copy above.
# If you add a package-lock.json, speed your build by switching to 'npm ci'.
# RUN npm ci --only=production
RUN yarn

# Run the web service on container startup.
CMD [ "yarn", "start" ]

View File

@@ -1,80 +0,0 @@
import { addPackages, addSparkLib, asyncExecute } from "./terminal";
const fs = require("fs");
import { generateConfigFromTableSchema } from "./loader";
import { commandErrorHandler } from "../utils";
const path = require("path");
import admin from "firebase-admin";
/**
 * Regenerates functions/src/functionConfig.ts from the table schema at
 * `schemaPath`, then prepares the build so the generated config can run:
 *   1. installs any npm packages the generated scripts `require`,
 *   2. type-checks the generated functionConfig.ts,
 *   3. installs the spark libraries referenced by the sparks config.
 * Progress and failures are reported through `streamLogger`.
 *
 * @param schemaPath   Firestore document path of the table schema.
 * @param user         User on whose behalf the build runs (for error reports).
 * @param streamLogger Streaming logger with async info/error methods.
 * @returns true on success, false as soon as any step fails.
 */
export default async function generateConfig(
  schemaPath: string,
  user: admin.auth.UserRecord,
  streamLogger
) {
  // Refactor: the original mixed `await` with a `.then` chain; flattened to
  // straight-line await for readability, behavior preserved.
  const generated = await generateConfigFromTableSchema(
    schemaPath,
    user,
    streamLogger
  );
  if (!generated) {
    await streamLogger.info(`generateConfigFromTableSchema failed to complete`);
    return false;
  }
  await streamLogger.info(`generateConfigFromTableSchema done`);

  // Read back the file generateConfigFromTableSchema just wrote.
  const configFile = fs.readFileSync(
    path.resolve(__dirname, "../functions/src/functionConfig.ts"),
    "utf-8"
  );
  await streamLogger.info(`configFile: ${JSON.stringify(configFile)}`);

  // Any module pulled in via require("…") inside the generated scripts must
  // be installed into the functions package before compiling.
  const requiredDependencies = configFile.match(
    /(?<=(require\(("|'))).*?(?=("|')\))/g
  );
  if (requiredDependencies) {
    const packagesAdded = await addPackages(
      requiredDependencies.map((p: any) => ({ name: p })),
      user,
      streamLogger
    );
    if (!packagesAdded) {
      return false;
    }
  }
  await streamLogger.info(
    `requiredDependencies: ${JSON.stringify(requiredDependencies)}`
  );

  // Compile the generated config in isolation; commandErrorHandler reports
  // compile errors (with the offending source) back to the user.
  const isFunctionConfigValid = await asyncExecute(
    "cd build/functions/src; tsc functionConfig.ts",
    commandErrorHandler(
      {
        user,
        functionConfigTs: configFile,
        description: `Invalid compiled functionConfig.ts`,
      },
      streamLogger
    )
  );
  await streamLogger.info(
    `isFunctionConfigValid: ${JSON.stringify(isFunctionConfigValid)}`
  );
  if (!isFunctionConfigValid) {
    return false;
  }

  // Install every spark library the compiled config declares.
  const { sparksConfig } = require("../functions/src/functionConfig.js");
  const requiredSparks = sparksConfig.map((s: any) => s.type);
  await streamLogger.info(`requiredSparks: ${JSON.stringify(requiredSparks)}`);
  for (const lib of requiredSparks) {
    const sparkAdded = await addSparkLib(lib, user, streamLogger);
    if (!sparkAdded) {
      return false;
    }
  }
  return true;
}

View File

@@ -1,207 +0,0 @@
import { db } from "../firebaseConfig";
const fs = require("fs");
const beautify = require("js-beautify").js;
import admin from "firebase-admin";
import { parseSparksConfig } from "../utils";
/**
 * Reads the table schema document at `schemaDocPath` and generates
 * functions/src/functionConfig.ts: derivative, initialize, document-select
 * and sparks configs plus the trigger path / function name metadata.
 * Returns true on success; logs the error via streamLogger and returns
 * false otherwise.
 * NOTE(review): configs are built by string concatenation from user-authored
 * scripts stored in Firestore — assumed trusted input; confirm upstream.
 */
export const generateConfigFromTableSchema = async (
  schemaDocPath: string,
  user: admin.auth.UserRecord,
  streamLogger
) => {
  await streamLogger.info("getting schema...");
  const schemaDoc = await db.doc(schemaDocPath).get();
  const schemaData = schemaDoc.data();
  try {
    if (!schemaData) throw new Error("no schema found");
    // Temporarily disabled because this is super long
    // await streamLogger.info(`schemaData: ${JSON.stringify(schemaData)}`);

    // --- Derivative columns → [{fieldName, evaluate, listenerFields}] ---
    const derivativeColumns = Object.values(schemaData.columns).filter(
      (col: any) => col.type === "DERIVATIVE"
    );
    await streamLogger.info(
      `derivativeColumns: ${JSON.stringify(derivativeColumns)}`
    );
    const derivativesConfig = `[${derivativeColumns.reduce(
      (acc, currColumn: any) => {
        // A derivative must listen to at least one field, and never to its
        // own key (which would retrigger its own evaluation).
        if (
          !currColumn.config.listenerFields ||
          currColumn.config.listenerFields.length === 0
        )
          throw new Error(
            `${currColumn.key} derivative is missing listener fields`
          );
        if (currColumn.config.listenerFields.includes(currColumn.key))
          throw new Error(
            `${currColumn.key} derivative has its own key as a listener field`
          );
        return `${acc}{\nfieldName:'${
          currColumn.key
        }',evaluate:async ({row,ref,db,auth,storage,utilFns}) =>{${
          currColumn.config.script
        }},\nlistenerFields:[${currColumn.config.listenerFields
          .map((fieldKey: string) => `"${fieldKey}"`)
          .join(",\n")}]},\n`;
      },
      ""
    )}]`;
    await streamLogger.info(
      `derivativesConfig: ${JSON.stringify(derivativesConfig)}`
    );

    // --- Columns with a default value → initialize config ---
    const initializableColumns = Object.values(
      schemaData.columns
    ).filter((col: any) => Boolean(col.config?.defaultValue));
    await streamLogger.info(
      `initializableColumns: ${JSON.stringify(initializableColumns)}`
    );
    const initializeConfig = `[${initializableColumns.reduce(
      (acc, currColumn: any) => {
        // "static" embeds a literal value, "dynamic" embeds a script;
        // any other type (e.g. "null") just records the type string.
        if (currColumn.config.defaultValue.type === "static") {
          return `${acc}{\nfieldName:'${currColumn.key}',
type:"${currColumn.config.defaultValue.type}",
value:${
            typeof currColumn.config.defaultValue.value === "string"
              ? `"${currColumn.config.defaultValue.value}"`
              : JSON.stringify(currColumn.config.defaultValue.value)
          },
},\n`;
        } else if (currColumn.config.defaultValue.type === "dynamic") {
          return `${acc}{\nfieldName:'${currColumn.key}',
type:"${currColumn.config.defaultValue.type}",
script:async ({row,ref,db,auth,utilFns}) =>{${currColumn.config.defaultValue.script}},
},\n`;
        } else {
          return `${acc}{\nfieldName:'${currColumn.key}',
type:"${currColumn.config.defaultValue.type}"
},\n`;
        }
      },
      ""
    )}]`;
    await streamLogger.info(
      `initializeConfig: ${JSON.stringify(initializeConfig)}`
    );

    // --- DOCUMENT_SELECT columns with tracked fields → propagate config ---
    const documentSelectColumns = Object.values(schemaData.columns).filter(
      (col: any) => col.type === "DOCUMENT_SELECT" && col.config?.trackedFields
    );
    const documentSelectConfig = `[${documentSelectColumns.reduce(
      (acc, currColumn: any) => {
        return `${acc}{\nfieldName:'${
          currColumn.key
        }',\ntrackedFields:[${currColumn.config.trackedFields
          .map((fieldKey: string) => `"${fieldKey}"`)
          .join(",\n")}]},\n`;
      },
      ""
    )}]`;
    await streamLogger.info(
      `documentSelectColumns: ${JSON.stringify(documentSelectColumns)}`
    );

    const sparksConfig = parseSparksConfig(
      schemaData.sparks,
      user,
      streamLogger
    );
    await streamLogger.info(`sparksConfig: ${JSON.stringify(sparksConfig)}`);

    // --- Derive the trigger path and function name from the schema doc
    // path: top-level collection, sub-table, or collection-group table ---
    const collectionType = schemaDocPath.includes("subTables")
      ? "subCollection"
      : schemaDocPath.includes("groupSchema")
      ? "groupCollection"
      : "collection";
    let collectionId = "";
    let functionName = "";
    let triggerPath = "";
    switch (collectionType) {
      case "collection":
        collectionId = schemaDocPath.split("/").pop() ?? "";
        functionName = `"${collectionId}"`;
        triggerPath = `"${collectionId}/{docId}"`;
        break;
      case "subCollection":
        // Each "subTables" path segment becomes a {parentDocN} wildcard.
        let pathParentIncrement = 0;
        triggerPath =
          '"' +
          schemaDocPath
            .replace("_FIRETABLE_/settings/schema/", "")
            .replace(/subTables/g, function () {
              pathParentIncrement++;
              return `{parentDoc${pathParentIncrement}}`;
            }) +
          "/{docId}" +
          '"';
        functionName =
          '"' +
          schemaDocPath
            .replace("_FIRETABLE_/settings/schema/", "")
            .replace(/\/subTables\//g, "_") +
          '"';
        break;
      case "groupCollection":
        collectionId = schemaDocPath.split("/").pop() ?? "";
        // triggerDepth = number of {parentCol}/{parentDoc} wildcard pairs
        // preceding the collection-group id (defaults to 1).
        const triggerDepth = schemaData.triggerDepth
          ? schemaData.triggerDepth
          : 1;
        triggerPath = "";
        for (let i = 1; i <= triggerDepth; i++) {
          triggerPath = triggerPath + `{parentCol${i}}/{parentDoc${i}}/`;
        }
        triggerPath = '"' + triggerPath + collectionId + "/" + "{docId}" + '"';
        functionName = `"CG_${collectionId}${
          triggerDepth > 1 ? `_D${triggerDepth}` : ""
        }"`;
        break;
      default:
        break;
    }
    await streamLogger.info(
      `collectionType: ${JSON.stringify(collectionType)}`
    );

    // generate field types from table meta data
    // (derivative columns report their rendered field type instead)
    const fieldTypes = JSON.stringify(
      Object.keys(schemaData.columns).reduce((acc, cur) => {
        const field = schemaData.columns[cur];
        let fieldType = field.type;
        if (fieldType === "DERIVATIVE") {
          fieldType = field.config.renderFieldType;
        }
        return {
          [cur]: fieldType,
          ...acc,
        };
      }, {})
    );
    await streamLogger.info(`fieldTypes: ${JSON.stringify(fieldTypes)}`);

    // --- Emit every generated value as an `export const` and write the
    // beautified result to functions/src/functionConfig.ts ---
    const exports: any = {
      fieldTypes,
      triggerPath,
      functionName: functionName.replace(/-/g, "_"),
      derivativesConfig,
      initializeConfig,
      documentSelectConfig,
      sparksConfig,
    };
    await streamLogger.info(`exports: ${JSON.stringify(exports)}`);
    const fileData = Object.keys(exports).reduce((acc, currKey) => {
      return `${acc}\nexport const ${currKey} = ${exports[currKey]}`;
    }, ``);
    await streamLogger.info(`fileData: ${JSON.stringify(fileData)}`);
    const path = require("path");
    fs.writeFileSync(
      path.resolve(__dirname, "../functions/src/functionConfig.ts"),
      beautify(fileData, { indent_size: 2 })
    );
    return true;
  } catch (error) {
    streamLogger.error(error.message);
    return false;
  }
};

View File

@@ -1,83 +0,0 @@
import * as child from "child_process";
import admin from "firebase-admin";
import { commandErrorHandler, logErrorToDB } from "../utils";
// Fire-and-forget shell helper: logs the command and its full result, then
// hands only stdout to the caller (errors are logged, never raised).
function execute(command: string, callback: any) {
  console.log(command);
  const onDone = (error: any, stdout: any, stderr: any) => {
    console.log({ error, stdout, stderr });
    callback(stdout);
  };
  child.exec(command, onDone);
}
/**
 * Runs a shell command and resolves to `true` iff it exited cleanly.
 * The provided callback is awaited with (error, stdout, stderr) before the
 * promise settles, so callers can do async error reporting first.
 *
 * BUG FIX: the promise executor itself was `async` — an antipattern, since
 * a rejection inside an async executor is silently lost. The executor is now
 * synchronous; only the exec callback is async, which is safe.
 */
export const asyncExecute = async (command: string, callback: any) =>
  new Promise((resolve) => {
    child.exec(command, async function (error, stdout, stderr) {
      console.log({ error, stdout, stderr });
      await callback(error, stdout, stderr);
      resolve(!error);
    });
  });
/**
 * Installs the given npm packages into build/functions via `yarn add`.
 * Versions default to "latest". Resolves true when there is nothing to
 * install or the install succeeds; errors are reported through
 * commandErrorHandler.
 */
export const addPackages = async (
  packages: { name: string; version?: string }[],
  user: admin.auth.UserRecord,
  streamLogger
) => {
  // Build " name@version" specs (note the leading space per entry).
  const packagesString = packages
    .map((pkg) => ` ${pkg.name}@${pkg.version ?? "latest"}`)
    .join("");
  if (packagesString.trim().length === 0) return true; // nothing to install
  return await asyncExecute(
    `cd build/functions;yarn add ${packagesString}`,
    commandErrorHandler(
      {
        user,
        description: "Error adding packages",
      },
      streamLogger
    )
  );
};
/**
 * Installs a spark library into the build: adds the npm dependencies it
 * declares, then copies the spark source into the functions package.
 * Returns false if dependency parsing or any install/copy step fails.
 */
export const addSparkLib = async (
  name: string,
  user: admin.auth.UserRecord,
  streamLogger
) => {
  try {
    // Each sparksLib module exports a `dependencies` map: { pkg: version }.
    const { dependencies } = require(`../sparksLib/${name}`);
    const packages = Object.keys(dependencies).map((key) => ({
      name: key,
      version: dependencies[key],
    }));
    const success = await addPackages(packages, user, streamLogger);
    if (!success) {
      return false;
    }
  } catch (error) {
    // Missing module or malformed dependency map: report and bail out.
    logErrorToDB(
      {
        user,
        errorDescription: "Error parsing dependencies",
      },
      streamLogger
    );
    return false;
  }
  // Copy the spark implementation into the functions source tree.
  const success = await asyncExecute(
    `cp build/sparksLib/${name}.ts build/functions/src/sparks/${name}.ts`,
    commandErrorHandler(
      {
        user,
        description: "Error copying sparksLib",
      },
      streamLogger
    )
  );
  return success;
};

View File

@@ -1,18 +0,0 @@
{
"compilerOptions": {
"module": "commonjs",
"noImplicitReturns": true,
"noUnusedLocals": true,
"outDir": "lib",
"sourceMap": true,
"strict": true,
"noImplicitAny": false,
"resolveJsonModule": true,
"target": "es6",
"lib": ["ESNext"],
"strictNullChecks": false
},
"compileOnSave": true,
"include": ["src", "generateConfig.ts"],
"ignore": ["sparks", "sparksLib"]
}

View File

@@ -1,29 +0,0 @@
#!/bin/bash
# Builds the ft-builder container image and deploys it to Cloud Run.
# Usage: ./deploy.sh --project-id [YOUR GCLOUD PROJECT ID]

helpFunction()
{
  echo "Usage: ./deploy.sh --project-id [YOUR GCLOUD PROJECT ID]"
  exit 0
}

while test $# -gt 0; do
  case "$1" in
    --project-id)
      shift
      project_id=$1
      shift
      ;;
    *)
      echo "$1 is not a recognized flag!"
      # BUG FIX: 'return' is only valid inside a function or a sourced
      # script; in a directly executed script it errors out. Use exit.
      exit 1
      ;;
  esac
done

if [[ -z "$project_id" ]];
then
  helpFunction
fi

# Quote expansions so an unusual project id cannot split into extra args.
gcloud config set project "$project_id"
gcloud builds submit --tag "gcr.io/$project_id/ft-builder"
gcloud run deploy ft-builder --image "gcr.io/$project_id/ft-builder" --platform managed --memory 4Gi --allow-unauthenticated --set-env-vars="_PROJECT_ID=$project_id" --region=australia-southeast1

View File

@@ -1,9 +0,0 @@
{
"functions": {
"predeploy": [
"npm --prefix \"$RESOURCE_DIR\" run lint",
"npm --prefix \"$RESOURCE_DIR\" run build"
],
"source": "functions"
}
}

View File

@@ -1,10 +0,0 @@
// Initialize Firebase Admin with default (environment-provided) credentials.
import * as admin from "firebase-admin";

admin.initializeApp();

// Shared Firestore and Auth handles for the ft_build tooling.
const db = admin.firestore();
const auth = admin.auth();
// ignoreUndefinedProperties: drop undefined fields on write instead of
// throwing; timestampsInSnapshots retained for legacy SDK behavior.
db.settings({ timestampsInSnapshots: true, ignoreUndefinedProperties: true });
export { db, admin, auth };

View File

@@ -1,64 +0,0 @@
// ESLint configuration for this package.
// Uses the TypeScript parser with type-aware rules (requires tsconfig.json).
module.exports = {
  env: {
    browser: true,
    es6: true,
    node: true,
  },
  extends: ["plugin:import/errors", "plugin:import/warnings"],
  parser: "@typescript-eslint/parser",
  parserOptions: {
    // Needed by type-aware rules such as no-floating-promises.
    project: "tsconfig.json",
    sourceType: "module",
  },
  plugins: ["@typescript-eslint", "import"],
  rules: {
    // TypeScript-specific rules
    "@typescript-eslint/adjacent-overload-signatures": "error",
    "@typescript-eslint/no-empty-function": "error",
    "@typescript-eslint/no-empty-interface": "warn",
    "@typescript-eslint/no-floating-promises": "error",
    "@typescript-eslint/no-namespace": "error",
    "@typescript-eslint/no-unnecessary-type-assertion": "error",
    "@typescript-eslint/prefer-for-of": "warn",
    "@typescript-eslint/triple-slash-reference": "error",
    "@typescript-eslint/unified-signatures": "warn",
    // General style / correctness rules
    "comma-dangle": "warn",
    "constructor-super": "error",
    eqeqeq: ["warn", "always"],
    "import/no-deprecated": "warn",
    "import/no-extraneous-dependencies": "error",
    "import/no-unassigned-import": "warn",
    "no-cond-assign": "error",
    "no-duplicate-case": "error",
    "no-duplicate-imports": "error",
    "no-empty": [
      "error",
      {
        allowEmptyCatch: true,
      },
    ],
    "no-invalid-this": "error",
    "no-new-wrappers": "error",
    "no-param-reassign": "error",
    "no-redeclare": "error",
    "no-sequences": "error",
    "no-shadow": [
      "error",
      {
        hoist: "all",
      },
    ],
    "no-throw-literal": "error",
    "no-unsafe-finally": "error",
    "no-unused-labels": "error",
    "no-var": "warn",
    "no-void": "error",
    "prefer-const": "warn",
  },
  settings: {
    // JSDoc convention: prefer @return over @returns.
    jsdoc: {
      tagNamePreference: {
        returns: "return",
      },
    },
  },
};

View File

@@ -1,12 +0,0 @@
# Compiled JavaScript files
**/*.js
**/*.js.map
# Except the ESLint config file
!.eslintrc.js
# TypeScript v1 declaration files
typings/
# Node.js dependency directory
node_modules/

View File

@@ -1,38 +0,0 @@
{
"name": "functions",
"version": "0.0.1",
"scripts": {
"lint": "tslint --project tsconfig.json",
"build": "tsc",
"serve": "npm run build && firebase serve --only functions",
"shell": "npm run build && firebase functions:shell",
"start": "npm run shell",
"deployFT": "echo 'n\n' | firebase deploy --interactive",
"logs": "firebase functions:log"
},
"engines": {
"node": "14"
},
"main": "lib/index.js",
"dependencies": {
"@google-cloud/secret-manager": "^3.2.3",
"firebase-admin": "^9.4.2",
"firebase-functions": "^3.13.1"
},
"devDependencies": {
"@types/node": "^14.14.11",
"husky": "^4.2.5",
"prettier": "^2.1.1",
"pretty-quick": "^3.0.0",
"ts-node": "^8.6.2",
"tsc": "^1.20150623.0",
"tslint": "^6.1.0",
"typescript": "^4.1.2"
},
"husky": {
"hooks": {
"pre-commit": "pretty-quick --staged"
}
},
"private": true
}

View File

@@ -1,63 +0,0 @@
import * as functions from "firebase-functions";
import * as admin from "firebase-admin";
import { db, auth, storage } from "../firebaseConfig";
import utilFns from "../utils";
/**
 * Builds the handler that evaluates a table's derivative columns on a
 * Firestore document write. Each derivative re-runs only when one of its
 * listener fields changed; results are accumulated into a single object of
 * field updates that is returned to the caller (never written here).
 */
const derivative = (
  functionConfig: {
    fieldName: string;
    listenerFields: string[];
    evaluate: (props: {
      row: any;
      ref: FirebaseFirestore.DocumentReference<FirebaseFirestore.DocumentData>;
      db: FirebaseFirestore.Firestore;
      auth: admin.auth.Auth;
      storage: admin.storage.Storage;
      utilFns: any;
    }) => any;
  }[]
) => async (change: functions.Change<functions.firestore.DocumentSnapshot>) => {
  try {
    // change.after is absent on deletes; fall back to the before ref.
    const row = change.after?.data();
    const ref = change.after ? change.after.ref : change.before.ref;
    // The async reduce chains awaits, so derivatives evaluate sequentially
    // in config order, folding their results into one update object.
    const update = await functionConfig.reduce(
      async (accUpdates: any, currDerivative) => {
        // Re-evaluate only when a listened-to field (or the forced-update
        // marker _ft_forcedUpdateAt) changed in this write.
        const shouldEval = utilFns.hasChanged(change)([
          ...currDerivative.listenerFields,
          "_ft_forcedUpdateAt",
        ]);
        if (shouldEval) {
          try {
            const newValue = await currDerivative.evaluate({
              row,
              ref,
              db,
              auth,
              storage,
              utilFns,
            });
            // Skip undefined results and unchanged values to avoid
            // needless writes (and write loops).
            if (
              newValue !== undefined &&
              newValue !== row[currDerivative.fieldName]
            ) {
              return {
                ...(await accUpdates),
                [currDerivative.fieldName]: newValue,
              };
            }
          } catch (error) {
            // One failing evaluate only skips its column; others still run.
            console.log(error);
          }
        }
        return await accUpdates;
      },
      {}
    );
    return update;
  } catch (error) {
    // Fail safe: on any unexpected error, apply no updates at all.
    console.log(`Derivatives Error`, error);
    return {};
  }
};
export default derivative;

View File

@@ -1,19 +0,0 @@
// Initialize Firebase Admin
import * as functions from "firebase-functions";
import * as admin from "firebase-admin";
admin.initializeApp();
// Initialize Cloud Firestore Database
export const db = admin.firestore();
// Initialize Auth
export const auth = admin.auth();
// Initialize Storage
export const storage = admin.storage();
const settings = {
timestampsInSnapshots: true,
ignoreUndefinedProperties: true,
};
db.settings(settings);
export const env = functions.config();

View File

@@ -1,68 +0,0 @@
import * as functions from "firebase-functions";
import derivative from "./derivatives";
import spark from "./sparks";
import {
functionName,
triggerPath,
derivativesConfig,
documentSelectConfig,
sparksConfig,
initializeConfig,
fieldTypes,
} from "./functionConfig";
import { getTriggerType, changedDocPath } from "./utils";
import propagate from "./propagates";
import initialize from "./initialize";
// The single Cloud Function generated for this table: one onWrite trigger
// that fans out to sparks, change propagation, derivative evaluation and
// default-value initialization.
export const FT = {
  [functionName]: functions.firestore
    .document(triggerPath)
    .onWrite(async (change, context) => {
      const triggerType = getTriggerType(change);
      let promises: Promise<any>[] = [];
      // Start every spark whose `triggers` list includes this write type.
      const sparkPromises = sparksConfig
        .filter((sparkConfig) => sparkConfig.triggers.includes(triggerType))
        .map((sparkConfig) => spark(sparkConfig, fieldTypes)(change, context));
      console.log(
        `#${
          sparkPromises.length
        } sparks will be evaluated on ${triggerType} of ${changedDocPath(
          change
        )}`
      );
      promises = sparkPromises;
      // Propagate tracked-field changes to documents that reference this one.
      const propagatePromise = propagate(
        change,
        documentSelectConfig,
        triggerType
      );
      promises.push(propagatePromise);
      try {
        let docUpdates = {};
        if (triggerType === "update") {
          try {
            docUpdates = await derivative(derivativesConfig)(change);
          } catch (err) {
            console.log(`caught error: ${err}`);
          }
        } else if (triggerType === "create") {
          // On create: apply default values first, then derivatives on top
          // (derivative results win on key collisions).
          try {
            const initialData = await initialize(initializeConfig)(
              change.after
            );
            const derivativeData = await derivative(derivativesConfig)(change);
            docUpdates = { ...initialData, ...derivativeData };
          } catch (err) {
            console.log(`caught error: ${err}`);
          }
        }
        // Apply the accumulated field updates in a single write, if any.
        if (Object.keys(docUpdates).length !== 0) {
          promises.push(change.after.ref.update(docUpdates));
        }
        // allSettled: one failing spark/propagation must not cancel the rest.
        const result = await Promise.allSettled(promises);
        console.log(JSON.stringify(result));
      } catch (err) {
        console.log(`caught error: ${err}`);
      }
    }),
};

View File

@@ -1,30 +0,0 @@
import * as functions from "firebase-functions";
import utilFns from "../utils";
import { db, auth, storage } from "../firebaseConfig";
// Builds the initial values for a newly created row.
// Curried: configure with the column list, then apply to the new snapshot.
// Columns that already hold a value in the snapshot are left untouched.
const initializedDoc = (
  columns: { fieldName: string; type: string; value?: any; script?: any }[]
) => async (snapshot: functions.firestore.DocumentSnapshot) => {
  const initialValues: Record<string, any> = {};
  for (const column of columns) {
    // Never overwrite a value that was already written to the document.
    if (snapshot.get(column.fieldName) !== undefined) continue;
    if (column.type === "static") {
      initialValues[column.fieldName] = column.value;
    } else if (column.type === "null") {
      initialValues[column.fieldName] = null;
    } else if (column.type === "dynamic") {
      // Dynamic defaults run the column's script with full context,
      // sequentially in column order (matching the original reduce chain).
      initialValues[column.fieldName] = await column.script({
        row: snapshot.data(),
        ref: snapshot.ref,
        db,
        auth,
        storage,
        utilFns,
      });
    }
  }
  return initialValues;
};
export default initializedDoc;

View File

@@ -1,98 +0,0 @@
import { DocumentSnapshot } from "firebase-functions/lib/providers/firestore";
import { rowReducer } from "../utils";
import { db } from "../firebaseConfig";
const TARGET_SUB_COLLECTION = "_FT_BINDINGS";
//sample binding document
// /_FT_BINDINGS/{docId}
// docId is encodeURIComponent of docPath
/**
{
[targetCollectionName]:{
[targetField]:{
trackedFields:[]
targets{
[docId]:true
}
}
}
}
*/
// source changes Trigger
// check and propagate any tracked changes to
// Source-changes trigger: push the source doc's tracked fields into every
// target document that keeps a snapshot copy of it.
// Looks up the binding doc at /_FT_BINDINGS/<encoded source doc path>;
// resolves false when no bindings exist for this doc.
export const propagateChanges = (docSnapshot: DocumentSnapshot) =>
  new Promise((resolve, reject) =>
    db
      .collection(TARGET_SUB_COLLECTION)
      .doc(encodeURIComponent(docSnapshot.ref.path))
      .get()
      .then((doc) => {
        const promises = [];
        const docData = doc.data();
        // No binding doc: nothing references this source doc.
        if (!doc.exists) {
          resolve(false);
          return;
        }
        // Binding doc shape (see comment above): { [encodedCollectionPath]:
        //   { [targetField]: { trackedFields, targets: { [docId]: true } } } }
        const targetCollectionPaths = Object.keys(docData);
        targetCollectionPaths.forEach((cPath) => {
          const targetFieldKeys = Object.keys(docData[cPath]);
          targetFieldKeys.forEach((targetFieldKey) => {
            const { trackedFields, targets } = docData[cPath][targetFieldKey];
            const fieldPromises = Object.keys(targets).map(
              async (targetDocId) => {
                const targetRef = db
                  .collection(decodeURIComponent(cPath))
                  .doc(targetDocId);
                const targetDoc = await targetRef.get();
                // Target was deleted since the binding was written.
                if (!targetDoc.exists) return false;
                const targetFieldValue = targetDoc.get(targetFieldKey);
                // Locate this source doc's entry in the target's array field
                // and refresh only its snapshot of the tracked fields.
                const indexOfCurrentTarget = targetFieldValue.findIndex(
                  (element) => element.docPath === docSnapshot.ref.path
                );
                if (indexOfCurrentTarget > -1) {
                  targetFieldValue[indexOfCurrentTarget].snapshot = rowReducer(
                    trackedFields,
                    docSnapshot.data()
                  );
                  await targetRef.update({
                    [targetFieldKey]: targetFieldValue,
                  });
                  return true;
                } else return false;
              }
            );
            fieldPromises.forEach((p) => promises.push(p));
          });
        });
        // allSettled: one unreachable target must not block the rest.
        resolve(Promise.allSettled(promises));
        return;
      })
  );
// When a source document is deleted, remove every denormalised snapshot copy
// of it from the target documents that reference it, then delete the binding
// doc. Returns a promise that settles only after ALL copies are removed.
// BUG FIX: the original `.map(async …)` created floating promises inside the
// `.then` callback and never awaited them, so callers (Promise.allSettled in
// the trigger) observed completion before the updates/deletes actually ran.
export const removeCopiesOfDeleteDoc = (
  sourceDocRef: FirebaseFirestore.DocumentReference
) =>
  sourceDocRef
    .collection(TARGET_SUB_COLLECTION)
    .get()
    .then((queryResult) =>
      Promise.all(
        queryResult.docs.map(async (doc) => {
          const { targetRef, targetFieldKey } = doc.data() as {
            targetRef: FirebaseFirestore.DocumentReference;
            targetFieldKey: string;
          };
          const targetDoc = await targetRef.get();
          // Drop the entry whose docPath points at the deleted source doc.
          const currentTargetFieldValue = targetDoc.get(targetFieldKey);
          const newTargetFieldValue = currentTargetFieldValue.filter(
            ({ docPath }: { docPath: string; snapshot: any }) =>
              docPath !== sourceDocRef.path
          );
          await targetRef.update({ [targetFieldKey]: newTargetFieldValue });
          await doc.ref.delete();
        })
      )
    );

View File

@@ -1,103 +0,0 @@
import * as admin from "firebase-admin";
const fieldValue = admin.firestore.FieldValue;
import { db } from "../firebaseConfig";
const TARGET_SUB_COLLECTION = "_FT_BINDINGS";
//sample bindings document
// /_FT_BINDINGS/{docId}
// docId is encodeURIComponent of docPath
/**
{
[targetCollectionName]:{
[targetField]:{
trackedFields:[]
targets{
[docId]:true
}
}
}
}
*/
// Target-changes trigger: record (or refresh) a propagation binding so that
// future edits to the source doc are synced into targetRef's targetFieldKey.
export const addTargetRef = (
  targetRef: FirebaseFirestore.DocumentReference,
  sourceDocPath: string,
  targetFieldKey: string,
  trackedFields
) => {
  const bindingRef = db.doc(
    `${TARGET_SUB_COLLECTION}/${encodeURIComponent(sourceDocPath)}`
  );
  // Keys are URI-encoded so collection paths are valid Firestore map keys.
  const binding = {
    [encodeURIComponent(targetRef.parent.path)]: {
      [targetFieldKey]: {
        trackedFields,
        targets: { [targetRef.id]: true },
      },
    },
  };
  return bindingRef.set(binding, { merge: true });
};
// Remove a propagation binding: the target doc no longer links to this
// source, so stop syncing into it.
export const removeTargetRef = (
  targetRef: FirebaseFirestore.DocumentReference,
  sourceDocPath: string,
  targetFieldKey: string
) => {
  const bindingRef = db.doc(
    `${TARGET_SUB_COLLECTION}/${encodeURIComponent(sourceDocPath)}`
  );
  // FieldValue.delete() with merge drops just this target's entry.
  const removal = {
    [encodeURIComponent(targetRef.parent.path)]: {
      [targetFieldKey]: {
        targets: { [targetRef.id]: fieldValue.delete() },
      },
    },
  };
  return bindingRef.set(removal, { merge: true });
};
// db
// .doc(`${sourceDocPath}/${TARGET_SUB_COLLECTION}/${encodeURIComponent(targetRef.parent.path)}`)
// .set({ [targetFieldKey]:{targets:{[targetRef.id]:fieldValue.delete()}}},{merge: true});
// new Promise((resolve, reject) => db
// .collection(`${sourceDocPath}/${TARGET_SUB_COLLECTION}`)
// .where("targetRef", "==", targetRef)
// .where("targetFieldKey","==",targetFieldKey)
// .get()
// .then((queryResult) => resolve(Promise.all(queryResult.docs.map((doc) => doc.ref.delete())))));
// Removes every binding reference to a target document when that target is
// deleted, so sources stop propagating into it.
// BUG FIX: the query's field path must use the URI-ENCODED collection path —
// that is how addTargetRef writes the binding keys — otherwise bindings for
// any path needing encoding (e.g. subcollections) are never found.
// NOTE(review): encoded paths may contain "%", which is invalid in dotted
// string field paths; consider admin.firestore.FieldPath if subcollection
// targets are used — confirm against real binding docs.
// Also drops the redundant `new Promise` wrapper around the query chain.
export const removeRefsOnTargetDelete = (
  targetRef: FirebaseFirestore.DocumentReference,
  targetFieldKey: string
) => {
  const encodedCollectionPath = encodeURIComponent(targetRef.parent.path);
  return db
    .collection(TARGET_SUB_COLLECTION)
    .where(
      `${encodedCollectionPath}.${targetFieldKey}.targets.${targetRef.id}`,
      "==",
      true
    )
    .get()
    .then((queryResult) =>
      Promise.all(
        queryResult.docs.map((doc) =>
          doc.ref.set(
            {
              [encodedCollectionPath]: {
                [targetFieldKey]: {
                  targets: { [targetRef.id]: fieldValue.delete() },
                },
              },
            },
            { merge: true }
          )
        )
      )
    );
};

View File

@@ -1,81 +0,0 @@
import * as functions from "firebase-functions";
import { propagateChanges, removeCopiesOfDeleteDoc } from "./SourceFns";
import {
addTargetRef,
removeTargetRef,
removeRefsOnTargetDelete,
} from "./TargetFns";
//import { asyncForEach} from '../utils'
// Dispatches source-side propagation work for a write.
// Creates have no pre-existing copies to sync, so they are a no-op (false).
const propagateChangesOnTrigger = (
  change: functions.Change<functions.firestore.DocumentSnapshot>,
  triggerType: "delete" | "create" | "update"
) => {
  if (triggerType === "update") return propagateChanges(change.after);
  if (triggerType === "delete") return removeCopiesOfDeleteDoc(change.before.ref);
  return false;
};
// Keeps binding docs in sync with a target doc's link field: adds bindings
// for newly linked source docs, removes bindings for unlinked ones.
// Returns false when the link field did not change.
const updateLinks = (
  change: functions.Change<functions.firestore.DocumentSnapshot>,
  config: { fieldName: string; trackedFields: string[] }
) => {
  // Extract the linked docPaths from a snapshot's link field (may be unset).
  const pathsOf = (snapshot: functions.firestore.DocumentSnapshot) => {
    const links = snapshot.get(config.fieldName);
    return links ? links.map((link) => link.docPath) : [];
  };
  const beforeDocPaths = pathsOf(change.before);
  const afterDocPaths = pathsOf(change.after);
  const addedDocPaths = afterDocPaths.filter(
    (p) => !beforeDocPaths.includes(p)
  );
  const removedDocPaths = beforeDocPaths.filter(
    (p) => !afterDocPaths.includes(p)
  );
  // No membership change: nothing to write.
  if (addedDocPaths.length === 0 && removedDocPaths.length === 0) return false;
  return Promise.all([
    ...addedDocPaths.map((docPath) =>
      addTargetRef(
        change.after.ref,
        docPath,
        config.fieldName,
        config.trackedFields
      )
    ),
    ...removedDocPaths.map((docPath) =>
      removeTargetRef(change.after.ref, docPath, config.fieldName)
    ),
  ]);
};
// Entry point for document-select propagation on a Firestore write.
// The changed doc may act both as a SOURCE (other docs hold snapshots of it)
// and as a TARGET (it holds link fields configured in `config`).
export default function propagate(
  change: functions.Change<functions.firestore.DocumentSnapshot>,
  config: { fieldName: string; trackedFields: string[] }[],
  triggerType: "delete" | "create" | "update"
) {
  const promises = [];
  // As a source: push tracked-field changes (update) or strip copies (delete).
  if (["delete", "update"].includes(triggerType)) {
    const propagateChangesPromise = propagateChangesOnTrigger(
      change,
      triggerType
    );
    promises.push(propagateChangesPromise);
  }
  // As a target: maintain the binding docs for each configured link field.
  if (config.length > 0) {
    if (triggerType === "delete") {
      config.forEach((c) =>
        promises.push(removeRefsOnTargetDelete(change.before.ref, c.fieldName))
      );
    } else if (triggerType === "update") {
      config.forEach((c) => promises.push(updateLinks(change, c)));
    }
  }
  // allSettled: one failed propagation must not abort the others.
  return Promise.allSettled(promises);
}

View File

@@ -1,77 +0,0 @@
import * as functions from "firebase-functions";
import utilFns, { hasRequiredFields, getTriggerType } from "../utils";
import { db, auth, storage } from "../firebaseConfig";
// Executes a single configured spark for a Firestore write.
// Curried: (sparkConfig, fieldTypes) => onWrite handler.
// Resolves true when the spark ran, false when it was skipped, and rejects
// when the spark body or its type module throws.
const spark = (sparkConfig, fieldTypes) => async (
  change: functions.Change<functions.firestore.DocumentSnapshot>,
  context: functions.EventContext
) => {
  const beforeData = change.before?.data();
  const afterData = change.after?.data();
  // On deletes only `before` exists; otherwise use the post-write snapshot.
  const ref = change.after ? change.after.ref : change.before.ref;
  const triggerType = getTriggerType(change);
  try {
    const {
      label,
      type,
      triggers,
      shouldRun,
      requiredFields,
      sparkBody,
    } = sparkConfig;
    // Context handed to shouldRun and to every sparkBody function.
    const sparkContext = {
      row: triggerType === "delete" ? beforeData : afterData,
      ref,
      db,
      auth,
      change,
      triggerType,
      sparkConfig,
      utilFns,
      fieldTypes,
      storage,
    };
    if (!triggers.includes(triggerType)) return false; // skip: spark not configured for this trigger type
    if (
      triggerType !== "delete" &&
      requiredFields &&
      requiredFields.length !== 0 &&
      !hasRequiredFields(requiredFields, afterData)
    ) {
      console.log("requiredFields are ", requiredFields, "type is", type);
      return false; // skip: doc does not yet have all fields the spark needs
    }
    // shouldRun may be a boolean or an (async) predicate over sparkContext.
    const dontRun = shouldRun
      ? !(typeof shouldRun === "function"
          ? await shouldRun(sparkContext)
          : shouldRun)
      : false; // no shouldRun configured: always run
    console.log(label, "type is ", type, "dontRun value is", dontRun);
    if (dontRun) return false;
    // Resolve each sparkBody entry; entries may be static values or
    // (possibly async) functions of sparkContext.
    const sparkData = await Object.keys(sparkBody).reduce(
      async (acc, key) => ({
        [key]:
          typeof sparkBody[key] === "function"
            ? await sparkBody[key](sparkContext)
            : sparkBody[key],
        ...(await acc),
      }),
      {}
    );
    console.log(JSON.stringify(sparkData));
    // Dispatch to the spark implementation module named by `type`
    // (e.g. ./algoliaIndex) and run it with the resolved data.
    const sparkFn = require(`./${type}`).default;
    await sparkFn(sparkData, sparkContext);
    return true;
  } catch (err) {
    const { label, type } = sparkConfig;
    console.log(
      `error in ${label} spark of type ${type}, on ${context.eventType} in Doc ${context.resource.name}`
    );
    console.error(err);
    return Promise.reject(err);
  }
};
export default spark;

View File

@@ -1,17 +0,0 @@
import * as functions from "firebase-functions";
// Returns true when the calling user holds at least one of the authorized
// roles (read from the Firebase Auth custom claim `roles`).
export const hasAnyRole = (
  authorizedRoles: string[],
  context: functions.https.CallableContext
) => {
  // Unauthenticated requests, or tokens without role claims, never match.
  if (!context.auth || !context.auth.token.roles) return false;
  const userRoles = context.auth.token.roles as string[];
  return authorizedRoles.some((role) => userRoles.includes(role));
};

View File

@@ -1,3 +0,0 @@
// Stub: queue an email for delivery. Currently a no-op.
// NOTE(review): intended to publish `msg` to a Pub/Sub topic — not implemented.
export const sendEmail = (msg: any) => {
  // pubsub.push
};

View File

@@ -1,77 +0,0 @@
import * as admin from "firebase-admin";
import * as functions from "firebase-functions";
export const serverTimestamp = admin.firestore.FieldValue.serverTimestamp;
import { sendEmail } from "./email";
import { hasAnyRole } from "./auth";
import { SecretManagerServiceClient } from "@google-cloud/secret-manager";
const secrets = new SecretManagerServiceClient();
// Fetches a secret from GCP Secret Manager (version `v`, default "latest").
// Payloads that look like JSON objects are parsed; others returned as-is.
export const getSecret = async (name: string, v: string = "latest") => {
  const fullName = `projects/${process.env.GCLOUD_PROJECT}/secrets/${name}/versions/${v}`;
  const [version] = await secrets.accessSecretVersion({ name: fullName });
  const payload = version.payload?.data?.toString();
  return payload && payload.startsWith("{") ? JSON.parse(payload) : payload;
};
// Alphabet for generated IDs: digits + upper/lowercase ASCII letters.
const characters =
  "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz";
// Returns a random alphanumeric string of the requested length.
// Not cryptographically secure (uses Math.random).
export function generateId(length: number): string {
  return Array.from({ length }, () =>
    characters.charAt(Math.floor(Math.random() * characters.length))
  ).join("");
}
// True when every required field is present (neither undefined nor null)
// on `data`. Vacuously true for an empty requiredFields list.
export const hasRequiredFields = (requiredFields: string[], data: any) =>
  requiredFields.every(
    (field) => data[field] !== undefined && data[field] !== null
  );
// Sequentially awaits `callback(item, index, array)` for each element —
// unlike Array.forEach, which would not await async callbacks.
export async function asyncForEach(array: any[], callback: Function) {
  for (const [index, item] of array.entries()) {
    await callback(item, index, array);
  }
}
// Classifies an onWrite change by which snapshots carry data:
// both → "update", only after → "create", only before → "delete".
export const getTriggerType = (change) => {
  const existsAfter = Boolean(change.after.data());
  const existsBefore = Boolean(change.before.data());
  if (existsAfter && existsBefore) return "update";
  return existsAfter ? "create" : "delete";
};
// Path of the changed document; prefers the before snapshot so deletes
// still resolve a path.
export const changedDocPath = (
  change: functions.Change<functions.firestore.DocumentSnapshot>
) => {
  const beforePath = change.before?.ref.path;
  return beforePath ?? change.after.ref.path;
};
// Projects `row` down to the listed fields, dropping undefined/null values.
export const rowReducer = (fieldsToSync, row) => {
  const synced: any = {};
  for (const field of fieldsToSync) {
    const value = row[field];
    if (value !== undefined && value !== null) synced[field] = value;
  }
  return synced;
};
// Curried change-detector: hasChanged(change)(trackedFields) reports whether
// any tracked field's JSON serialisation differs between before and after.
// A freshly created doc counts as changed; a deleted doc does not.
const hasChanged = (
  change: functions.Change<functions.firestore.DocumentSnapshot>
) => (trackedFields: string[]) => {
  const beforeData = change.before?.data();
  const afterData = change.after?.data();
  if (!beforeData && afterData) return true;
  if (beforeData && !afterData) return false;
  return trackedFields.some(
    (trackedField) =>
      JSON.stringify(beforeData[trackedField]) !==
      JSON.stringify(afterData[trackedField])
  );
};
// Grab-bag of utilities exposed to spark scripts and dynamic column code
// as `utilFns`.
export default {
  hasChanged,
  getSecret,
  hasRequiredFields,
  generateId,
  sendEmail,
  serverTimestamp,
  hasAnyRole,
  asyncForEach,
};

View File

@@ -1,18 +0,0 @@
{
"compilerOptions": {
"module": "commonjs",
"noImplicitReturns": true,
"noUnusedLocals": true,
"outDir": "lib",
"sourceMap": true,
"strict": true,
"noImplicitAny": false,
"resolveJsonModule": true,
"target": "es6",
"lib": ["ESNext"],
"strictNullChecks": false
},
"compileOnSave": true,
"include": ["src", "generateConfig.ts"],
"ignore": ["sparks"]
}

View File

@@ -1,116 +0,0 @@
{
"rules": {
// -- Strict errors --
// These lint rules are likely always a good idea.
// Force function overloads to be declared together. This ensures readers understand APIs.
"adjacent-overload-signatures": true,
// Do not allow the subtle/obscure comma operator.
"ban-comma-operator": true,
// Do not allow internal modules or namespaces . These are deprecated in favor of ES6 modules.
"no-namespace": true,
// Do not allow parameters to be reassigned. To avoid bugs, developers should instead assign new values to new vars.
"no-parameter-reassignment": true,
// Force the use of ES6-style imports instead of /// <reference path=> imports.
"no-reference": true,
// Do not allow type assertions that do nothing. This is a big warning that the developer may not understand the
// code currently being edited (they may be incorrectly handling a different type case that does not exist).
"no-unnecessary-type-assertion": true,
// Disallow nonsensical label usage.
"label-position": true,
// Disallows the (often typo) syntax if (var1 = var2). Replace with if (var2) { var1 = var2 }.
"no-conditional-assignment": true,
// Disallows constructors for primitive types (e.g. new Number('123'), though Number('123') is still allowed).
"no-construct": true,
// Do not allow super() to be called twice in a constructor.
"no-duplicate-super": true,
// Do not allow the same case to appear more than once in a switch block.
"no-duplicate-switch-case": true,
// Do not allow a variable to be declared more than once in the same block. Consider function parameters in this
// rule.
"no-duplicate-variable": [true, "check-parameters"],
// Disallows a variable definition in an inner scope from shadowing a variable in an outer scope. Developers should
// instead use a separate variable name.
"no-shadowed-variable": true,
// Empty blocks are almost never needed. Allow the one general exception: empty catch blocks.
"no-empty": [true, "allow-empty-catch"],
// Functions must either be handled directly (e.g. with a catch() handler) or returned to another function.
// This is a major source of errors in Cloud Functions and the team strongly recommends leaving this rule on.
"no-floating-promises": true,
// Do not allow any imports for modules that are not in package.json. These will almost certainly fail when
// deployed.
"no-implicit-dependencies": true,
// The 'this' keyword can only be used inside of classes.
"no-invalid-this": true,
// Do not allow strings to be thrown because they will not include stack traces. Throw Errors instead.
"no-string-throw": true,
// Disallow control flow statements, such as return, continue, break, and throw in finally blocks.
"no-unsafe-finally": true,
// Expressions must always return a value. Avoids common errors like const myValue = functionReturningVoid();
"no-void-expression": [true, "ignore-arrow-function-shorthand"],
// Disallow duplicate imports in the same file.
"no-duplicate-imports": true,
// -- Strong Warnings --
// These rules should almost never be needed, but may be included due to legacy code.
// They are left as a warning to avoid frustration with blocked deploys when the developer
// understands the warning and wants to deploy anyway.
// Warn when an empty interface is defined. These are generally not useful.
"no-empty-interface": { "severity": "warning" },
// Warn when an import will have side effects.
"no-import-side-effect": { "severity": "warning" },
// Warn when variables are defined with var. Var has subtle meaning that can lead to bugs. Strongly prefer const for
// most values and let for values that will change.
"no-var-keyword": { "severity": "warning" },
// Prefer === and !== over == and !=. The latter operators support overloads that are often accidental.
"triple-equals": { "severity": "warning" },
// Warn when using deprecated APIs.
"deprecation": { "severity": "warning" },
// -- Light Warnings --
// These rules are intended to help developers use better style. Simpler code has fewer bugs. These would be "info"
// if TSLint supported such a level.
// prefer for( ... of ... ) to an index loop when the index is only used to fetch an object from an array.
// (Even better: check out utils like .map if transforming an array!)
"prefer-for-of": { "severity": "warning" },
// Warns if function overloads could be unified into a single function with optional or rest parameters.
"unified-signatures": { "severity": "warning" },
// Prefer const for values that will not change. This better documents code.
"prefer-const": { "severity": "warning" },
// Multi-line object literals and function calls should have a trailing comma. This helps avoid merge conflicts.
"trailing-comma": { "severity": "warning" }
},
"linterOptions": {
"exclude": ["./generator/templates/**"]
},
"defaultSeverity": "error"
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,140 +0,0 @@
const express = require("express");
const bodyParser = require("body-parser");
const cors = require("cors");
import { asyncExecute } from "./compiler/terminal";
import { createStreamLogger } from "./utils";
import generateConfig from "./compiler";
import { auth } from "./firebaseConfig";
import meta from "./package.json";
import { commandErrorHandler, logErrorToDB } from "./utils";
import firebase from "firebase-admin";
// Express app for the Firetable cloud-function builder service.
const app = express();
const jsonParser = bodyParser.json();
// CORS is wide open; the POST endpoint enforces auth itself.
app.use(cors());
// Health/info endpoint: reports the builder version.
app.get("/", async (req: any, res: any) => {
  res.send(`Firetable cloud function builder version ${meta.version}`);
});
app.post("/", jsonParser, async (req: any, res: any) => {
let user: firebase.auth.UserRecord;
const userToken = req?.body?.token;
if (!userToken) {
console.log("missing auth token");
res.send({
success: false,
reason: "missing auth token",
});
return;
}
try {
const decodedToken = await auth.verifyIdToken(userToken);
const uid = decodedToken.uid;
user = await auth.getUser(uid);
const roles = user?.customClaims?.roles;
if (!roles || !Array.isArray(roles) || !roles?.includes("ADMIN")) {
await logErrorToDB({
errorDescription: `user is not admin`,
user,
});
res.send({
success: false,
reason: `user is not admin`,
});
return;
}
console.log("successfully authenticated");
} catch (error) {
await logErrorToDB({
errorDescription: `error verifying auth token: ${error}`,
user,
});
res.send({
success: false,
reason: `error verifying auth token: ${error}`,
});
return;
}
const configPath = req?.body?.configPath;
console.log("configPath:", configPath);
if (!configPath) {
await logErrorToDB({
errorDescription: `Invalid configPath (${configPath})`,
user,
});
res.send({
success: false,
reason: "invalid configPath",
});
}
const streamLogger = await createStreamLogger(configPath);
await streamLogger.info("streamLogger created");
const success = await generateConfig(configPath, user, streamLogger);
if (!success) {
await streamLogger.error("generateConfig failed to complete");
await streamLogger.fail();
res.send({
success: false,
reason: `generateConfig failed to complete`,
});
return;
}
await streamLogger.info("generateConfig success");
let hasEnvError = false;
if (!process.env._PROJECT_ID) {
await logErrorToDB(
{
errorDescription: `Invalid env: _PROJECT_ID (${process.env._PROJECT_ID})`,
user,
},
streamLogger
);
hasEnvError = true;
}
if (hasEnvError) {
await streamLogger.error("Invalid env:_PROJECT_ID");
await streamLogger.fail();
res.send({
success: false,
reason: "Invalid env:_PROJECT_ID",
});
return;
}
await asyncExecute(
`cd build/functions; \
yarn install`,
commandErrorHandler({ user }, streamLogger)
);
await asyncExecute(
`cd build/functions; \
yarn deployFT \
--project ${process.env._PROJECT_ID} \
--only functions`,
commandErrorHandler({ user }, streamLogger)
);
await streamLogger.end();
res.send({
success: true,
});
});
// Cloud Run provides PORT; default to 8080 for local runs.
const port = process.env.PORT || 8080;
app.listen(port, () => {
  console.log(
    `Firetable cloud function builder ${meta.version}: listening on port ${port}`
  );
});

View File

@@ -1,34 +0,0 @@
{
"name": "ft-functions-builder",
"description": "Manages the build and deployment of Firetable cloud functions",
"version": "1.0.0",
"private": true,
"main": "index.js",
"scripts": {
"start": "yarn build && node build",
"build": "rm -rf build && tsc --project ./ && cp -r functions build && cp -r sparksLib build",
"deploy": "./deploy.sh"
},
"engines": {
"node": "14"
},
"dependencies": {
"body-parser": "^1.19.0",
"cors": "^2.8.5",
"express": "^4.17.1",
"firebase-admin": "^9.2.0",
"firebase-functions": "^3.11.0"
},
"devDependencies": {
"@types/express": "^4.17.11",
"@types/node": "^14.14.33",
"firebase-tools": "^8.7.0",
"husky": "^4.2.5",
"js-beautify": "^1.13.0",
"prettier": "^2.1.1",
"pretty-quick": "^3.0.0",
"ts-node": "^9.1.1",
"tslint": "^6.1.0",
"typescript": "^4.2.3"
}
}

View File

@@ -1,109 +0,0 @@
// npm dependencies the builder must install for this spark to run.
export const dependencies = {
  algoliasearch: "^4.8.3",
};
// Lodash-style get: resolves a dotted/bracketed path on `obj`, returning
// `defaultValue` when nothing is found (or the path is empty).
const get = (obj, path, defaultValue = undefined) => {
  // Walk `obj` along the keys produced by splitting `path` on `splitter`.
  const walk = (splitter) =>
    String.prototype.split
      .call(path, splitter)
      .filter(Boolean)
      .reduce(
        (node, key) => (node !== null && node !== undefined ? node[key] : node),
        obj
      );
  // First try without splitting on dots, then with; fall back to the default
  // when nothing was found or the path resolved back to `obj` itself.
  const result = walk(/[,[\]]+?/) || walk(/[,[\].]+?/);
  return result === undefined || result === obj ? defaultValue : result;
};
// Trims a document-select entry down to its docPath plus the preserved
// snapshot keys that hold truthy values. When none of the preserved keys
// resolve, the `snapshot` property is omitted entirely.
const filterSnapshot = (
  field: { docPath: string; snapshot: any },
  preservedKeys: string[]
) => {
  const picked: any = {};
  // Insert in reverse so the key order matches the original spread-based
  // construction ({ [key]: value, ...acc.snapshot }).
  for (const key of [...preservedKeys].reverse()) {
    const value = get(field.snapshot, key);
    if (value) picked[key] = value;
  }
  return Object.keys(picked).length
    ? { docPath: field.docPath, snapshot: picked }
    : { docPath: field.docPath };
};
// Builds the record to index from `row`, keeping only the configured fields.
// - Firestore Timestamp values are flattened to unix seconds.
// - undefined/null values are skipped. BUG FIX: the original null-check used
//   `row[curr] !== undefined || row[curr] !== null`, which is always true and
//   therefore synced null values too (the utils rowReducer uses `&&`).
// - Object entries ({ fieldName, snapshotFields }) trim each document-select
//   snapshot down to the listed snapshotFields.
const rowReducer = (fieldsToSync, row) =>
  fieldsToSync.reduce(
    (
      acc: any,
      curr: string | { fieldName: string; snapshotFields: string[] }
    ) => {
      if (typeof curr === "string") {
        if (row[curr] && typeof row[curr].toDate === "function") {
          return {
            ...acc,
            [curr]: row[curr].toDate().getTime() / 1000,
          };
        } else if (row[curr] !== undefined && row[curr] !== null) {
          return { ...acc, [curr]: row[curr] };
        } else {
          return acc;
        }
      } else {
        if (row[curr.fieldName] && curr.snapshotFields) {
          return {
            ...acc,
            [curr.fieldName]: row[curr.fieldName].map((snapshot) =>
              filterSnapshot(snapshot, curr.snapshotFields)
            ),
          };
        } else {
          return acc;
        }
      }
    },
    {}
  );
// True when any synced field's JSON serialisation changed between the
// before and after snapshots of the write.
const significantDifference = (fieldsToSync, change) => {
  const beforeData = change.before.data();
  const afterData = change.after.data();
  return fieldsToSync.some((field) => {
    const key = typeof field === "string" ? field : field.fieldName;
    return JSON.stringify(beforeData[key]) !== JSON.stringify(afterData[key]);
  });
};
// Syncs one Firetable row into an Algolia index.
// `data`: { row, objectID, index, fieldsToSync }; credentials come from the
// "algolia" secret ({ appId, adminKey }) in Secret Manager.
const algoliaIndex = async (data, sparkContext) => {
  const { row, objectID, index, fieldsToSync } = data;
  const { triggerType, change } = sparkContext;
  const record = rowReducer(fieldsToSync, row);
  const algoliasearch = require("algoliasearch");
  const { getSecret } = require("../utils");
  const { appId, adminKey } = await getSecret("algolia");
  console.log(`algolia app id : ${appId}`);
  const client = algoliasearch(appId, adminKey);
  const _index = client.initIndex(index); // initialize algolia index
  switch (triggerType) {
    case "delete":
      await _index.deleteObject(objectID);
      break;
    case "update":
      // Only push when a synced field actually changed (or a forced update
      // was requested via _ft_forcedUpdateAt) to save Algolia operations.
      if (
        significantDifference([...fieldsToSync, "_ft_forcedUpdateAt"], change)
      ) {
        // BUG FIX: await the save so failures reject this spark instead of
        // becoming a floating promise (delete/create already awaited).
        await _index.saveObject({ ...record, objectID });
      }
      break;
    case "create":
      await _index.saveObject({ ...record, objectID });
      break;
    default:
      break;
  }
  return true;
};
export default algoliaIndex;

View File

@@ -1,11 +0,0 @@
// npm dependencies the builder must install for this spark to run.
export const dependencies = {
  "node-fetch": "2.6.1",
};
// Generic HTTP spark: performs the request and hands the parsed JSON
// response to the configured callback; resolves with the callback's result.
const api = async (args) => {
  const { body, url, method, callback } = args;
  const fetch = require("node-fetch");
  const response = await fetch(url, { method, body });
  const json = await response.json();
  return callback(json);
};
export default api;
View File

@@ -1,424 +0,0 @@
// npm dependencies the builder must install for this spark to run.
export const dependencies = {
  "@google-cloud/bigquery": "^5.5.0",
};
// Lodash-style get: resolves a dotted/bracketed path on `obj`, returning
// `defaultValue` when nothing is found (or the path is empty).
const get = (obj, path, defaultValue = undefined) => {
  // Walk `obj` along the keys produced by splitting `path` on `splitter`.
  const walk = (splitter) =>
    String.prototype.split
      .call(path, splitter)
      .filter(Boolean)
      .reduce(
        (node, key) => (node !== null && node !== undefined ? node[key] : node),
        obj
      );
  // First try without splitting on dots, then with; fall back to the default
  // when nothing was found or the path resolved back to `obj` itself.
  const result = walk(/[,[\]]+?/) || walk(/[,[\].]+?/);
  return result === undefined || result === obj ? defaultValue : result;
};
// Trims a document-select entry down to its docPath plus the preserved
// snapshot keys that hold truthy values. When none of the preserved keys
// resolve, the `snapshot` property is omitted entirely.
const filterSnapshot = (
  field: { docPath: string; snapshot: any },
  preservedKeys: string[]
) => {
  const picked: any = {};
  // Insert in reverse so the key order matches the original spread-based
  // construction ({ [key]: value, ...acc.snapshot }).
  for (const key of [...preservedKeys].reverse()) {
    const value = get(field.snapshot, key);
    if (value) picked[key] = value;
  }
  return Object.keys(picked).length
    ? { docPath: field.docPath, snapshot: picked }
    : { docPath: field.docPath };
};
// Builds the record to sync from `row`, keeping only the configured fields.
// - Firestore Timestamp values are flattened to unix seconds.
// - undefined/null values are skipped. BUG FIX: the original null-check used
//   `row[curr] !== undefined || row[curr] !== null`, which is always true and
//   therefore synced null values too (the utils rowReducer uses `&&`).
// - Object entries ({ fieldName, snapshotFields }) trim each document-select
//   snapshot down to the listed snapshotFields.
const rowReducer = (fieldsToSync, row) =>
  fieldsToSync.reduce(
    (
      acc: any,
      curr: string | { fieldName: string; snapshotFields: string[] }
    ) => {
      if (typeof curr === "string") {
        if (row[curr] && typeof row[curr].toDate === "function") {
          return {
            ...acc,
            [curr]: row[curr].toDate().getTime() / 1000,
          };
        } else if (row[curr] !== undefined && row[curr] !== null) {
          return { ...acc, [curr]: row[curr] };
        } else {
          return acc;
        }
      } else {
        if (row[curr.fieldName] && curr.snapshotFields) {
          return {
            ...acc,
            [curr.fieldName]: row[curr.fieldName].map((snapshot) =>
              filterSnapshot(snapshot, curr.snapshotFields)
            ),
          };
        } else {
          return acc;
        }
      }
    },
    {}
  );
// True when any synced field's JSON serialisation changed between the
// before and after snapshots of the write.
const significantDifference = (fieldsToSync, change) => {
  const beforeData = change.before.data();
  const afterData = change.after.data();
  return fieldsToSync.some((field) => {
    const key = typeof field === "string" ? field : field.fieldName;
    return JSON.stringify(beforeData[key]) !== JSON.stringify(afterData[key]);
  });
};
// Maps a Firetable field value + Firetable type to a BigQuery SQL literal
// (as a string ready to embed in a query) and a BigQuery column type.
// Unknown or meaningless-to-sync types degrade to a null STRING.
const transformToSQLData = (value: any, ftType: string) => {
  if (value === null || value === undefined) {
    return {
      value: `null`,
      type: "STRING",
    };
  }
  // Escape double quotes and newlines so the value can be embedded inside a
  // double-quoted SQL string literal.
  const sanitise = (x: string) =>
    x?.replace?.(/\"/g, '\\"')?.replace?.(/\n/g, "\\n") ?? "";
  switch (ftType) {
    // Plain text-like fields: stored verbatim as STRING.
    case "SIMPLE_TEXT":
    case "LONG_TEXT":
    case "EMAIL":
    case "PHONE_NUMBER":
    case "CODE":
    case "RICH_TEXT":
    case "ID":
    case "SINGLE_SELECT":
    case "URL":
      return {
        value: `"${sanitise(value)}"`,
        type: "STRING",
      };
    // Structured fields: serialised to JSON and stored as STRING.
    case "JSON": // JSON
    case "FILE": // JSON
    case "IMAGE": // JSON
    case "USER": // JSON
    case "COLOR": // JSON
    case "DOCUMENT_SELECT":
    case "SERVICE_SELECT":
    case "ACTION":
    case "AGGREGATE":
    case "MULTI_SELECT": // array
      return {
        value: `"${sanitise(JSON.stringify(value))}"`,
        type: "STRING",
      };
    case "CHECK_BOX":
      return {
        value: value ? `true` : `false`,
        type: "BOOLEAN",
      };
    // Numeric fields.
    case "NUMBER":
    case "PERCENTAGE":
    case "RATING":
    case "SLIDER":
      return {
        value: Number(value),
        type: "NUMERIC",
      };
    // Date-like fields: expects a Firestore Timestamp (has .toDate()).
    case "DATE":
    case "DATE_TIME":
    case "DURATION":
      if (!value?.toDate) {
        return {
          value: `null`,
          type: "TIMESTAMP",
        };
      }
      return {
        value: `timestamp("${value?.toDate?.()}")`,
        type: "TIMESTAMP",
      };
    case "LAST":
    case "STATUS":
    case "SUB_TABLE":
    default:
      // unknown or meaningless to sync
      return {
        value: `null`,
        type: "STRING",
      };
  }
};
// Convenience wrapper: only the SQL-ready literal for a Firetable value.
const transformToSQLValue = (ftValue: any, ftType: string) =>
  transformToSQLData(ftValue, ftType).value;
// Convenience wrapper: only the BigQuery column type for a Firetable type.
const transformToSQLType = (ftType: string) =>
  transformToSQLData("", ftType).type;
const bigqueryIndex = async (payload, sparkContext) => {
const { objectID, index, fieldsToSync, projectID, datasetLocation } = payload;
const { triggerType, change, fieldTypes } = sparkContext;
const record = rowReducer(fieldsToSync, sparkContext.row);
const { BigQuery } = require("@google-cloud/bigquery");
const bigquery = new BigQuery();
const _projectID = projectID ?? process.env.GCLOUD_PROJECT;
const tableFullName = `${_projectID}.firetable.${index}`;
console.log(
`projectID: ${_projectID}, index: ${index}, tableFullName: ${tableFullName}`
);
// create dataset with exact name "firetable" if not exists
async function preprocessDataset() {
const dataset = bigquery.dataset("firetable", {
location: datasetLocation ?? "US",
});
const res = await dataset.exists();
const exists = res[0];
if (!exists) {
console.log("Dataset 'firetable' does not exist, creating dataset...");
await dataset.create();
console.log("Dataset 'firetable' created.");
} else {
console.log("Dataset 'firetable' exists.");
}
}
async function preprocessTable() {
const dataset = bigquery.dataset("firetable");
const table = dataset.table(index);
const res = await table.exists();
const exists = res[0];
if (!exists) {
console.log(
`Table '${index}' does not exist in dataset 'firetable', creating dataset...`
);
await table.create();
console.log(`Table '${index}' created in dataset 'firetable'.`);
} else {
console.log(`Table ${index} exists in 'firetable'.`);
}
}
/**
 * Ensure the BigQuery table's schema matches the schema generated from the
 * Firetable column types. Pushes a schema when the table has none or when
 * only compatible new fields were added; throws when the change is
 * incompatible (e.g. an existing field's type changed).
 */
async function preprocessSchema() {
  const dataset = bigquery.dataset("firetable");
  const table = dataset.table(index);
  // Map each synced Firetable field to its BigQuery SQL type.
  const generatedTypes = Object.keys(fieldTypes)
    .filter((field) => fieldsToSync.includes(field))
    .reduce((acc, cur) => {
      return {
        [cur]: transformToSQLType(fieldTypes[cur]),
        ...acc,
      };
    }, {});
  // objectID is the required key column; all synced fields are nullable.
  const generatedSchema = [
    { name: "objectID", type: "STRING", mode: "REQUIRED" },
    ...Object.keys(generatedTypes).map((key) => {
      return {
        name: key,
        type: generatedTypes[key],
        mode: "NULLABLE",
      };
    }),
  ];
  const pushSchema = async () => {
    console.log("pushing schema:", generatedSchema);
    const metadata = {
      schema: generatedSchema,
    };
    await table.setMetadata(metadata);
    console.log("schema pushed.");
  };
  const existingRes = await table.getMetadata();
  const existingSchema = existingRes[0].schema?.fields;
  if (!existingSchema) {
    console.log("Existing schema does not exist, pushing schema...");
    await pushSchema();
    return;
  }
  // Compare against the existing schema, ignoring the objectID key column.
  // (Hoisted: the filtered list was previously recomputed four times.)
  const objectIDFilter = (field) => field.name !== "objectID";
  const existingFields = existingSchema.filter(objectIDFilter);
  // check if schema update is needed
  const schemaIdentical =
    Object.keys(generatedTypes).length === existingFields.length &&
    existingFields.every((field) => generatedTypes[field.name] === field.type);
  if (schemaIdentical) {
    // no change to schema
    console.log("Existing schema detected, no update needed.");
    return;
  }
  // check schema compatibility (only adding new fields is accepted)
  const compatible =
    Object.keys(generatedTypes).length > existingFields.length &&
    existingFields
      .filter((field) => Object.keys(generatedTypes).includes(field.name))
      .every((field) => generatedTypes[field.name] === field.type);
  if (!compatible) {
    const errorMessage =
      "New update to field types is not compatible with existing schema. Please manually remove the current bigquery table or update spark index";
    console.log(errorMessage);
    // BUG FIX: throw a real Error (was a bare string) so callers get a
    // stack trace and instanceof Error checks work.
    throw new Error(errorMessage);
  } else {
    console.log(
      "New field types detected and it is compatible with current schema."
    );
  }
  // push schema
  await pushSchema();
}
// Returns whether a row with the current objectID already exists (boolean).
async function exist() {
  // SECURITY FIX: objectID originates outside this function; bind it as a
  // named query parameter instead of interpolating it into the SQL string.
  const query = `SELECT objectID FROM ${tableFullName}
WHERE objectID = @objectID
;`;
  console.log(query);
  const res = await bigquery.query({ query, params: { objectID } });
  const rows = res?.[0];
  return !!rows?.length;
}
// Splits the incoming record into fields whose types are declared in
// fieldTypes (kept) and unknown fields (logged and dropped).
function getTypeKnownRecord(data) {
  const knownTypes = Object.keys(fieldTypes);
  const knownRecord: any = {};
  const unknownKeys: string[] = [];
  for (const key of Object.keys(data)) {
    if (knownTypes.includes(key)) knownRecord[key] = data[key];
    else unknownKeys.push(key);
  }
  if (unknownKeys.length > 0) {
    console.log(
      "The following fields do not exist in Firetable and are ignored.",
      unknownKeys
    );
  }
  return knownRecord;
}
// Inserts a new row keyed by objectID with the given field values.
async function insert(data) {
  const fields = Object.keys(data);
  const sqlValues = fields.map((field) =>
    transformToSQLValue(data[field], fieldTypes[field])
  );
  const query = `INSERT INTO ${tableFullName}
(objectID, ${fields.join(",")})
VALUES ("${objectID}", ${sqlValues.join(",")})
;`;
  console.log(query);
  await executeQuery(query);
}
// execute a query, if rate limited, sleep and try again until success
// ATTENTION: cloud function might timeout the function execution time at 60,000ms
const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
// Runs a BigQuery query, retrying with randomised backoff only on
// rateLimitExceeded / quotaExceeded errors. Any other error is logged and
// swallowed (deliberate best-effort semantics; callers are not notified).
async function executeQuery(query, delayDepth = 1) {
try {
const res = await bigquery.query(query);
console.log(res);
} catch (error) {
if (
error?.errors?.length === 1 &&
(error?.errors?.[0]?.reason === "rateLimitExceeded" ||
error?.errors?.[0]?.reason === "quotaExceeded")
) {
// Random backoff: roughly 1s minimum plus up to 3s per depth step;
// `% 20` caps the growth so the delay never exceeds ~60s.
const delay = Math.round(
Math.floor(Math.random() * 3_000 * (delayDepth % 20) + 1000)
);
console.log(`API rate limited, try again in ${delay}ms`);
await sleep(delay);
await executeQuery(query, delayDepth + 1);
} else {
console.log(error?.errors ?? error);
}
}
// Only the outermost call logs completion; recursive retries skip it.
if (delayDepth === 1) {
console.log("Query finished.");
}
}
// Updates the existing row identified by objectID with the given fields.
async function update(data) {
  const assignments: string[] = [];
  for (const key of Object.keys(data)) {
    assignments.push(
      `${key}=${transformToSQLValue(data[key], fieldTypes[key])}`
    );
  }
  const query = `UPDATE ${tableFullName}
SET ${assignments.join(",")}
WHERE objectID="${objectID}"
;`;
  console.log(query);
  await executeQuery(query);
}
// Upsert: update the row when objectID already exists, otherwise insert it.
async function insertOrUpdate(data) {
  if (await exist()) {
    await update(data);
  } else {
    await insert(data);
  }
}
// Deletes the row for the current objectID.
// NOTE(review): objectID is interpolated directly into the SQL string; it is
// presumably a Firestore document id, but a parameterized query would be safer.
async function remove() {
const query = `DELETE FROM ${tableFullName}
WHERE objectID="${objectID}"
;`;
console.log(query);
await executeQuery(query);
}
// preprocess before starting index logic
// Ensure the dataset, table, and schema all exist and match before writing.
await preprocessDataset();
await preprocessTable();
await preprocessSchema();
// only proceed with fields that have known types
const typeKnownRecord = getTypeKnownRecord(record);
switch (triggerType) {
case "delete":
await remove();
break;
case "update":
// Only write when a synced field (or the forced-update marker) changed.
if (
significantDifference([...fieldsToSync, "_ft_forcedUpdateAt"], change)
) {
await insertOrUpdate(typeKnownRecord);
} else {
console.log("significantDifference is false, no update needed.");
}
break;
case "create":
await insertOrUpdate(typeKnownRecord);
break;
default:
break;
}
return true;
};
export default bigqueryIndex;

View File

@@ -1,56 +0,0 @@
export const dependencies = {};
// Builds the payload to sync: picks the fieldsToSync keys from the row,
// skipping values that are null or undefined.
const rowReducer = (fieldsToSync, row) => {
  const picked: any = {};
  for (const key of fieldsToSync) {
    const value = row[key];
    if (value !== undefined && value !== null) picked[key] = value;
  }
  return picked;
};
// True when any of the given fields differs between the before/after
// snapshots (deep-compared via JSON serialization).
const significantDifference = (fieldsToSync, change) => {
  const beforeData = change.before.data();
  const afterData = change.after.data();
  return fieldsToSync.some(
    (field) =>
      JSON.stringify(beforeData[field]) !== JSON.stringify(afterData[field])
  );
};
// Mirrors the synced fields of a Firetable row to another Firestore document
// at targetPath. Delete/update failures are logged, not rethrown.
const docSync = async (data, sparkContext) => {
  const { row, targetPath, fieldsToSync } = data;
  const { triggerType, change } = sparkContext;
  const record = rowReducer(fieldsToSync, row);
  const { db } = require("../firebaseConfig");
  if (triggerType === "delete") {
    try {
      await db.doc(targetPath).delete();
    } catch (error) {
      console.log(error);
    }
  } else if (triggerType === "update") {
    const changed = significantDifference(
      [...fieldsToSync, "_ft_forcedUpdateAt"],
      change
    );
    if (changed) {
      try {
        await db.doc(targetPath).update(record);
      } catch (error) {
        console.log(error);
      }
    }
  } else if (triggerType === "create") {
    await db.doc(targetPath).set(record, { merge: true });
  }
  return true;
};
export default docSync;

View File

@@ -1,33 +0,0 @@
export const dependencies = {};
// True when any tracked field differs between the before/after snapshots
// (deep-compared via JSON serialization).
const significantDifference = (fieldsToSync, change) => {
  const beforeData = change.before.data();
  const afterData = change.after.data();
  return fieldsToSync.some(
    (field) =>
      JSON.stringify(beforeData[field]) !== JSON.stringify(afterData[field])
  );
};
// Archives the previous version of a row into its "historySnapshots"
// subcollection on significant updates and on deletes. Write failures are
// logged, not rethrown.
const historySnapshot = async (data, sparkContext) => {
  const { trackedFields } = data;
  const { triggerType, change } = sparkContext;
  const shouldArchive =
    triggerType === "delete" ||
    (triggerType === "update" && significantDifference(trackedFields, change));
  if (shouldArchive) {
    try {
      await change.before.ref.collection("historySnapshots").add({
        ...change.before.data(),
        archivedAt: new Date(),
        archiveEvent: triggerType,
      });
    } catch (error) {
      console.log(error);
    }
  }
  return true;
};
export default historySnapshot;

View File

@@ -1,25 +0,0 @@
export const dependencies = {
  "mailchimp-api-v3": "1.15.0",
};
// method : 'get|post|put|patch|delete'
// path :`/lists/${listId}/members`
/**
 * Thin wrapper around the mailchimp-api-v3 request API.
 * Resolves with the Mailchimp response body, rejects on request errors.
 */
const mailchimp = async (data) => {
  const { path, method, path_params, body, query } = data;
  const mailchimpLib = require("mailchimp-api-v3");
  const utilFns = require("../utils");
  const mailchimpKey = await utilFns.getSecret("mailchimp");
  const _mailchimp = new mailchimpLib(mailchimpKey);
  return new Promise((resolve, reject) => {
    _mailchimp.request(
      {
        method,
        path,
        path_params,
        body,
        query,
      },
      // BUG FIX: the node-style callback is (err, result). Previously
      // `resolve` was passed directly, so the promise resolved with the
      // error (or undefined) and failures were never surfaced.
      (err, result) => {
        if (err) reject(err);
        else resolve(result);
      }
    );
  });
};
export default mailchimp;

View File

@@ -1,131 +0,0 @@
export const dependencies = {
  meilisearch: "^0.18.1",
};
// Minimal lodash.get clone: resolves a dotted/bracketed path against an
// object, returning defaultValue when the path cannot be resolved.
const get = (obj, path, defaultValue = undefined) => {
  const travel = (regexp) => {
    const segments = String.prototype.split
      .call(path, regexp)
      .filter(Boolean);
    return segments.reduce(
      (res, key) => (res !== null && res !== undefined ? res[key] : res),
      obj
    );
  };
  // Try bracket-only splitting first, then fall back to dots as well.
  const result = travel(/[,[\]]+?/) || travel(/[,[\].]+?/);
  return result === undefined || result === obj ? defaultValue : result;
};
// Reduces a snapshot field to its docPath plus only the preservedKeys
// (truthy values) picked out of the snapshot object.
const filterSnapshot = (
  field: { docPath: string; snapshot: any },
  preservedKeys: string[]
) => {
  const picked = preservedKeys.reduce((acc: any, currentKey: string) => {
    const value = get(field.snapshot, currentKey);
    if (!value) return acc;
    return { ...acc, snapshot: { [currentKey]: value, ...acc.snapshot } };
  }, {});
  return { docPath: field.docPath, ...picked };
};
// returns object of fieldsToSync
// Builds the Meilisearch payload from a row: plain fields are copied
// (Firestore Timestamp-like values become unix seconds), and snapshot fields
// are filtered down to their configured snapshotFields.
const rowReducer = (fieldsToSync, row) =>
  fieldsToSync.reduce(
    (
      acc: any,
      curr: string | { fieldName: string; snapshotFields: string[] }
    ) => {
      if (typeof curr === "string") {
        if (row[curr] && typeof row[curr].toDate === "function") {
          // Timestamp -> unix time in seconds
          return {
            ...acc,
            [curr]: row[curr].toDate().getTime() / 1000,
          };
        } else if (row[curr] !== undefined && row[curr] !== null) {
          // BUG FIX: was `||`, which is always true and therefore copied
          // missing fields into the payload as undefined/null.
          return { ...acc, [curr]: row[curr] };
        } else {
          return acc;
        }
      } else {
        if (row[curr.fieldName] && curr.snapshotFields) {
          return {
            ...acc,
            [curr.fieldName]: row[curr.fieldName].map((snapshot) =>
              filterSnapshot(snapshot, curr.snapshotFields)
            ),
          };
        } else {
          return acc;
        }
      }
    },
    {}
  );
// True when any synced field (a plain name or a { fieldName } config) differs
// between the before/after snapshots, compared via JSON serialization.
const significantDifference = (fieldsToSync, change) => {
  const beforeData = change.before.data();
  const afterData = change.after.data();
  return fieldsToSync.some((field) => {
    const key = typeof field === "string" ? field : field.fieldName;
    return JSON.stringify(beforeData[key]) !== JSON.stringify(afterData[key]);
  });
};
// Syncs a Firetable row into a Meilisearch index: deletes, updates (only on
// significant changes), or creates the document identified by objectID.
const meiliIndex = async (data, sparkContext) => {
const { row, objectID, index, fieldsToSync } = data;
const { triggerType, change } = sparkContext;
const record = rowReducer(fieldsToSync, row);
const { MeiliSearch } = require("meilisearch");
const { getSecret } = require("../utils");
// Connection config (host, apiKey, ...) is stored in Secret Manager.
const meiliConfig = await getSecret("meilisearch");
console.log(`meilisearch host : ${meiliConfig.host}, index: ${index}`);
const client = new MeiliSearch(meiliConfig);
const _index = client.index(index);
let res;
switch (triggerType) {
case "delete":
console.log("Deleting...");
res = await _index.deleteDocument(objectID);
break;
case "update":
// Skip the write when none of the synced fields actually changed.
if (
significantDifference([...fieldsToSync, "_ft_forcedUpdateAt"], change)
) {
console.log("Updating...");
res = await _index.updateDocuments([
{
id: objectID,
...record,
},
]);
}
break;
case "create":
console.log("Creating...");
res = await _index.addDocuments([
{
id: objectID,
...record,
},
]);
break;
default:
console.log("No match.");
break;
}
// Meilisearch writes are async server-side; when the response carries an
// updateId, query its processing status for the logs.
console.log("Checking status...");
if (res?.updateId) {
console.log("Querying status...");
const status = await client.index(index).getUpdateStatus(res.updateId);
console.log("Status:", status);
}
return true;
};
export default meiliIndex;

View File

@@ -1,13 +0,0 @@
export const dependencies = {
"@sendgrid/mail": "^7.4.2",
};
const sendgridEmail = async (data) => {
const { msg } = data;
const sgMail = require("@sendgrid/mail");
const utilFns = require("../utils");
sgMail.setSubstitutionWrappers("{{", "}}");
const sendgridKey = await utilFns.getSecret("sendgrid");
sgMail.setApiKey(sendgridKey);
return sgMail.send(msg);
};
export default sendgridEmail;

View File

@@ -1,92 +0,0 @@
/*
{ channels?:string[], emails?:string[], text?:string, blocks?:any,attachments?:any }
*/
export const dependencies = {
  "@slack/web-api": "^6.0.0",
};
// Creates a Slack WebClient authenticated with the stored "slack" secret.
const initSlack = async () => {
  const { WebClient } = require("@slack/web-api");
  const { getSecret } = require("../utils");
  const { token } = await getSecret("slack");
  return new WebClient(token);
};
// Posts one message to a single Slack channel via chat.postMessage.
const messageByChannel = (slackClient) => async ({
  text,
  channel,
  blocks,
  attachments,
}: {
  channel: string;
  text: string;
  blocks: any[];
  attachments: any[];
}) => {
  const payload = { text, channel, blocks, attachments };
  return slackClient.chat.postMessage(payload);
};
// Looks up a Slack user by email and DMs them. Returns false when the user
// cannot be found or the lookup fails (best-effort; errors are logged).
const messageByEmail = (slackClient) => async ({
  email,
  text,
  blocks,
  attachments,
}: {
  email: string;
  text: string;
  blocks: any[];
  attachments: any[];
}) => {
  try {
    const user = await slackClient.users.lookupByEmail({ email });
    if (user.ok) {
      // The DM channel id is the user's id.
      const channel = user.user.id;
      return await messageByChannel(slackClient)({
        text,
        blocks,
        attachments,
        channel,
      });
    }
    // CLEANUP: was `return await false` — awaiting a non-promise is a no-op.
    return false;
  } catch (error) {
    // FIX: log message previously read "maybe<email>" with no space, and the
    // error was logged twice.
    console.log(`${error} maybe ${email} is not on slack`);
    return false;
  }
};
// Sends a Slack message to any combination of channel ids and user emails,
// posting to all recipients concurrently.
const slackMessage = async (data) => {
  const slackClient = await initSlack();
  const { channels, emails, text, blocks, attachments } = data;
  if (channels) {
    await Promise.all(
      channels.map((channel: string) =>
        messageByChannel(slackClient)({
          text,
          blocks: blocks ?? [],
          channel,
          attachments,
        })
      )
    );
  }
  if (emails) {
    await Promise.all(
      emails.map((email: string) =>
        messageByEmail(slackClient)({
          text,
          blocks: blocks ?? [],
          email,
          attachments,
        })
      )
    );
  }
  return true;
};
export default slackMessage;

View File

@@ -1,7 +0,0 @@
export const dependencies = {};
// Awaits the given promise(s) and returns their settled results
// (Promise.allSettled semantics; a single promise is wrapped in an array).
const task = async (args) => {
  const { promises } = args;
  const list = Array.isArray(promises) ? promises : [promises];
  return Promise.allSettled(list);
};
export default task;

View File

@@ -0,0 +1,48 @@
export const dependencies = {
// --- Add your dependencies (npm package name -> semver range)
// algoliasearch: "^4.8.3",
};
// Define your spark
// Template spark: `data` carries the configured inputs for this spark;
// `sparkContext` describes the Firestore trigger that fired it.
const sparkName = async (data, sparkContext) => {
// Your spark inputs
const { row, targetPath, fieldsToSync } = data;
const { triggerType, change } = sparkContext;
// ---------------------------------------------
// --- Utilise your dependencies ---
// const algoliasearch = require("algoliasearch");
// ---------------------------------------------
// --- Get the secret from Secrets Manager
// Example: Algolia Secret
// const { getSecret } = require("../utils");
// const { appId, adminKey } = await getSecret("algolia");
// ---------------------------------------------
// --- Connect to any third party extensions ---
// Example Algolia
// const client = algoliasearch(appId, adminKey);
// const _index = client.initIndex(index);
// ---------------------------------------------
// --- Handle required trigger actions ---
// triggerType is one of "create" | "update" | "delete".
switch (triggerType) {
case "create":
// create trigger actions
break;
case "update":
// update trigger actions
break;
case "delete":
// delete trigger actions
break;
default:
break;
}
// Sparks resolve true on completion.
return true;
};
export default sparkName;

View File

@@ -1,13 +0,0 @@
export const dependencies = {
  twilio: "3.56.0",
};
/**
 * Sends an SMS via Twilio using the stored "twilio" secret.
 * Resolves with the created message resource.
 * FIX: previously the result was swallowed by a logging-only `.then`,
 * so callers always received undefined; the message is now returned.
 */
const twilioMessage = async (data) => {
  const utilFns = require("../utils");
  const { accountSid, authToken } = await utilFns.getSecret("twilio");
  const client = require("twilio")(accountSid, authToken);
  const { body, from, to } = data;
  const message = await client.messages.create({ body, from, to });
  console.log(message.sid);
  return message;
};
export default twilioMessage;

View File

@@ -1,20 +0,0 @@
{
"compilerOptions": {
"target": "es6",
"module": "commonjs",
"rootDir": "./",
"outDir": "./build",
"esModuleInterop": true,
"strict": true,
"noImplicitReturns": true,
"noUnusedLocals": false,
"sourceMap": true,
"noImplicitAny": false,
"resolveJsonModule": true,
"lib": ["ESNext"],
"strictNullChecks": false
},
"compileOnSave": true,
"exclude": ["functions", "build"],
"include": ["*.ts", "firebase.json", "sparksLib"]
}

View File

@@ -1,198 +0,0 @@
import { db } from "./firebaseConfig";
import admin from "firebase-admin";
// Builds a plain snapshot of the acting user's identity, stamped with the
// current wall-clock time, for embedding in audit/error records.
// Tolerates a null/undefined user (all fields come back undefined).
function firetableUser(user: admin.auth.UserRecord) {
return {
displayName: user?.displayName,
email: user?.email,
uid: user?.uid,
emailVerified: user?.emailVerified,
photoURL: user?.photoURL,
timestamp: new Date(),
};
}
// Persists an error record into the _FT_ERRORS audit collection.
async function insertErrorRecordToDB(errorRecord: object) {
await db.collection("_FT_ERRORS").add(errorRecord);
}
// Formats the interesting fields of an error record into one string and
// forwards it to the build-log streamer (error level). Falsy/missing fields
// are skipped.
async function insertErrorToStreamer(errorRecord: object, streamLogger) {
  const interestingKeys = [
    "command",
    "description",
    "functionConfigTs",
    "sparksConfig",
    "stderr",
    "errorStackTrace",
  ];
  const errorString = interestingKeys
    .filter((key) => errorRecord[key])
    .map((key) => `\n\n${key}: ${errorRecord[key]}`)
    .join("");
  await streamLogger.error(errorString);
}
/**
 * Returns a child_process exec-style callback that streams stdout to the
 * build logger and, on failure, records a structured "commandError" in the
 * stream log and in Firestore.
 */
function commandErrorHandler(
  meta: {
    user: admin.auth.UserRecord;
    description?: string;
    functionConfigTs?: string;
    sparksConfig?: string;
  },
  streamLogger
) {
  return async function (error, stdout, stderr) {
    await streamLogger.info(stdout);
    if (!error) {
      return;
    }
    const errorRecord = {
      errorType: "commandError",
      ranBy: firetableUser(meta.user),
      createdAt: admin.firestore.FieldValue.serverTimestamp(),
      stdout: stdout ?? "",
      stderr: stderr ?? "",
      errorStackTrace: error?.stack ?? "",
      command: error?.cmd ?? "",
      description: meta?.description ?? "",
      functionConfigTs: meta?.functionConfigTs ?? "",
      sparksConfig: meta?.sparksConfig ?? "",
    };
    await insertErrorToStreamer(errorRecord, streamLogger);
    // BUG FIX: await the Firestore write; previously this promise floated,
    // so the process could terminate before the error record was persisted.
    await insertErrorRecordToDB(errorRecord);
  };
}
/**
 * Records a "codeError" in Firestore (and optionally in the stream log).
 * Used for errors raised by our own code rather than shell commands.
 */
async function logErrorToDB(
  data: {
    errorDescription: string;
    errorExtraInfo?: string;
    errorTraceStack?: string;
    user: admin.auth.UserRecord;
    sparksConfig?: string;
  },
  streamLogger?
) {
  console.error(data.errorDescription);
  const errorRecord = {
    errorType: "codeError",
    ranBy: firetableUser(data.user),
    description: data.errorDescription,
    createdAt: admin.firestore.FieldValue.serverTimestamp(),
    sparksConfig: data?.sparksConfig ?? "",
    errorExtraInfo: data?.errorExtraInfo ?? "",
    errorStackTrace: data?.errorTraceStack ?? "",
  };
  if (streamLogger) {
    await insertErrorToStreamer(errorRecord, streamLogger);
  }
  // BUG FIX: await the Firestore write; previously this promise floated,
  // so the process could terminate before the error record was persisted.
  await insertErrorRecordToDB(errorRecord);
}
// Extracts the raw configuration literal from a "sparks.config( ... )"
// wrapper. Returns "[]" when sparks is undefined/empty or on failure
// (failures are also recorded via logErrorToDB).
function parseSparksConfig(
  sparks: string | undefined,
  user: admin.auth.UserRecord,
  streamLogger
) {
  if (!sparks) return "[]";
  try {
    // remove leading "sparks.config(" and trailing ")"
    const withoutPrefix = sparks.replace(/^(\s*)sparks.config\(/, "");
    return withoutPrefix.replace(/\);?\s*$/, "");
  } catch (error) {
    logErrorToDB(
      {
        errorDescription: "Sparks is not wrapped with sparks.config",
        errorTraceStack: error.stack,
        user,
        sparksConfig: sparks,
      },
      streamLogger
    );
  }
  return "[]";
}
// Creates a per-build logger that mirrors every log line into Firestore at
// <tableConfigPath>/ftBuildLogs/<startTimeStamp>, so the UI can stream build
// output live. Each info/error call rewrites the entire fullLog array.
async function createStreamLogger(tableConfigPath: string) {
const startTimeStamp = Date.now();
// In-memory accumulation of every line logged so far.
const fullLog: {
log: string;
level: "info" | "error";
timestamp: number;
}[] = [];
const logRef = db
.doc(tableConfigPath)
.collection("ftBuildLogs")
.doc(startTimeStamp.toString());
await logRef.set({ startTimeStamp, status: "BUILDING" });
console.log(
`streamLogger created. tableConfigPath: ${tableConfigPath}, startTimeStamp: ${startTimeStamp}`
);
return {
// Append an info-level line and persist the whole log.
info: async (log: string) => {
console.log(log);
fullLog.push({
log,
level: "info",
timestamp: Date.now(),
});
await logRef.update({
fullLog,
});
},
// Append an error-level line and persist the whole log.
error: async (log: string) => {
console.error(log);
fullLog.push({
log,
level: "error",
timestamp: Date.now(),
});
await logRef.update({
fullLog,
});
},
// Finalise the build: SUCCESS when no error-level entries were recorded
// (re-reads the persisted log), otherwise FAIL.
end: async () => {
const logsDoc = await logRef.get();
const errorLog = logsDoc
.get("fullLog")
.filter((log) => log.level === "error");
if (errorLog.length !== 0) {
console.log("streamLogger marked as FAIL");
await logRef.update({
status: "FAIL",
failTimeStamp: Date.now(),
});
} else {
console.log("streamLogger marked as SUCCESS");
await logRef.update({
status: "SUCCESS",
successTimeStamp: Date.now(),
});
}
},
// Explicitly mark the build failed (e.g. from an exception handler).
fail: async () => {
console.log("streamLogger marked as FAIL");
await logRef.update({
status: "FAIL",
failTimeStamp: Date.now(),
});
},
};
}
export {
commandErrorHandler,
logErrorToDB,
parseSparksConfig,
createStreamLogger,
};

File diff suppressed because it is too large Load Diff

View File

@@ -45,7 +45,7 @@
"react-dom": "^17.0.2",
"react-dropzone": "^10.1.8",
"react-firebaseui": "^5.0.2",
"react-hook-form": "^6.15.5",
"react-hook-form": "^6",
"react-image": "^4.0.3",
"react-joyride": "^2.3.0",
"react-json-view": "^1.19.1",

View File

@@ -1,20 +1,6 @@
<!DOCTYPE html>
<html lang="en">
<head>
<script
async
src="https://www.googletagmanager.com/gtag/js?id=UA-140647798-6"
></script>
<script>
window.dataLayer = window.dataLayer || [];
function gtag() {
dataLayer.push(arguments);
}
gtag("js", new Date());
gtag("config", "UA-140647798-6");
</script>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<meta

View File

@@ -20,6 +20,7 @@ import routes from "constants/routes";
import AuthView from "pages/Auth";
import SignOutView from "pages/Auth/SignOut";
import TestView from "pages/Test";
import { analytics } from "analytics";
const AuthSetupGuidePage = lazy(
() => import("pages/Auth/SetupGuide" /* webpackChunkName: "AuthSetupGuide" */)
);

17
www/src/analytics.ts Normal file
View File

@@ -0,0 +1,17 @@
// Secondary Firebase app for the shared "firetable-service" project, used
// only to report anonymous usage analytics from the Firetable UI (separate
// from the user's own Firebase project).
import firebase from "firebase/app";
import "firebase/analytics";
var firebaseConfig = {
apiKey: "AIzaSyBwgfb-GmsCZ_d4B5kRElzWMoPWwjdKioM",
authDomain: "firetable-service.firebaseapp.com",
projectId: "firetable-service",
storageBucket: "firetable-service.appspot.com",
messagingSenderId: "831080389",
appId: "1:831080389:web:ab0bbacccdd887ab3b6dac",
measurementId: "G-K97G7PBDNT",
};
// Initialize Firebase
// Named app so it does not clash with the default app for the user's project.
const firetableServiceApp = firebase.initializeApp(
firebaseConfig,
"firetable-service"
);
export const analytics = firebase.analytics(firetableServiceApp);

View File

@@ -0,0 +1,69 @@
import useDoc from "hooks/useDoc";
import Modal from "components/Modal";
import { Box, Button, CircularProgress, Typography } from "@material-ui/core";
import { IFormDialogProps } from "./Table/ColumnMenu/NewColumn";
import OpenInNewIcon from "@material-ui/icons/OpenInNew";
import CheckCircleIcon from "@material-ui/icons/CheckCircle";
// NOTE(review): Button and OpenInNewIcon are imported but unused here.
export interface IProjectSettings
extends Pick<IFormDialogProps, "handleClose"> {}
// Modal shown after the user is redirected to Google Cloud Shell; it watches
// _FIRETABLE_/settings and reflects the Functions Builder deploy status
// (waiting / deploying / complete) based on ftBuildStatus / ftBuildUrl.
export default function BuilderInstaller({ handleClose }: IProjectSettings) {
const [settingsState] = useDoc({
path: "_FIRETABLE_/settings",
});
if (settingsState.loading) return null;
// Deploy state flags derived from the settings doc.
const complete =
settingsState.doc.ftBuildStatus === "COMPLETE" ||
!!settingsState.doc.ftBuildUrl;
const building = settingsState.doc.ftBuildStatus === "BUILDING";
const waiting =
!settingsState.doc.ftBuildStatus && !settingsState.doc.ftBuildUrl;
return (
<Modal
onClose={handleClose}
title="One Click Builder Installer"
maxWidth="sm"
children={
<Box display="flex" flexDirection="column">
<Typography variant="body2">
You will be redirected to Google Cloud Shell to deploy Firetable
Function Builder to Cloud Run.
</Typography>
<br />
<Box
display="flex"
justifyContent="center"
alignItems="center"
flexDirection="column"
>
{complete && (
<>
<CheckCircleIcon />
<Typography variant="overline">Deploy Complete</Typography>
</>
)}
{building && (
<>
<CircularProgress size={25} />
<Typography variant="overline">Deploying...</Typography>
</>
)}
{waiting && (
<>
<CircularProgress size={25} />
<Typography variant="overline">
Waiting for deploy...
</Typography>
</>
)}
</Box>
</Box>
}
/>
);
}

View File

@@ -4,6 +4,7 @@ import { authOptions } from "firebase/firebaseui";
import { Link } from "@material-ui/core";
import OpenInNewIcon from "@material-ui/icons/OpenInNew";
import WIKI_LINKS from "constants/wikiLinks";
export const projectSettingsForm = [
{
@@ -44,12 +45,30 @@ export const projectSettingsForm = [
{
type: FieldType.contentHeader,
name: "_contentHeading_cloudRun",
label: "Cloud Run Configuration",
label: "Functions Builder",
},
{
type: FieldType.shortText,
name: "ftBuildUrl",
label: "Cloud Run Trigger URL",
format: "url",
assistiveText: (
<>
Firetable requires a cloud run instance to build and deploy Firetable
cloud functions ,
<Link href={WIKI_LINKS.FtFunctions} target="_blank" rel="noopener">
more info
<OpenInNewIcon
aria-label="Open in new tab"
fontSize="small"
style={{ verticalAlign: "bottom", marginLeft: 4 }}
/>
</Link>
.
<br />
To deploy the cloud run instance simply click the button bellow and
follow the cloud shell prompts.
</>
) as any,
},
];

View File

@@ -3,11 +3,17 @@ import { projectSettingsForm } from "./form";
import useDoc, { DocActions } from "hooks/useDoc";
import { IFormDialogProps } from "components/Table/ColumnMenu/NewColumn";
import { Button } from "@material-ui/core";
export interface IProjectSettings
extends Pick<IFormDialogProps, "handleClose"> {}
extends Pick<IFormDialogProps, "handleClose"> {
handleOpenBuilderInstaller: () => void;
}
export default function ProjectSettings({ handleClose }: IProjectSettings) {
export default function ProjectSettings({
handleClose,
handleOpenBuilderInstaller,
}: IProjectSettings) {
const [settingsState, settingsDispatch] = useDoc({
path: "_FIRETABLE_/settings",
});
@@ -27,6 +33,17 @@ export default function ProjectSettings({ handleClose }: IProjectSettings) {
});
};
const onOpenBuilderInstaller = () => {
handleClose();
window.open(
"https://deploy.cloud.run/?git_repo=https://github.com/FiretableProject/FunctionsBuilder.git",
"_blank"
);
handleOpenBuilderInstaller();
};
const hasCloudRunConfig = !!settingsState.doc.ftBuildUrl;
return (
<FormDialog
onClose={handleClose}
@@ -35,6 +52,11 @@ export default function ProjectSettings({ handleClose }: IProjectSettings) {
values={{ ...settingsState.doc, ...publicSettingsState.doc }}
onSubmit={handleSubmit}
SubmitButtonProps={{ children: "Save" }}
formFooter={
hasCloudRunConfig ? null : (
<Button onClick={onOpenBuilderInstaller}>One click deploy</Button>
)
}
/>
);
}

View File

@@ -10,11 +10,12 @@ import { Values } from "./utils";
import { useFiretableContext } from "contexts/FiretableContext";
import { FiretableState } from "hooks/useFiretable";
export interface IAutosaveProps
extends Pick<UseFormMethods, "reset" | "formState"> {
export interface IAutosaveProps {
control: Control;
docRef: firebase.default.firestore.DocumentReference;
row: any;
reset: UseFormMethods["reset"];
dirtyFields: UseFormMethods["formState"]["dirtyFields"];
}
const getEditables = (values: Values, tableState?: FiretableState) =>
@@ -33,7 +34,7 @@ export default function Autosave({
docRef,
row,
reset,
formState,
dirtyFields,
}: IAutosaveProps) {
const { tableState, updateCell } = useFiretableContext();
@@ -49,12 +50,14 @@ export default function Autosave({
// Get only fields that have had their value updated by the user
const updatedValues = _pickBy(
_pickBy(debouncedValue, (_, key) => formState.dirtyFields[key]),
_pickBy(debouncedValue, (_, key) => dirtyFields[key]),
(value, key) => !_isEqual(value, row[key])
);
console.log(debouncedValue, row);
console.log(updatedValues, dirtyFields);
if (Object.keys(updatedValues).length === 0) return;
// Update the document
Object.entries(updatedValues).forEach(([key, value]) =>
updateCell(
row.ref,

View File

@@ -5,9 +5,11 @@ import _isEqual from "lodash/isEqual";
import { Values } from "./utils";
export interface IResetProps
extends Pick<UseFormMethods, "formState" | "reset" | "getValues"> {
export interface IResetProps {
defaultValues: Values;
dirtyFields: UseFormMethods["formState"]["dirtyFields"];
reset: UseFormMethods["reset"];
getValues: UseFormMethods["getValues"];
}
/**
@@ -15,7 +17,7 @@ export interface IResetProps
*/
export default function Reset({
defaultValues,
formState,
dirtyFields,
reset,
getValues,
}: IResetProps) {
@@ -26,7 +28,7 @@ export default function Reset({
// If the field is dirty, (i.e. the user input a value but it hasnt been)
// saved to the db yet, keep its current value and keep it marked as dirty
for (const [field, isDirty] of Object.entries(formState.dirtyFields)) {
for (const [field, isDirty] of Object.entries(dirtyFields)) {
if (isDirty) {
resetValues[field] = currentValues[field];
}

View File

@@ -42,6 +42,7 @@ export default function Form({ values }: IFormProps) {
mode: "onBlur",
defaultValues,
});
const { dirtyFields } = formState;
// const { sideDrawerRef } = useFiretableContext();
// useEffect(() => {
@@ -62,11 +63,11 @@ export default function Form({ values }: IFormProps) {
docRef={docRef}
row={values}
reset={reset}
formState={formState}
dirtyFields={dirtyFields}
/>
<Reset
formState={formState}
dirtyFields={dirtyFields}
reset={reset}
defaultValues={defaultValues}
getValues={getValues}

View File

@@ -3,6 +3,8 @@ import { useForm } from "react-hook-form";
import { IMenuModalProps } from "..";
import { makeStyles, createStyles } from "@material-ui/styles";
import Switch from "@material-ui/core/Switch";
import FormControlLabel from "@material-ui/core/FormControlLabel";
import {
Typography,
TextField,
@@ -26,7 +28,6 @@ const useStyles = makeStyles((theme) =>
codeEditorContainer: {
border: `1px solid ${theme.palette.divider}`,
borderRadius: theme.shape.borderRadius,
overflow: "hidden",
},
mono: {
@@ -59,7 +60,7 @@ export default function DefaultValueInput({
config.defaultValue?.value ?? getFieldProp("initialValue", _type),
},
});
console.log(config);
return (
<>
<Subheading>Default Value</Subheading>
@@ -109,7 +110,32 @@ export default function DefaultValueInput({
/>
</MenuItem>
</TextField>
{(!config.defaultValue || config.defaultValue.type === "undefined") && (
<>
<FormControlLabel
value="required"
label="Make this column required"
labelPlacement="start"
control={
<Switch
checked={config.required}
onChange={(event, checked) => handleChange("required")(checked)}
name="required"
/>
}
style={{
marginLeft: 0,
justifyContent: "space-between",
}}
/>
{
<Typography color="textSecondary" paragraph>
The row will not be created or updated unless all required values
are set.
</Typography>
}
</>
)}
{config.defaultValue?.type === "static" && customFieldInput && (
<form>
<FormAutosave
@@ -134,8 +160,8 @@ export default function DefaultValueInput({
<div className={classes.codeEditorContainer}>
<CodeEditor
height={120}
value={config.defaultValue?.script}
onChange={handleChange("defaultValue.script")}
script={config.defaultValue?.script}
handleChange={handleChange("defaultValue.script")}
editorOptions={{
minimap: {
enabled: false,

View File

@@ -17,13 +17,11 @@ import { useAppContext } from "contexts/AppContext";
import { useConfirmation } from "components/ConfirmationDialog";
import { FieldType } from "constants/fields";
import Switch from "@material-ui/core/Switch";
import FormControlLabel from "@material-ui/core/FormControlLabel";
import Typography from "@material-ui/core/Typography";
import Divider from "@material-ui/core/Divider";
import Subheading from "components/Table/ColumnMenu/Subheading";
import WIKI_LINKS from "constants/wikiLinks";
import Button from "@material-ui/core/Button";
import routes from "constants/routes";
export default function FieldSettings(props: IMenuModalProps) {
const {
name,
@@ -54,6 +52,7 @@ export default function FieldSettings(props: IMenuModalProps) {
) {
setShowRebuildPrompt(true);
}
console.log(key, update);
const updatedConfig = _set({ ...newConfig }, key, update);
setNewConfig(updatedConfig);
};
@@ -66,7 +65,7 @@ export default function FieldSettings(props: IMenuModalProps) {
[newConfig.renderFieldType, type]
);
if (!open) return null;
console.log(newConfig);
return (
<Modal
maxWidth="md"
@@ -77,36 +76,6 @@ export default function FieldSettings(props: IMenuModalProps) {
<>
{initializable && (
<>
{
<section>
<Subheading>Required?</Subheading>
<Typography color="textSecondary" paragraph>
The row will not be created or updated unless all required
values are set.
</Typography>
<FormControlLabel
value="required"
label="Make this column required"
labelPlacement="start"
control={
<Switch
checked={newConfig["required"]}
onChange={() =>
setNewConfig({
...newConfig,
required: !Boolean(newConfig["required"]),
})
}
name="required"
/>
}
style={{
marginLeft: 0,
justifyContent: "space-between",
}}
/>
</section>
}
<section style={{ marginTop: 1 }}>
{/* top margin fixes visual bug */}
<ErrorBoundary fullScreen={false}>
@@ -167,8 +136,19 @@ export default function FieldSettings(props: IMenuModalProps) {
const ftBuildUrl = settingsDoc.get("ftBuildUrl");
if (!ftBuildUrl) {
snack.open({
message: `Cloud Run trigger URL not configured. Configuration guide: ${WIKI_LINKS.cloudRunFtBuilder}`,
message: `Firetable functions builder is not yet setup`,
variant: "error",
action: (
<Button
variant="contained"
component={"a"}
target="_blank"
href={routes.projectSettings}
rel="noopener noreferrer"
>
Go to Settings
</Button>
),
});
}
const userTokenInfo = await appContext?.currentUser?.getIdTokenResult();
@@ -193,6 +173,7 @@ export default function FieldSettings(props: IMenuModalProps) {
});
}
handleSave(fieldName, { config: newConfig });
handleClose();
setShowRebuildPrompt(false);
},
children: "Update",

View File

@@ -37,7 +37,10 @@ export default function NameChange({
}
actions={{
primary: {
onClick: () => handleSave(fieldName, { name: newName }),
onClick: () => {
handleSave(fieldName, { name: newName });
handleClose();
},
children: "Update",
},
secondary: {

View File

@@ -8,7 +8,8 @@ import { TextField } from "@material-ui/core";
import Modal from "components/Modal";
import { FieldType } from "constants/fields";
import FieldsDropdown from "./FieldsDropdown";
import { getFieldProp } from "components/fields";
import { analytics } from "analytics";
const useStyles = makeStyles((theme) =>
createStyles({
helperText: {
@@ -20,11 +21,13 @@ const useStyles = makeStyles((theme) =>
export interface IFormDialogProps extends IMenuModalProps {
data: Record<string, any>;
openSettings: (column: any) => void;
}
export default function FormDialog({
open,
data,
openSettings,
handleClose,
handleSave,
}: IFormDialogProps) {
@@ -33,7 +36,7 @@ export default function FormDialog({
const [columnLabel, setColumnLabel] = useState("");
const [fieldKey, setFieldKey] = useState("");
const [type, setType] = useState(FieldType.shortText);
const requireConfiguration = getFieldProp("requireConfiguration", type);
useEffect(() => {
if (type !== FieldType.id) setFieldKey(_camel(columnLabel));
}, [columnLabel]);
@@ -110,9 +113,22 @@ export default function FormDialog({
config: {},
...data.initializeColumn,
});
if (requireConfiguration) {
openSettings({
type,
name: columnLabel,
fieldName: fieldKey,
key: fieldKey,
config: {},
...data.initializeColumn,
});
} else handleClose();
analytics.logEvent("create_column", {
type,
});
},
disabled: !columnLabel || !fieldKey || !type,
children: "Add",
children: requireConfiguration ? "Next" : "Add",
},
secondary: {
onClick: handleClose,

View File

@@ -3,7 +3,7 @@ import { useState } from "react";
import { IMenuModalProps } from ".";
import Modal from "components/Modal";
import FieldsDropdown from "./FieldsDropdown";
import { analytics } from "analytics";
export default function FormDialog({
fieldName,
type,
@@ -29,7 +29,14 @@ export default function FormDialog({
}
actions={{
primary: {
onClick: () => handleSave(fieldName, { type: newType }),
onClick: () => {
handleSave(fieldName, { type: newType });
handleClose();
analytics.logEvent("change_column_type", {
newType,
prevType: type,
});
},
children: "Update",
},
secondary: {

View File

@@ -116,9 +116,13 @@ export default function ColumnMenu() {
const handleModalSave = (key: string, update: Record<string, any>) => {
actions.update(key, update);
clearModal();
};
const openSettings = (column) => {
setSelectedColumnHeader({
column,
});
setModal({ type: ModalStates.settings, data: { column } });
};
const menuItems = [
{
type: "subheader",
@@ -203,7 +207,7 @@ export default function ColumnMenu() {
// This is based off the cell type
icon: <SettingsIcon />,
onClick: () => {
setModal({ type: ModalStates.settings, data: { column } });
openSettings(column);
},
disabled: !isConfigurable,
},
@@ -304,6 +308,7 @@ export default function ColumnMenu() {
{...menuModalProps}
open={modal.type === ModalStates.new}
data={modal.data}
openSettings={openSettings}
/>
</>
)}

View File

@@ -27,6 +27,10 @@ import { useFiretableContext } from "contexts/FiretableContext";
import { useAppContext } from "contexts/AppContext";
import { DocActions } from "hooks/useDoc";
// Resolve the effective field type of a column for filtering purposes.
// Derivative columns are rendered/stored as another field type
// (config.renderFieldType), so filters must operate on that rendered type
// instead of FieldType.derivative.
const getType = (column) =>
  column.type === FieldType.derivative
    ? // Optional chaining guards against a derivative column whose config
      // has not been saved yet (returns undefined instead of throwing).
      column.config?.renderFieldType
    : column.type;
const OPERATORS = [
{
value: "==",
@@ -166,6 +170,7 @@ const Filters = () => {
operator: "",
value: "",
};
const type = getType(selectedColumn);
if (
[
FieldType.phone,
@@ -173,21 +178,21 @@ const Filters = () => {
FieldType.url,
FieldType.email,
FieldType.checkbox,
].includes(selectedColumn.type)
].includes(type)
) {
updatedQuery = { ...updatedQuery, operator: "==" };
}
if (selectedColumn.type === FieldType.checkbox) {
if (type === FieldType.checkbox) {
updatedQuery = { ...updatedQuery, value: false };
}
if (selectedColumn.type === FieldType.connectTable) {
if (type === FieldType.connectTable) {
updatedQuery = {
key: `${selectedColumn.key}ID`,
operator: "array-contains-any",
value: [],
};
}
if (selectedColumn.type === FieldType.multiSelect) {
if (type === FieldType.multiSelect) {
updatedQuery = {
...updatedQuery,
operator: "array-contains-any",
@@ -200,7 +205,7 @@ const Filters = () => {
const operators = selectedColumn
? OPERATORS.filter((operator) =>
operator.compatibleTypes.includes(selectedColumn.type)
operator.compatibleTypes.includes(getType(selectedColumn))
)
: [];
@@ -220,7 +225,8 @@ const Filters = () => {
const id = open ? "simple-popper" : undefined;
const renderInputField = (selectedColumn, operator) => {
switch (selectedColumn.type) {
const type = getType(selectedColumn);
switch (type) {
case FieldType.checkbox:
return (
<Switch

View File

@@ -247,7 +247,7 @@ export default function ImportCsv({ render, PopoverProps }: IImportCsvProps) {
<TextField
variant="filled"
multiline
inputProps={{ rowsMin: 5 }}
inputProps={{ minRows: 5 }}
autoFocus
fullWidth
label="Paste your CSV here"

View File

@@ -41,14 +41,20 @@ export default function ReExecute() {
const handleConfirm = async () => {
setUpdating(true);
const _ft_forcedUpdateAt = new Date();
const batch = db.batch();
const querySnapshot = await query.get();
querySnapshot.docs.forEach((doc) => {
batch.update(doc.ref, { _ft_forcedUpdateAt });
});
await batch.commit();
setUpdating(false);
    setTimeout(() => setOpen(false), 3000); // give time for the ft function to run
const docs = [...querySnapshot.docs];
while (docs.length) {
const batch = db.batch();
const temp = docs.splice(0, 499);
temp.forEach((doc) => {
batch.update(doc.ref, { _ft_forcedUpdateAt });
});
await batch.commit();
}
setTimeout(() => {
setUpdating(false);
setOpen(false);
    }, 3000); // give time for the ft function to run
};
return (

View File

@@ -8,13 +8,14 @@ import { DialogContentText, Chip, Stack } from "@material-ui/core";
import Alert from "@material-ui/core/Alert";
import TableHeaderButton from "./TableHeaderButton";
import SparkIcon from "@material-ui/icons/OfflineBolt";
import Button from "@material-ui/core/Button";
import Modal from "components/Modal";
import { useFiretableContext } from "contexts/FiretableContext";
import { useAppContext } from "contexts/AppContext";
import { useSnackLogContext } from "contexts/SnackLogContext";
import CodeEditor from "../editors/CodeEditor";
import WIKI_LINKS from "constants/wikiLinks";
import routes from "constants/routes";
export default function SparksEditor() {
const snack = useSnackContext();
const { tableState, tableActions } = useFiretableContext();
@@ -54,8 +55,19 @@ export default function SparksEditor() {
const ftBuildUrl = settingsDoc.get("ftBuildUrl");
if (!ftBuildUrl) {
snack.open({
message: `Cloud Run trigger URL not configured. Configuration guide: ${WIKI_LINKS.cloudRunFtBuilder}`,
message: `Firetable functions builder is not yet setup`,
variant: "error",
action: (
<Button
variant="contained"
component={"a"}
target="_blank"
href={routes.projectSettings}
rel="noopener noreferrer"
>
Go to Settings
</Button>
),
});
}

View File

@@ -19,6 +19,7 @@ import {
Tab,
IconButton,
Link,
Button,
} from "@material-ui/core";
import Modal from "components/Modal";
import { makeStyles, createStyles } from "@material-ui/styles";
@@ -35,7 +36,7 @@ import Ansi from "ansi-to-react";
import EmptyState from "components/EmptyState";
import PropTypes from "prop-types";
import WIKI_LINKS from "constants/wikiLinks";
import routes from "constants/routes";
function a11yProps(index) {
return {
@@ -489,16 +490,17 @@ export default function TableLogs() {
message="Need Configuration"
description={
<>
Cloud Run trigger URL not configured.
<Link
href={WIKI_LINKS.cloudRunFtBuilder}
<Typography>
Function builder is not currently setup.{" "}
</Typography>
<Button
component={"a"}
href={routes.projectSettings}
target="_blank"
rel="noopener noreferrer"
variant="body2"
underline="always"
>
Configuration guide
</Link>
Go to Settings
</Button>
</>
}
/>

View File

@@ -22,6 +22,10 @@ const useStyles = makeStyles((theme) =>
saveButton: {
marginTop: theme.spacing(1),
},
editor: {
      // overwrite user-select: none, which makes the editor unfocusable in Safari
userSelect: "auto",
},
})
);
@@ -138,6 +142,21 @@ export default function CodeEditor(props: any) {
*/
function generateId(): string {}
/**
* Add an item to an array field
*/
function arrayUnion(val: string): void {}
/**
     * Remove an item from an array field
*/
function arrayRemove(val: string): void {}
/**
* Increment a number field
*/
function increment(val: number): void {}
function hasRequiredFields(requiredFields: string[], data: any): boolean {}
function hasAnyRole(
@@ -280,7 +299,7 @@ export default function CodeEditor(props: any) {
}
type bigqueryIndex = {
type: "meiliIndex";
type: "bigqueryIndex";
triggers: Triggers;
shouldRun: ShouldRun;
requiredFields?: Fields;
@@ -349,6 +368,19 @@ export default function CodeEditor(props: any) {
promises: ContextToAny;
}
}
type mailchimp = {
label?:string;
type: "mailchimp";
triggers: Triggers;
shouldRun: ShouldRun;
requiredFields?: Fields;
sparkBody: {
method: any;
path: any;
body: any;
}
}
// an individual spark
type Spark =
@@ -361,6 +393,7 @@ export default function CodeEditor(props: any) {
| sendgridEmail
| apiCall
| twilioMessage
| mailchimp
| task;
type Sparks = Spark[]
@@ -445,6 +478,7 @@ export default function CodeEditor(props: any) {
value={initialEditorValue}
onChange={handleChange}
onValidate={handleEditorValidation}
className={classes.editor}
/>
</div>
</>

View File

@@ -77,14 +77,18 @@ export default function ActionFab({
cloudFunction(
callableName,
data,
(response) => {
async (response) => {
const { message, cellValue, success } = response.data;
setIsRunning(false);
snack.open({
message: JSON.stringify(message),
variant: success ? "success" : "error",
});
if (cellValue && cellValue.status) onSubmit(cellValue);
if (cellValue && cellValue.status) {
await ref.update({
[column.key]: cellValue,
});
}
},
(error) => {
console.error("ERROR", callableName, error);

View File

@@ -28,5 +28,6 @@ export const config: IFieldConfig = {
TableEditor: NullEditor as any,
SideDrawerField,
settings: Settings,
requireConfiguration: true,
};
export default config;

View File

@@ -65,7 +65,11 @@ export const ConnectService = React.forwardRef(function ConnectService(
{Array.isArray(value) &&
value.map((doc: any) => (
<Grid item key={doc.primaryKey}>
<Chip label={config.titleKey} className={classes.chip} />
<Chip
label={config.titleKey}
className={classes.chip}
size="small"
/>
</Grid>
))}
</Grid>

View File

@@ -70,6 +70,7 @@ export const ConnectTable = React.forwardRef(function ConnectTable(
label={config.primaryKeys
.map((key: string) => doc.snapshot[key])
.join(" ")}
size="small"
className={classes.chip}
/>
</Grid>

View File

@@ -1,5 +1,5 @@
import { lazy, Suspense } from "react";
import { Typography } from "@material-ui/core";
import { Typography, Grid } from "@material-ui/core";
import MultiSelect from "@antlerengineering/multiselect";
import FieldSkeleton from "components/SideDrawer/Form/FieldSkeleton";
import { FieldType } from "constants/fields";
@@ -24,28 +24,38 @@ const Settings = ({ config, handleChange }) => {
.map((c) => ({ label: c.name, value: c.key }));
return (
<>
<MultiSelect
label={"Listener fields (this script runs when these fields change)"}
options={columnOptions}
value={config.listenerFields ?? []}
onChange={handleChange("listenerFields")}
/>
<Typography variant="overline">Field type of the output</Typography>
<FieldsDropdown
value={config.renderFieldType}
options={Object.values(FieldType).filter(
(f) =>
![
FieldType.derivative,
FieldType.aggregate,
FieldType.subTable,
FieldType.action,
].includes(f)
)}
onChange={(newType: any) => {
handleChange("renderFieldType")(newType.target.value);
}}
/>
<Grid container direction="row" spacing={2}>
<Grid item xs={12} md={6}>
<Typography variant="overline">listener Fields</Typography>
<MultiSelect
//label={"Listener fields"}
options={columnOptions}
value={config.listenerFields ?? []}
onChange={handleChange("listenerFields")}
/>
<Typography color="textSecondary" paragraph>
Changes to these fields will trigger the evaluation of the column.
</Typography>
</Grid>
<Grid item xs={12} md={6}>
<Typography variant="overline">Output Field type</Typography>
<FieldsDropdown
value={config.renderFieldType}
options={Object.values(FieldType).filter(
(f) =>
![
FieldType.derivative,
FieldType.aggregate,
FieldType.subTable,
FieldType.action,
].includes(f)
)}
onChange={(newType: any) => {
handleChange("renderFieldType")(newType.target.value);
}}
/>
</Grid>
</Grid>
<Typography variant="overline">derivative script</Typography>
<CodeEditorHelper docLink={WIKI_LINKS.derivatives} />
<Suspense fallback={<FieldSkeleton height={200} />}>

View File

@@ -13,6 +13,7 @@ export const config: IFieldConfig = {
initialValue: "",
initializable: true,
icon: <DerivativeIcon />,
requireConfiguration: true,
description:
"Value derived from the rest of the rows values. Displayed using any other field type. Requires Cloud Function setup.",
TableCell: withBasicCell(BasicCell),

View File

@@ -16,7 +16,6 @@ export default function LongText({
disabled,
}: ISideDrawerFieldProps) {
const classes = useStyles();
return (
<Controller
control={control}
@@ -37,7 +36,7 @@ export default function LongText({
disabled={disabled}
multiline
InputProps={{ classes: { multiline: classes.multiline } }}
inputProps={{ rowsMin: 5, maxLength: column.config?.maxLength }}
inputProps={{ minRows: 5, maxLength: column.config?.maxLength }}
/>
);
}}

View File

@@ -40,5 +40,12 @@ export const config: IFieldConfig = {
TableEditor: NullEditor as any,
SideDrawerField,
settings: Settings,
csvImportParser: (v) => {
if (v.includes(",")) {
return v.split(",").map((i) => i.trim());
} else if (v !== "") return [v];
else return v;
},
requireConfiguration: true,
};
export default config;

View File

@@ -18,7 +18,7 @@ export default function Settings({ handleChange, config }) {
<Subheading>Validation Regex</Subheading>
<TextField
type="text"
value={config.maxLength}
value={config.validationRegex}
label={"Validation Regex"}
fullWidth
onChange={(e) => {

View File

@@ -37,5 +37,6 @@ export const config: IFieldConfig = {
TableEditor: NullEditor as any,
SideDrawerField,
settings: Settings,
requireConfiguration: true,
};
export default config;

View File

@@ -30,5 +30,6 @@ export const config: IFieldConfig = {
TableEditor: NullEditor as any,
settings: Settings,
SideDrawerField,
requireConfiguration: true,
};
export default config;

View File

@@ -31,5 +31,6 @@ export const config: IFieldConfig = {
TableEditor: NullEditor as any,
SideDrawerField,
initializable: false,
requireConfiguration: true,
};
export default config;

View File

@@ -11,6 +11,7 @@ export interface IFieldConfig {
name: string;
dataType: string;
initializable?: boolean;
requireConfiguration?: boolean;
initialValue: any;
icon?: React.ReactNode;
description?: string;

View File

@@ -1,5 +1,6 @@
export enum routes {
home = "/",
projectSettings = "/?modal=settings",
auth = "/auth",
impersonatorAuth = "/impersonatorAuth",
jwtAuth = "/jwtAuth",

View File

@@ -3,11 +3,11 @@ import meta from "../../package.json";
const WIKI_PATHS = {
updatingFiretable: "/Updating-Firetable",
derivatives: "/Derivatives",
derivatives: "/Derivative-Fields",
defaultValues: "/Default-Values",
cloudRunFtBuilder: "/Setting-up-cloud-Run-FT-Builder",
FtFunctions: "/Firetable-Cloud-Functions",
securityRules: "/Role-Based-Security-Rules",
setUpAuth: "/Set-Up-Firebase-Authentication",
setUpAuth: "/Setting-Up-Firebase-Authentication",
};
const WIKI_LINK_ROOT = meta.repository.url.replace(".git", "/wiki");

View File

@@ -3,7 +3,7 @@ import { auth, db } from "../firebase";
import firebase from "firebase/app";
import useDoc from "hooks/useDoc";
import createPersistedState from "use-persisted-state";
import { analytics } from "analytics";
import {
useMediaQuery,
ThemeProvider,
@@ -53,6 +53,10 @@ export const AppProvider: React.FC = ({ children }) => {
// Get userDoc
useEffect(() => {
if (currentUser) {
analytics.setUserId(currentUser.uid);
analytics.setUserProperties({
ft_instance: window.location.hostname,
});
dispatchUserDoc({ path: `_FT_USERS/${currentUser.uid}` });
}
}, [currentUser]);

View File

@@ -13,7 +13,8 @@ import { useSnackContext } from "./SnackContext";
import { SideDrawerRef } from "components/SideDrawer";
import { ColumnMenuRef } from "components/Table/ColumnMenu";
import { ImportWizardRef } from "components/Wizards/ImportWizard";
import _find from "lodash/find";
import { deepen } from "utils/fns";
export type Table = {
collection: string;
name: string;

View File

@@ -4,4 +4,5 @@ export default {
databaseURL: `https://${process.env.REACT_APP_FIREBASE_PROJECT_ID}.firebaseio.com`,
projectId: process.env.REACT_APP_FIREBASE_PROJECT_ID,
storageBucket: `${process.env.REACT_APP_FIREBASE_PROJECT_ID}.appspot.com`,
appId: "x",
};

View File

@@ -11,6 +11,8 @@ import {
isCollectionGroup,
generateSmallerId,
missingFieldsReducer,
deepMerge,
deepen,
} from "utils/fns";
import { projectId } from "../../firebase";
import _findIndex from "lodash/findIndex";
@@ -327,7 +329,7 @@ const useTable = (initialOverrides: any) => {
const row = rows[rowIndex];
const { ref, _ft_missingRequiredFields, ...rowData } = row;
const _rows = [...rows];
_rows[rowIndex] = { ...row, ...update };
_rows[rowIndex] = { ...deepMerge(row, { ...deepen(update) }), ...update };
const missingRequiredFields = _ft_missingRequiredFields
? _ft_missingRequiredFields.reduce(
missingFieldsReducer(_rows[rowIndex]),
@@ -335,7 +337,11 @@ const useTable = (initialOverrides: any) => {
)
: [];
if (missingRequiredFields.length === 0) {
ref.set({ ...rowData, ...update }, options).then(onSuccess, onError);
const _rowData = {
...deepMerge(rowData, { ...deepen(update) }),
...update,
};
ref.set(_rowData, options).then(onSuccess, onError);
delete _rows[rowIndex]._ft_missingRequiredFields;
}
rowsDispatch({ type: "set", rows: _rows });

View File

@@ -27,10 +27,14 @@ const useTableConfig = (tablePath?: string) => {
});
useEffect(() => {
const { doc, columns } = tableConfigState;
const { doc, columns, rowHeight } = tableConfigState;
// TODO: REMOVE THIS
// Copy columns, rowHeight to tableConfigState
if (doc && columns !== doc.columns) {
documentDispatch({ columns: doc.columns, rowHeight: doc.rowHeight });
documentDispatch({ columns: doc.columns });
}
if (doc && rowHeight !== doc.rowHeight) {
documentDispatch({ rowHeight: doc.rowHeight });
}
}, [tableConfigState.doc]);
/** used for specifying the table in use

View File

@@ -30,9 +30,11 @@ import TableSettingsDialog, {
TableSettingsDialogModes,
} from "components/TableSettings";
import queryString from "query-string";
import ProjectSettings from "components/ProjectSettings";
import EmptyState from "components/EmptyState";
import WIKI_LINKS from "constants/wikiLinks";
import BuilderInstaller from "../components/BuilderInstaller";
const useStyles = makeStyles((theme) =>
createStyles({
"@global": {
@@ -102,6 +104,20 @@ export default function HomePage() {
data: null,
});
useEffect(() => {
const modal = decodeURIComponent(
queryString.parse(window.location.search).modal as string
);
if (modal) {
switch (modal) {
case "settings":
setOpenProjectSettings(true);
break;
default:
break;
}
}
}, [window.location.search]);
const { sections } = useFiretableContext();
const { userDoc } = useAppContext();
@@ -116,6 +132,7 @@ export default function HomePage() {
});
const [open, setOpen] = useState(false);
const [openProjectSettings, setOpenProjectSettings] = useState(false);
const [openBuilderInstaller, setOpenBuilderInstaller] = useState(false);
const [settingsDocState, settingsDocDispatch] = useDoc({
path: "_FIRETABLE_/settings",
@@ -141,7 +158,11 @@ export default function HomePage() {
</Typography>
<Typography variant="body2">
If you are the project owner please follow the instructions{" "}
<a href={WIKI_LINKS.securityRules} target="_blank" rel="noopener">
<a
href={WIKI_LINKS.securityRules}
target="_blank"
rel="noopener noreferrer"
>
here
</a>{" "}
to setup the project rules.
@@ -304,7 +325,13 @@ export default function HomePage() {
data={settingsDialogState.data}
/>
{openProjectSettings && (
<ProjectSettings handleClose={() => setOpenProjectSettings(false)} />
<ProjectSettings
handleClose={() => setOpenProjectSettings(false)}
handleOpenBuilderInstaller={() => setOpenBuilderInstaller(true)}
/>
)}
{openBuilderInstaller && (
<BuilderInstaller handleClose={() => setOpenBuilderInstaller(false)} />
)}
</HomeNavigation>
);

View File

@@ -1,5 +1,4 @@
import _get from "lodash/get";
/**
* reposition an element in an array
* @param arr array
@@ -117,3 +116,58 @@ export const getCellValue = (row: Record<string, any>, key: string) => {
if (key.includes(".")) return _get(row, key);
return row[key];
};
// convert dot notation to nested object
// e.g. { "a.b.c": 1 } -> { a: { b: { c: 1 } } }
export function deepen(obj) {
  const result: Record<string, any> = {};

  for (const [path, value] of Object.entries(obj)) {
    // Walk/create intermediate objects for every segment but the last
    const segments = path.split(".");
    const leaf = segments.pop()!;

    let cursor = result;
    for (const segment of segments) {
      if (!cursor[segment]) cursor[segment] = {};
      cursor = cursor[segment];
    }

    // Assign the value at the final segment
    cursor[leaf] = value;
  }

  return result;
}
// Flatten a nested object into dot-notation keys (inverse of deepen).
// e.g. { a: { b: 1 } } -> { "a.b": 1 }; null values are kept as leaves.
export function flattenObject(ob) {
  const flat: Record<string, any> = {};

  for (const [key, value] of Object.entries(ob)) {
    if (typeof value === "object" && value !== null) {
      // Recurse into nested objects, then prefix every nested key
      // with the current key.
      const nested = flattenObject(value);
      for (const [nestedKey, nestedValue] of Object.entries(nested)) {
        flat[`${key}.${nestedKey}`] = nestedValue;
      }
    } else {
      flat[key] = value;
    }
  }

  return flat;
}
// Recursively merge `source` into `target`.
// NOTE: mutates `target` in place and returns it. Nested objects are merged
// key-by-key; primitives and null in `source` overwrite the target value.
export const deepMerge = (target, source) => {
  for (const key in source) {
    if (source[key] && typeof source[key] === "object") {
      // Fix: if the existing value is missing OR not an object (e.g. a
      // primitive left over from a previous flat write), replace it with a
      // fresh object instead of recursing into a primitive — assigning
      // properties onto a primitive throws in strict mode and silently
      // drops data otherwise.
      if (!target[key] || typeof target[key] !== "object") target[key] = {};
      deepMerge(target[key], source[key]);
    } else {
      target[key] = source[key];
    }
  }
  return target;
};

View File

@@ -13378,10 +13378,10 @@ react-floater@^0.7.2:
react-proptype-conditional-require "^1.0.4"
tree-changes "^0.5.1"
react-hook-form@^6.15.5:
version "6.15.5"
resolved "https://registry.yarnpkg.com/react-hook-form/-/react-hook-form-6.15.5.tgz#c2578f9ce6a6df7b33015587d40cd880dc13e2db"
integrity sha512-so2jEPYKdVk1olMo+HQ9D9n1hVzaPPFO4wsjgSeZ964R7q7CHsYRbVF0PGBi83FcycA5482WHflasdwLIUVENg==
react-hook-form@^6:
version "6.15.8"
resolved "https://registry.yarnpkg.com/react-hook-form/-/react-hook-form-6.15.8.tgz#725c139d308c431c4611e4b9d85a49f01cfc0e7a"
integrity sha512-prq82ofMbnRyj5wqDe8hsTRcdR25jQ+B8KtCS7BLCzjFHAwNuCjRwzPuP4eYLsEBjEIeYd6try+pdLdw0kPkpg==
react-hook-form@^7.10.0:
version "7.11.0"