Merge pull request #488 from FiretableProject/develop

Develop
Shams
2021-07-27 18:56:55 +10:00
committed by GitHub
50 changed files with 30 additions and 9830 deletions


@@ -1,2 +0,0 @@
antler*.json
.gitignore

ft_build/.gitignore

@@ -1 +0,0 @@
build/


@@ -1,20 +0,0 @@
# Use the official lightweight Node.js image.
# https://hub.docker.com/_/node
FROM node:14-slim
# Create and change to the app directory.
WORKDIR /workdir
# Copy local code to the container image.
# If you've run yarn install locally, node_modules is copied into the
# container's work directory, saving a reinstall of the same packages.
COPY . ./
# Install any dependencies still missing after the copy above.
# If you add a package-lock.json, speed your build by switching to 'npm ci'.
# RUN npm ci --only=production
RUN yarn
# Run the web service on container startup.
CMD [ "yarn", "start" ]


@@ -1,80 +0,0 @@
import { addPackages, addSparkLib, asyncExecute } from "./terminal";
const fs = require("fs");
import { generateConfigFromTableSchema } from "./loader";
import { commandErrorHandler } from "../utils";
const path = require("path");
import admin from "firebase-admin";
export default async function generateConfig(
schemaPath: string,
user: admin.auth.UserRecord,
streamLogger
) {
return await generateConfigFromTableSchema(
schemaPath,
user,
streamLogger
).then(async (success) => {
if (!success) {
await streamLogger.info(
`generateConfigFromTableSchema failed to complete`
);
return false;
}
await streamLogger.info(`generateConfigFromTableSchema done`);
const configFile = fs.readFileSync(
path.resolve(__dirname, "../functions/src/functionConfig.ts"),
"utf-8"
);
await streamLogger.info(`configFile: ${JSON.stringify(configFile)}`);
const requiredDependencies = configFile.match(
/(?<=(require\(("|'))).*?(?=("|')\))/g
);
if (requiredDependencies) {
const packagesAdded = await addPackages(
requiredDependencies.map((p: any) => ({ name: p })),
user,
streamLogger
);
if (!packagesAdded) {
return false;
}
}
await streamLogger.info(
`requiredDependencies: ${JSON.stringify(requiredDependencies)}`
);
const isFunctionConfigValid = await asyncExecute(
"cd build/functions/src; tsc functionConfig.ts",
commandErrorHandler(
{
user,
functionConfigTs: configFile,
description: `Invalid compiled functionConfig.ts`,
},
streamLogger
)
);
await streamLogger.info(
`isFunctionConfigValid: ${JSON.stringify(isFunctionConfigValid)}`
);
if (!isFunctionConfigValid) {
return false;
}
const { sparksConfig } = require("../functions/src/functionConfig.js");
const requiredSparks = sparksConfig.map((s: any) => s.type);
await streamLogger.info(
`requiredSparks: ${JSON.stringify(requiredSparks)}`
);
for (const lib of requiredSparks) {
const success = await addSparkLib(lib, user, streamLogger);
if (!success) {
return false;
}
}
return true;
});
}
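The lookbehind regex above is what harvests package names out of the generated config file. A minimal sketch of its behaviour, with a hypothetical input string:

// Sketch (hypothetical input) of the require() extraction used above.
const sample = `const fetch = require("node-fetch");\nconst _ = require('lodash');`;
const deps = sample.match(/(?<=(require\(("|'))).*?(?=("|')\))/g);
console.log(deps); // ["node-fetch", "lodash"]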


@@ -1,207 +0,0 @@
import { db } from "../firebaseConfig";
const fs = require("fs");
const beautify = require("js-beautify").js;
import admin from "firebase-admin";
import { parseSparksConfig } from "../utils";
export const generateConfigFromTableSchema = async (
schemaDocPath: string,
user: admin.auth.UserRecord,
streamLogger
) => {
await streamLogger.info("getting schema...");
const schemaDoc = await db.doc(schemaDocPath).get();
const schemaData = schemaDoc.data();
try {
if (!schemaData) throw new Error("no schema found");
// Temporarily disabled because this is super long
// await streamLogger.info(`schemaData: ${JSON.stringify(schemaData)}`);
const derivativeColumns = Object.values(schemaData.columns).filter(
(col: any) => col.type === "DERIVATIVE"
);
await streamLogger.info(
`derivativeColumns: ${JSON.stringify(derivativeColumns)}`
);
const derivativesConfig = `[${derivativeColumns.reduce(
(acc, currColumn: any) => {
if (
!currColumn.config.listenerFields ||
currColumn.config.listenerFields.length === 0
)
throw new Error(
`${currColumn.key} derivative is missing listener fields`
);
if (currColumn.config.listenerFields.includes(currColumn.key))
throw new Error(
`${currColumn.key} derivative has its own key as a listener field`
);
return `${acc}{\nfieldName:'${
currColumn.key
}',evaluate:async ({row,ref,db,auth,storage,utilFns}) =>{${
currColumn.config.script
}},\nlistenerFields:[${currColumn.config.listenerFields
.map((fieldKey: string) => `"${fieldKey}"`)
.join(",\n")}]},\n`;
},
""
)}]`;
await streamLogger.info(
`derivativesConfig: ${JSON.stringify(derivativesConfig)}`
);
const initializableColumns = Object.values(
schemaData.columns
).filter((col: any) => Boolean(col.config?.defaultValue));
await streamLogger.info(
`initializableColumns: ${JSON.stringify(initializableColumns)}`
);
const initializeConfig = `[${initializableColumns.reduce(
(acc, currColumn: any) => {
if (currColumn.config.defaultValue.type === "static") {
return `${acc}{\nfieldName:'${currColumn.key}',
type:"${currColumn.config.defaultValue.type}",
value:${
typeof currColumn.config.defaultValue.value === "string"
? `"${currColumn.config.defaultValue.value}"`
: JSON.stringify(currColumn.config.defaultValue.value)
},
},\n`;
} else if (currColumn.config.defaultValue.type === "dynamic") {
return `${acc}{\nfieldName:'${currColumn.key}',
type:"${currColumn.config.defaultValue.type}",
script:async ({row,ref,db,auth,utilFns}) =>{${currColumn.config.defaultValue.script}},
},\n`;
} else {
return `${acc}{\nfieldName:'${currColumn.key}',
type:"${currColumn.config.defaultValue.type}"
},\n`;
}
},
""
)}]`;
await streamLogger.info(
`initializeConfig: ${JSON.stringify(initializeConfig)}`
);
const documentSelectColumns = Object.values(schemaData.columns).filter(
(col: any) => col.type === "DOCUMENT_SELECT" && col.config?.trackedFields
);
const documentSelectConfig = `[${documentSelectColumns.reduce(
(acc, currColumn: any) => {
return `${acc}{\nfieldName:'${
currColumn.key
}',\ntrackedFields:[${currColumn.config.trackedFields
.map((fieldKey: string) => `"${fieldKey}"`)
.join(",\n")}]},\n`;
},
""
)}]`;
await streamLogger.info(
`documentSelectColumns: ${JSON.stringify(documentSelectColumns)}`
);
const sparksConfig = parseSparksConfig(
schemaData.sparks,
user,
streamLogger
);
await streamLogger.info(`sparksConfig: ${JSON.stringify(sparksConfig)}`);
const collectionType = schemaDocPath.includes("subTables")
? "subCollection"
: schemaDocPath.includes("groupSchema")
? "groupCollection"
: "collection";
let collectionId = "";
let functionName = "";
let triggerPath = "";
switch (collectionType) {
case "collection":
collectionId = schemaDocPath.split("/").pop() ?? "";
functionName = `"${collectionId}"`;
triggerPath = `"${collectionId}/{docId}"`;
break;
case "subCollection":
let pathParentIncrement = 0;
triggerPath =
'"' +
schemaDocPath
.replace("_FIRETABLE_/settings/schema/", "")
.replace(/subTables/g, function () {
pathParentIncrement++;
return `{parentDoc${pathParentIncrement}}`;
}) +
"/{docId}" +
'"';
functionName =
'"' +
schemaDocPath
.replace("_FIRETABLE_/settings/schema/", "")
.replace(/\/subTables\//g, "_") +
'"';
break;
case "groupCollection":
collectionId = schemaDocPath.split("/").pop() ?? "";
const triggerDepth = schemaData.triggerDepth
? schemaData.triggerDepth
: 1;
triggerPath = "";
for (let i = 1; i <= triggerDepth; i++) {
triggerPath = triggerPath + `{parentCol${i}}/{parentDoc${i}}/`;
}
triggerPath = '"' + triggerPath + collectionId + "/" + "{docId}" + '"';
functionName = `"CG_${collectionId}${
triggerDepth > 1 ? `_D${triggerDepth}` : ""
}"`;
break;
default:
break;
}
await streamLogger.info(
`collectionType: ${JSON.stringify(collectionType)}`
);
// generate field types from table meta data
const fieldTypes = JSON.stringify(
Object.keys(schemaData.columns).reduce((acc, cur) => {
const field = schemaData.columns[cur];
let fieldType = field.type;
if (fieldType === "DERIVATIVE") {
fieldType = field.config.renderFieldType;
}
return {
[cur]: fieldType,
...acc,
};
}, {})
);
await streamLogger.info(`fieldTypes: ${JSON.stringify(fieldTypes)}`);
const exports: any = {
fieldTypes,
triggerPath,
functionName: functionName.replace(/-/g, "_"),
derivativesConfig,
initializeConfig,
documentSelectConfig,
sparksConfig,
};
await streamLogger.info(`exports: ${JSON.stringify(exports)}`);
const fileData = Object.keys(exports).reduce((acc, currKey) => {
return `${acc}\nexport const ${currKey} = ${exports[currKey]}`;
}, ``);
await streamLogger.info(`fileData: ${JSON.stringify(fileData)}`);
const path = require("path");
fs.writeFileSync(
path.resolve(__dirname, "../functions/src/functionConfig.ts"),
beautify(fileData, { indent_size: 2 })
);
return true;
} catch (error) {
streamLogger.error(error.message);
return false;
}
};
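For orientation, the generated functions/src/functionConfig.ts ends up looking roughly like this (a hypothetical single-table schema; the real file also carries initializeConfig, documentSelectConfig, and sparksConfig):

// Roughly the output written above, for a hypothetical "scores" table.
export const fieldTypes = { score: "NUMBER", total: "NUMBER" };
export const triggerPath = "scores/{docId}";
export const functionName = "scores";
export const derivativesConfig = [{
fieldName: 'total',
evaluate: async ({ row, ref, db, auth, storage, utilFns }) => { return row.score * 2; },
listenerFields: ["score"],
}];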


@@ -1,83 +0,0 @@
import * as child from "child_process";
import admin from "firebase-admin";
import { commandErrorHandler, logErrorToDB } from "../utils";
function execute(command: string, callback: any) {
console.log(command);
child.exec(command, function (error, stdout, stderr) {
console.log({ error, stdout, stderr });
callback(stdout);
});
}
export const asyncExecute = async (command: string, callback: any) =>
new Promise(async (resolve, reject) => {
child.exec(command, async function (error, stdout, stderr) {
console.log({ error, stdout, stderr });
await callback(error, stdout, stderr);
resolve(!error);
});
});
export const addPackages = async (
packages: { name: string; version?: string }[],
user: admin.auth.UserRecord,
streamLogger
) => {
const packagesString = packages.reduce((acc, currPackage) => {
return `${acc} ${currPackage.name}@${currPackage.version ?? "latest"}`;
}, "");
if (packagesString.trim().length !== 0) {
const success = await asyncExecute(
`cd build/functions;yarn add ${packagesString}`,
commandErrorHandler(
{
user,
description: "Error adding packages",
},
streamLogger
)
);
return success;
}
return true;
};
export const addSparkLib = async (
name: string,
user: admin.auth.UserRecord,
streamLogger
) => {
try {
const { dependencies } = require(`../sparksLib/${name}`);
const packages = Object.keys(dependencies).map((key) => ({
name: key,
version: dependencies[key],
}));
const success = await addPackages(packages, user, streamLogger);
if (!success) {
return false;
}
} catch (error) {
logErrorToDB(
{
user,
errorDescription: "Error parsing dependencies",
},
streamLogger
);
return false;
}
const success = await asyncExecute(
`cp build/sparksLib/${name}.ts build/functions/src/sparks/${name}.ts`,
commandErrorHandler(
{
user,
description: "Error copying sparksLib",
},
streamLogger
)
);
return success;
};
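A usage sketch (hypothetical package list, inside an async context with the user and streamLogger from the surrounding call sites), to make the assembled yarn command concrete:

// Installs lodash@latest and node-fetch@2.6.1 under build/functions.
const ok = await addPackages(
[{ name: "lodash" }, { name: "node-fetch", version: "2.6.1" }],
user,
streamLogger
);
// runs: cd build/functions;yarn add lodash@latest node-fetch@2.6.1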


@@ -1,18 +0,0 @@
{
"compilerOptions": {
"module": "commonjs",
"noImplicitReturns": true,
"noUnusedLocals": true,
"outDir": "lib",
"sourceMap": true,
"strict": true,
"noImplicitAny": false,
"resolveJsonModule": true,
"target": "es6",
"lib": ["ESNext"],
"strictNullChecks": false
},
"compileOnSave": true,
"include": ["src", "generateConfig.ts"],
"ignore": ["sparks", "sparksLib"]
}


@@ -1,29 +0,0 @@
#!/bin/bash
helpFunction()
{
echo "Usage: ./deploy.sh --project-id [YOUR GCLOUD PROJECT ID]"
exit 0
}
while test $# -gt 0; do
case "$1" in
--project-id)
shift
project_id=$1
shift
;;
*)
echo "$1 is not a recognized flag!"
exit 1
;;
esac
done
if [[ -z "$project_id" ]];
then
helpFunction
fi
gcloud config set project $project_id
gcloud builds submit --tag gcr.io/$project_id/ft-builder
gcloud run deploy ft-builder --image gcr.io/$project_id/ft-builder --platform managed --memory 4Gi --allow-unauthenticated --set-env-vars="_PROJECT_ID=$project_id" --region=australia-southeast1


@@ -1,9 +0,0 @@
{
"functions": {
"predeploy": [
"npm --prefix \"$RESOURCE_DIR\" run lint",
"npm --prefix \"$RESOURCE_DIR\" run build"
],
"source": "functions"
}
}


@@ -1,10 +0,0 @@
// Initialize Firebase Admin
import * as admin from "firebase-admin";
admin.initializeApp();
const db = admin.firestore();
const auth = admin.auth();
db.settings({ timestampsInSnapshots: true, ignoreUndefinedProperties: true });
export { db, admin, auth };


@@ -1,64 +0,0 @@
module.exports = {
env: {
browser: true,
es6: true,
node: true,
},
extends: ["plugin:import/errors", "plugin:import/warnings"],
parser: "@typescript-eslint/parser",
parserOptions: {
project: "tsconfig.json",
sourceType: "module",
},
plugins: ["@typescript-eslint", "import"],
rules: {
"@typescript-eslint/adjacent-overload-signatures": "error",
"@typescript-eslint/no-empty-function": "error",
"@typescript-eslint/no-empty-interface": "warn",
"@typescript-eslint/no-floating-promises": "error",
"@typescript-eslint/no-namespace": "error",
"@typescript-eslint/no-unnecessary-type-assertion": "error",
"@typescript-eslint/prefer-for-of": "warn",
"@typescript-eslint/triple-slash-reference": "error",
"@typescript-eslint/unified-signatures": "warn",
"comma-dangle": "warn",
"constructor-super": "error",
eqeqeq: ["warn", "always"],
"import/no-deprecated": "warn",
"import/no-extraneous-dependencies": "error",
"import/no-unassigned-import": "warn",
"no-cond-assign": "error",
"no-duplicate-case": "error",
"no-duplicate-imports": "error",
"no-empty": [
"error",
{
allowEmptyCatch: true,
},
],
"no-invalid-this": "error",
"no-new-wrappers": "error",
"no-param-reassign": "error",
"no-redeclare": "error",
"no-sequences": "error",
"no-shadow": [
"error",
{
hoist: "all",
},
],
"no-throw-literal": "error",
"no-unsafe-finally": "error",
"no-unused-labels": "error",
"no-var": "warn",
"no-void": "error",
"prefer-const": "warn",
},
settings: {
jsdoc: {
tagNamePreference: {
returns: "return",
},
},
},
};


@@ -1,12 +0,0 @@
# Compiled JavaScript files
**/*.js
**/*.js.map
# Except the ESLint config file
!.eslintrc.js
# TypeScript v1 declaration files
typings/
# Node.js dependency directory
node_modules/


@@ -1,38 +0,0 @@
{
"name": "functions",
"version": "0.0.1",
"scripts": {
"lint": "tslint --project tsconfig.json",
"build": "tsc",
"serve": "npm run build && firebase serve --only functions",
"shell": "npm run build && firebase functions:shell",
"start": "npm run shell",
"deployFT": "echo 'n\n' | firebase deploy --interactive",
"logs": "firebase functions:log"
},
"engines": {
"node": "14"
},
"main": "lib/index.js",
"dependencies": {
"@google-cloud/secret-manager": "^3.2.3",
"firebase-admin": "^9.4.2",
"firebase-functions": "^3.13.1"
},
"devDependencies": {
"@types/node": "^14.14.11",
"husky": "^4.2.5",
"prettier": "^2.1.1",
"pretty-quick": "^3.0.0",
"ts-node": "^8.6.2",
"tsc": "^1.20150623.0",
"tslint": "^6.1.0",
"typescript": "^4.1.2"
},
"husky": {
"hooks": {
"pre-commit": "pretty-quick --staged"
}
},
"private": true
}


@@ -1,63 +0,0 @@
import * as functions from "firebase-functions";
import * as admin from "firebase-admin";
import { db, auth, storage } from "../firebaseConfig";
import utilFns from "../utils";
const derivative = (
functionConfig: {
fieldName: string;
listenerFields: string[];
evaluate: (props: {
row: any;
ref: FirebaseFirestore.DocumentReference<FirebaseFirestore.DocumentData>;
db: FirebaseFirestore.Firestore;
auth: admin.auth.Auth;
storage: admin.storage.Storage;
utilFns: any;
}) => any;
}[]
) => async (change: functions.Change<functions.firestore.DocumentSnapshot>) => {
try {
const row = change.after?.data();
const ref = change.after ? change.after.ref : change.before.ref;
const update = await functionConfig.reduce(
async (accUpdates: any, currDerivative) => {
const shouldEval = utilFns.hasChanged(change)([
...currDerivative.listenerFields,
"_ft_forcedUpdateAt",
]);
if (shouldEval) {
try {
const newValue = await currDerivative.evaluate({
row,
ref,
db,
auth,
storage,
utilFns,
});
if (
newValue !== undefined &&
newValue !== row[currDerivative.fieldName]
) {
return {
...(await accUpdates),
[currDerivative.fieldName]: newValue,
};
}
} catch (error) {
console.log(error);
}
}
return await accUpdates;
},
{}
);
return update;
} catch (error) {
console.log(`Derivatives Error`, error);
return {};
}
};
export default derivative;


@@ -1,19 +0,0 @@
// Initialize Firebase Admin
import * as functions from "firebase-functions";
import * as admin from "firebase-admin";
admin.initializeApp();
// Initialize Cloud Firestore Database
export const db = admin.firestore();
// Initialize Auth
export const auth = admin.auth();
// Initialize Storage
export const storage = admin.storage();
const settings = {
timestampsInSnapshots: true,
ignoreUndefinedProperties: true,
};
db.settings(settings);
export const env = functions.config();


@@ -1,68 +0,0 @@
import * as functions from "firebase-functions";
import derivative from "./derivatives";
import spark from "./sparks";
import {
functionName,
triggerPath,
derivativesConfig,
documentSelectConfig,
sparksConfig,
initializeConfig,
fieldTypes,
} from "./functionConfig";
import { getTriggerType, changedDocPath } from "./utils";
import propagate from "./propagates";
import initialize from "./initialize";
export const FT = {
[functionName]: functions.firestore
.document(triggerPath)
.onWrite(async (change, context) => {
const triggerType = getTriggerType(change);
let promises: Promise<any>[] = [];
const sparkPromises = sparksConfig
.filter((sparkConfig) => sparkConfig.triggers.includes(triggerType))
.map((sparkConfig) => spark(sparkConfig, fieldTypes)(change, context));
console.log(
`#${
sparkPromises.length
} sparks will be evaluated on ${triggerType} of ${changedDocPath(
change
)}`
);
promises = sparkPromises;
const propagatePromise = propagate(
change,
documentSelectConfig,
triggerType
);
promises.push(propagatePromise);
try {
let docUpdates = {};
if (triggerType === "update") {
try {
docUpdates = await derivative(derivativesConfig)(change);
} catch (err) {
console.log(`caught error: ${err}`);
}
} else if (triggerType === "create") {
try {
const initialData = await initialize(initializeConfig)(
change.after
);
const derivativeData = await derivative(derivativesConfig)(change);
docUpdates = { ...initialData, ...derivativeData };
} catch (err) {
console.log(`caught error: ${err}`);
}
}
if (Object.keys(docUpdates).length !== 0) {
promises.push(change.after.ref.update(docUpdates));
}
const result = await Promise.allSettled(promises);
console.log(JSON.stringify(result));
} catch (err) {
console.log(`caught error: ${err}`);
}
}),
};


@@ -1,30 +0,0 @@
import * as functions from "firebase-functions";
import utilFns from "../utils";
import { db, auth, storage } from "../firebaseConfig";
const initializedDoc = (
columns: { fieldName: string; type: string; value?: any; script?: any }[]
) => async (snapshot: functions.firestore.DocumentSnapshot) =>
columns.reduce(async (acc, column) => {
if (snapshot.get(column.fieldName) !== undefined) return { ...(await acc) }; // prevents overwriting already initialised values
if (column.type === "static") {
return {
...(await acc),
[column.fieldName]: column.value,
};
} else if (column.type === "null") {
return { ...(await acc), [column.fieldName]: null };
} else if (column.type === "dynamic") {
return {
...(await acc),
[column.fieldName]: await column.script({
row: snapshot.data(),
ref: snapshot.ref,
db,
auth,
storage,
utilFns,
}),
};
} else return { ...(await acc) };
}, {});
export default initializedDoc;
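A sketch showing the three defaultValue kinds flowing through initializedDoc, with hypothetical column names and some DocumentSnapshot `snapshot`:

// Hypothetical initializeConfig covering static, null, and dynamic defaults.
const initialData = await initializedDoc([
{ fieldName: "status", type: "static", value: "pending" },
{ fieldName: "assignee", type: "null" },
{ fieldName: "slug", type: "dynamic", script: async ({ ref }) => ref.id },
])(snapshot);
// -> { status: "pending", assignee: null, slug: "<docId>" }, skipping fields already set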


@@ -1,98 +0,0 @@
import { DocumentSnapshot } from "firebase-functions/lib/providers/firestore";
import { rowReducer } from "../utils";
import { db } from "../firebaseConfig";
const TARGET_SUB_COLLECTION = "_FT_BINDINGS";
//sample binding document
// /_FT_BINDINGS/{docId}
// docId is encodeURIComponent of docPath
/**
{
[targetCollectionName]:{
[targetField]:{
trackedFields: [],
targets: {
[docId]:true
}
}
}
}
*/
// Source-change trigger:
// check for and propagate any tracked changes to the bound target documents.
export const propagateChanges = (docSnapshot: DocumentSnapshot) =>
new Promise((resolve, reject) =>
db
.collection(TARGET_SUB_COLLECTION)
.doc(encodeURIComponent(docSnapshot.ref.path))
.get()
.then((doc) => {
const promises = [];
const docData = doc.data();
if (!doc.exists) {
resolve(false);
return;
}
const targetCollectionPaths = Object.keys(docData);
targetCollectionPaths.forEach((cPath) => {
const targetFieldKeys = Object.keys(docData[cPath]);
targetFieldKeys.forEach((targetFieldKey) => {
const { trackedFields, targets } = docData[cPath][targetFieldKey];
const fieldPromises = Object.keys(targets).map(
async (targetDocId) => {
const targetRef = db
.collection(decodeURIComponent(cPath))
.doc(targetDocId);
const targetDoc = await targetRef.get();
if (!targetDoc.exists) return false;
const targetFieldValue = targetDoc.get(targetFieldKey);
const indexOfCurrentTarget = targetFieldValue.findIndex(
(element) => element.docPath === docSnapshot.ref.path
);
if (indexOfCurrentTarget > -1) {
targetFieldValue[indexOfCurrentTarget].snapshot = rowReducer(
trackedFields,
docSnapshot.data()
);
await targetRef.update({
[targetFieldKey]: targetFieldValue,
});
return true;
} else return false;
}
);
fieldPromises.forEach((p) => promises.push(p));
});
});
resolve(Promise.allSettled(promises));
return;
})
);
// When deleting a document, remove all snapshot copies of it held in target documents.
export const removeCopiesOfDeleteDoc = (
sourceDocRef: FirebaseFirestore.DocumentReference
) =>
sourceDocRef
.collection(TARGET_SUB_COLLECTION)
.get()
.then((queryResult) => {
queryResult.docs.map(async (doc) => {
const { targetRef, targetFieldKey } = doc.data() as {
targetRef: FirebaseFirestore.DocumentReference;
targetFieldKey: string;
};
const targetDoc = await targetRef.get();
const currentTargetFieldValue = targetDoc.get(targetFieldKey);
const newTargetFieldValue = currentTargetFieldValue.filter(
({ docPath }: { docPath: string; snapshot: any }) =>
docPath !== sourceDocRef.path
);
await targetRef.update({ [targetFieldKey]: newTargetFieldValue });
await doc.ref.delete();
});
});
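Concretely, a bindings document under this scheme might look like the following (hypothetical source doc authors/a1, referenced from a posts collection):

// Hypothetical /_FT_BINDINGS/{encodeURIComponent("authors/a1")} contents.
const exampleBinding = {
posts: {
author: {
trackedFields: ["name", "photo"],
targets: { p1: true, p2: true },
},
},
};
// propagateChanges then copies name/photo from authors/a1 into the matching
// array element of posts/p1.author and posts/p2.author.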


@@ -1,103 +0,0 @@
import * as admin from "firebase-admin";
const fieldValue = admin.firestore.FieldValue;
import { db } from "../firebaseConfig";
const TARGET_SUB_COLLECTION = "_FT_BINDINGS";
//sample bindings document
// /_FT_BINDINGS/{docId}
// docId is encodeURIComponent of docPath
/**
{
[targetCollectionName]:{
[targetField]:{
trackedFields: [],
targets: {
[docId]:true
}
}
}
}
*/
// Target-change trigger:
// add a propagation reference to the source document's bindings doc.
export const addTargetRef = (
targetRef: FirebaseFirestore.DocumentReference,
sourceDocPath: string,
targetFieldKey: string,
trackedFields
) =>
db.doc(`${TARGET_SUB_COLLECTION}/${encodeURIComponent(sourceDocPath)}`).set(
{
[encodeURIComponent(targetRef.parent.path)]: {
[targetFieldKey]: {
trackedFields,
targets: { [targetRef.id]: true },
},
},
},
{ merge: true }
);
// remove the propagation reference from the source document's bindings doc
export const removeTargetRef = (
targetRef: FirebaseFirestore.DocumentReference,
sourceDocPath: string,
targetFieldKey: string
) =>
db.doc(`${TARGET_SUB_COLLECTION}/${encodeURIComponent(sourceDocPath)}`).set(
{
[encodeURIComponent(targetRef.parent.path)]: {
[targetFieldKey]: {
targets: { [targetRef.id]: fieldValue.delete() },
},
},
},
{ merge: true }
);
// db
// .doc(`${sourceDocPath}/${TARGET_SUB_COLLECTION}/${encodeURIComponent(targetRef.parent.path)}`)
// .set({ [targetFieldKey]:{targets:{[targetRef.id]:fieldValue.delete()}}},{merge: true});
// new Promise((resolve, reject) => db
// .collection(`${sourceDocPath}/${TARGET_SUB_COLLECTION}`)
// .where("targetRef", "==", targetRef)
// .where("targetFieldKey","==",targetFieldKey)
// .get()
// .then((queryResult) => resolve(Promise.all(queryResult.docs.map((doc) => doc.ref.delete())))));
// removes all references to deleted targets
export const removeRefsOnTargetDelete = (
targetRef: FirebaseFirestore.DocumentReference,
targetFieldKey: string
) =>
new Promise((resolve, reject) =>
db
.collection(TARGET_SUB_COLLECTION)
.where(
`${targetRef.parent.path}.${targetFieldKey}.targets.${targetRef.id}`,
"==",
true
)
.get()
.then((queryResult) =>
resolve(
Promise.all(
queryResult.docs.map((doc) =>
doc.ref.set(
{
[encodeURIComponent(targetRef.parent.path)]: {
[targetFieldKey]: {
targets: { [targetRef.id]: fieldValue.delete() },
},
},
},
{ merge: true }
)
)
)
)
)
);


@@ -1,81 +0,0 @@
import * as functions from "firebase-functions";
import { propagateChanges, removeCopiesOfDeleteDoc } from "./SourceFns";
import {
addTargetRef,
removeTargetRef,
removeRefsOnTargetDelete,
} from "./TargetFns";
//import { asyncForEach} from '../utils'
const propagateChangesOnTrigger = (
change: functions.Change<functions.firestore.DocumentSnapshot>,
triggerType: "delete" | "create" | "update"
) => {
switch (triggerType) {
case "update":
return propagateChanges(change.after);
case "delete":
return removeCopiesOfDeleteDoc(change.before.ref);
case "create":
default:
return false;
}
};
const updateLinks = (
change: functions.Change<functions.firestore.DocumentSnapshot>,
config: { fieldName: string; trackedFields: string[] }
) => {
const beforeDocPaths = change.before.get(config.fieldName)
? change.before.get(config.fieldName).map((x) => x.docPath)
: [];
const afterDocPaths = change.after.get(config.fieldName)
? change.after.get(config.fieldName).map((x) => x.docPath)
: [];
const addedDocPaths = afterDocPaths.filter(
(x) => !beforeDocPaths.includes(x)
);
const removedDocPaths = beforeDocPaths.filter(
(x) => !afterDocPaths.includes(x)
);
if (addedDocPaths.length !== 0 || removedDocPaths.length !== 0) {
const addPromises = addedDocPaths.map((docPath) =>
addTargetRef(
change.after.ref,
docPath,
config.fieldName,
config.trackedFields
)
);
const removePromises = removedDocPaths.map((docPath) =>
removeTargetRef(change.after.ref, docPath, config.fieldName)
);
return Promise.all([...addPromises, ...removePromises]);
} else {
return false;
}
};
export default function propagate(
change: functions.Change<functions.firestore.DocumentSnapshot>,
config: { fieldName: string; trackedFields: string[] }[],
triggerType: "delete" | "create" | "update"
) {
const promises = [];
if (["delete", "update"].includes(triggerType)) {
const propagateChangesPromise = propagateChangesOnTrigger(
change,
triggerType
);
promises.push(propagateChangesPromise);
}
if (config.length > 0) {
if (triggerType === "delete") {
config.forEach((c) =>
promises.push(removeRefsOnTargetDelete(change.before.ref, c.fieldName))
);
} else if (triggerType === "update") {
config.forEach((c) => promises.push(updateLinks(change, c)));
}
}
return Promise.allSettled(promises);
}


@@ -1,77 +0,0 @@
import * as functions from "firebase-functions";
import utilFns, { hasRequiredFields, getTriggerType } from "../utils";
import { db, auth, storage } from "../firebaseConfig";
const spark = (sparkConfig, fieldTypes) => async (
change: functions.Change<functions.firestore.DocumentSnapshot>,
context: functions.EventContext
) => {
const beforeData = change.before?.data();
const afterData = change.after?.data();
const ref = change.after ? change.after.ref : change.before.ref;
const triggerType = getTriggerType(change);
try {
const {
label,
type,
triggers,
shouldRun,
requiredFields,
sparkBody,
} = sparkConfig;
const sparkContext = {
row: triggerType === "delete" ? beforeData : afterData,
ref,
db,
auth,
change,
triggerType,
sparkConfig,
utilFns,
fieldTypes,
storage,
};
if (!triggers.includes(triggerType)) return false; // skip if this trigger type is not enabled for the spark
if (
triggerType !== "delete" &&
requiredFields &&
requiredFields.length !== 0 &&
!hasRequiredFields(requiredFields, afterData)
) {
console.log("requiredFields are ", requiredFields, "type is", type);
return false; // bail out if the fields required for this spark are missing
}
const dontRun = shouldRun
? !(typeof shouldRun === "function"
? await shouldRun(sparkContext)
: shouldRun)
: false; // shouldRun may be a boolean or an (async) predicate
console.log(label, "type is ", type, "dontRun value is", dontRun);
if (dontRun) return false;
const sparkData = await Object.keys(sparkBody).reduce(
async (acc, key) => ({
[key]:
typeof sparkBody[key] === "function"
? await sparkBody[key](sparkContext)
: sparkBody[key],
...(await acc),
}),
{}
);
console.log(JSON.stringify(sparkData));
const sparkFn = require(`./${type}`).default;
await sparkFn(sparkData, sparkContext);
return true;
} catch (err) {
const { label, type } = sparkConfig;
console.log(
`error in ${label} spark of type ${type}, on ${context.eventType} in Doc ${context.resource.name}`
);
console.error(err);
return Promise.reject(err);
}
};
export default spark;


@@ -1,17 +0,0 @@
import * as functions from "firebase-functions";
export const hasAnyRole = (
authorizedRoles: string[],
context: functions.https.CallableContext
) => {
if (!context.auth || !context.auth.token.roles) return false;
const userRoles = context.auth.token.roles as string[];
const authorization = authorizedRoles.reduce(
(authorized: boolean, role: string) => {
if (userRoles.includes(role)) return true;
else return authorized;
},
false
);
return authorization;
};
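A usage sketch (hypothetical callable and role names), reusing the functions import above:

// Gate a callable on roles via hasAnyRole.
export const adminOnly = functions.https.onCall((data, context) => {
if (!hasAnyRole(["ADMIN", "OPS"], context)) {
throw new functions.https.HttpsError("permission-denied", "Unauthorized");
}
return { ok: true };
});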


@@ -1,3 +0,0 @@
export const sendEmail = (msg: any) => {
// pubsub.push
};


@@ -1,98 +0,0 @@
import * as admin from "firebase-admin";
import * as functions from "firebase-functions";
export const serverTimestamp = admin.firestore.FieldValue.serverTimestamp;
import { sendEmail } from "./email";
import { hasAnyRole } from "./auth";
import { SecretManagerServiceClient } from "@google-cloud/secret-manager";
const secrets = new SecretManagerServiceClient();
export const getSecret = async (name: string, v: string = "latest") => {
const [version] = await secrets.accessSecretVersion({
name: `projects/${process.env.GCLOUD_PROJECT}/secrets/${name}/versions/${v}`,
});
const payload = version.payload?.data?.toString();
if (payload && payload[0] === "{") {
return JSON.parse(payload);
} else {
return payload;
}
};
const characters =
"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz";
export function generateId(length: number): string {
let result = "";
const charactersLength = characters.length;
for (let i = 0; i < length; i++) {
result += characters.charAt(Math.floor(Math.random() * charactersLength));
}
return result;
}
export const arrayUnion = (union: any) => {
return admin.firestore.FieldValue.arrayUnion(union);
};
export const arrayRemove = (union: any) => {
return admin.firestore.FieldValue.arrayRemove(union);
};
export const increment = (val: number) => {
return admin.firestore.FieldValue.increment(val);
};
export const hasRequiredFields = (requiredFields: string[], data: any) =>
requiredFields.reduce((acc: boolean, currField: string) => {
if (data[currField] === undefined || data[currField] === null) return false;
else return acc;
}, true);
export async function asyncForEach(array: any[], callback: Function) {
for (let index = 0; index < array.length; index++) {
await callback(array[index], index, array);
}
}
export const getTriggerType = (change) =>
Boolean(change.after.data()) && Boolean(change.before.data())
? "update"
: Boolean(change.after.data())
? "create"
: "delete";
export const changedDocPath = (
change: functions.Change<functions.firestore.DocumentSnapshot>
) => change.before?.ref.path ?? change.after.ref.path;
export const rowReducer = (fieldsToSync, row) =>
fieldsToSync.reduce((acc: any, curr: string) => {
if (row[curr] !== undefined && row[curr] !== null)
return { ...acc, [curr]: row[curr] };
else return acc;
}, {});
const hasChanged = (
change: functions.Change<functions.firestore.DocumentSnapshot>
) => (trackedFields: string[]) => {
const before = change.before?.data();
const after = change.after?.data();
if (!before && after) return true;
else if (before && !after) return false;
else
return trackedFields.some(
(trackedField) =>
JSON.stringify(before[trackedField]) !==
JSON.stringify(after[trackedField])
);
};
export default {
hasChanged,
getSecret,
hasRequiredFields,
generateId,
sendEmail,
serverTimestamp,
hasAnyRole,
asyncForEach,
arrayUnion,
arrayRemove,
increment,
};
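A quick sketch of the two change-classification helpers above, for some hypothetical onWrite change:

// getTriggerType: before empty + after populated -> "create";
// both populated -> "update"; after empty -> "delete".
const triggerType = getTriggerType(change);
// hasChanged compares only the listed fields, by JSON equality.
const changed = hasChanged(change)(["title", "status"]);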


@@ -1,18 +0,0 @@
{
"compilerOptions": {
"module": "commonjs",
"noImplicitReturns": true,
"noUnusedLocals": true,
"outDir": "lib",
"sourceMap": true,
"strict": true,
"noImplicitAny": false,
"resolveJsonModule": true,
"target": "es6",
"lib": ["ESNext"],
"strictNullChecks": false
},
"compileOnSave": true,
"include": ["src", "generateConfig.ts"],
"ignore": ["sparks"]
}


@@ -1,116 +0,0 @@
{
"rules": {
// -- Strict errors --
// These lint rules are likely always a good idea.
// Force function overloads to be declared together. This ensures readers understand APIs.
"adjacent-overload-signatures": true,
// Do not allow the subtle/obscure comma operator.
"ban-comma-operator": true,
// Do not allow internal modules or namespaces. These are deprecated in favor of ES6 modules.
"no-namespace": true,
// Do not allow parameters to be reassigned. To avoid bugs, developers should instead assign new values to new vars.
"no-parameter-reassignment": true,
// Force the use of ES6-style imports instead of /// <reference path=> imports.
"no-reference": true,
// Do not allow type assertions that do nothing. This is a big warning that the developer may not understand the
// code currently being edited (they may be incorrectly handling a different type case that does not exist).
"no-unnecessary-type-assertion": true,
// Disallow nonsensical label usage.
"label-position": true,
// Disallows the (often typo) syntax if (var1 = var2). Replace with if (var2) { var1 = var2 }.
"no-conditional-assignment": true,
// Disallows constructors for primitive types (e.g. new Number('123'), though Number('123') is still allowed).
"no-construct": true,
// Do not allow super() to be called twice in a constructor.
"no-duplicate-super": true,
// Do not allow the same case to appear more than once in a switch block.
"no-duplicate-switch-case": true,
// Do not allow a variable to be declared more than once in the same block. Consider function parameters in this
// rule.
"no-duplicate-variable": [true, "check-parameters"],
// Disallows a variable definition in an inner scope from shadowing a variable in an outer scope. Developers should
// instead use a separate variable name.
"no-shadowed-variable": true,
// Empty blocks are almost never needed. Allow the one general exception: empty catch blocks.
"no-empty": [true, "allow-empty-catch"],
// Functions must either be handled directly (e.g. with a catch() handler) or returned to another function.
// This is a major source of errors in Cloud Functions and the team strongly recommends leaving this rule on.
"no-floating-promises": true,
// Do not allow any imports for modules that are not in package.json. These will almost certainly fail when
// deployed.
"no-implicit-dependencies": true,
// The 'this' keyword can only be used inside of classes.
"no-invalid-this": true,
// Do not allow strings to be thrown because they will not include stack traces. Throw Errors instead.
"no-string-throw": true,
// Disallow control flow statements, such as return, continue, break, and throw in finally blocks.
"no-unsafe-finally": true,
// Expressions must always return a value. Avoids common errors like const myValue = functionReturningVoid();
"no-void-expression": [true, "ignore-arrow-function-shorthand"],
// Disallow duplicate imports in the same file.
"no-duplicate-imports": true,
// -- Strong Warnings --
// These rules should almost never be needed, but may be included due to legacy code.
// They are left as a warning to avoid frustration with blocked deploys when the developer
// understands the warning and wants to deploy anyway.
// Warn when an empty interface is defined. These are generally not useful.
"no-empty-interface": { "severity": "warning" },
// Warn when an import will have side effects.
"no-import-side-effect": { "severity": "warning" },
// Warn when variables are defined with var. Var has subtle meaning that can lead to bugs. Strongly prefer const for
// most values and let for values that will change.
"no-var-keyword": { "severity": "warning" },
// Prefer === and !== over == and !=. The latter operators support overloads that are often accidental.
"triple-equals": { "severity": "warning" },
// Warn when using deprecated APIs.
"deprecation": { "severity": "warning" },
// -- Light Warnings --
// These rules are intended to help developers use better style. Simpler code has fewer bugs. These would be "info"
// if TSLint supported such a level.
// prefer for( ... of ... ) to an index loop when the index is only used to fetch an object from an array.
// (Even better: check out utils like .map if transforming an array!)
"prefer-for-of": { "severity": "warning" },
// Warns if function overloads could be unified into a single function with optional or rest parameters.
"unified-signatures": { "severity": "warning" },
// Prefer const for values that will not change. This better documents code.
"prefer-const": { "severity": "warning" },
// Multi-line object literals and function calls should have a trailing comma. This helps avoid merge conflicts.
"trailing-comma": { "severity": "warning" }
},
"linterOptions": {
"exclude": ["./generator/templates/**"]
},
"defaultSeverity": "error"
}

File diff suppressed because it is too large


@@ -1,140 +0,0 @@
const express = require("express");
const bodyParser = require("body-parser");
const cors = require("cors");
import { asyncExecute } from "./compiler/terminal";
import { createStreamLogger } from "./utils";
import generateConfig from "./compiler";
import { auth } from "./firebaseConfig";
import meta from "./package.json";
import { commandErrorHandler, logErrorToDB } from "./utils";
import firebase from "firebase-admin";
const app = express();
const jsonParser = bodyParser.json();
app.use(cors());
app.get("/", async (req: any, res: any) => {
res.send(`Firetable cloud function builder version ${meta.version}`);
});
app.post("/", jsonParser, async (req: any, res: any) => {
let user: firebase.auth.UserRecord;
const userToken = req?.body?.token;
if (!userToken) {
console.log("missing auth token");
res.send({
success: false,
reason: "missing auth token",
});
return;
}
try {
const decodedToken = await auth.verifyIdToken(userToken);
const uid = decodedToken.uid;
user = await auth.getUser(uid);
const roles = user?.customClaims?.roles;
if (!roles || !Array.isArray(roles) || !roles?.includes("ADMIN")) {
await logErrorToDB({
errorDescription: `user is not admin`,
user,
});
res.send({
success: false,
reason: `user is not admin`,
});
return;
}
console.log("successfully authenticated");
} catch (error) {
await logErrorToDB({
errorDescription: `error verifying auth token: ${error}`,
user,
});
res.send({
success: false,
reason: `error verifying auth token: ${error}`,
});
return;
}
const configPath = req?.body?.configPath;
console.log("configPath:", configPath);
if (!configPath) {
await logErrorToDB({
errorDescription: `Invalid configPath (${configPath})`,
user,
});
res.send({
success: false,
reason: "invalid configPath",
});
return;
}
const streamLogger = await createStreamLogger(configPath);
await streamLogger.info("streamLogger created");
const success = await generateConfig(configPath, user, streamLogger);
if (!success) {
await streamLogger.error("generateConfig failed to complete");
await streamLogger.fail();
res.send({
success: false,
reason: `generateConfig failed to complete`,
});
return;
}
await streamLogger.info("generateConfig success");
let hasEnvError = false;
if (!process.env._PROJECT_ID) {
await logErrorToDB(
{
errorDescription: `Invalid env: _PROJECT_ID (${process.env._PROJECT_ID})`,
user,
},
streamLogger
);
hasEnvError = true;
}
if (hasEnvError) {
await streamLogger.error("Invalid env:_PROJECT_ID");
await streamLogger.fail();
res.send({
success: false,
reason: "Invalid env:_PROJECT_ID",
});
return;
}
await asyncExecute(
`cd build/functions; \
yarn install`,
commandErrorHandler({ user }, streamLogger)
);
await asyncExecute(
`cd build/functions; \
yarn deployFT \
--project ${process.env._PROJECT_ID} \
--only functions`,
commandErrorHandler({ user }, streamLogger)
);
await streamLogger.end();
res.send({
success: true,
});
});
const port = process.env.PORT || 8080;
app.listen(port, () => {
console.log(
`Firetable cloud function builder ${meta.version}: listening on port ${port}`
);
});
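A sketch of the POST body this builder expects (URL and values are placeholders):

// Hypothetical client call to the deployed builder service.
await fetch("https://ft-builder-xxxxx.a.run.app/", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({
token: idToken, // Firebase ID token of an ADMIN user
configPath: "_FIRETABLE_/settings/schema/posts",
}),
});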


@@ -1,34 +0,0 @@
{
"name": "ft-functions-builder",
"description": "Manages the build and deployment of Firetable cloud functions",
"version": "1.0.0",
"private": true,
"main": "index.js",
"scripts": {
"start": "yarn build && node build",
"build": "rm -rf build && tsc --project ./ && cp -r functions build && cp -r sparksLib build",
"deploy": "./deploy.sh"
},
"engines": {
"node": "14"
},
"dependencies": {
"body-parser": "^1.19.0",
"cors": "^2.8.5",
"express": "^4.17.1",
"firebase-admin": "^9.2.0",
"firebase-functions": "^3.11.0"
},
"devDependencies": {
"@types/express": "^4.17.11",
"@types/node": "^14.14.33",
"firebase-tools": "^8.7.0",
"husky": "^4.2.5",
"js-beautify": "^1.13.0",
"prettier": "^2.1.1",
"pretty-quick": "^3.0.0",
"ts-node": "^9.1.1",
"tslint": "^6.1.0",
"typescript": "^4.2.3"
}
}


@@ -1,109 +0,0 @@
export const dependencies = {
algoliasearch: "^4.8.3",
};
const get = (obj, path, defaultValue = undefined) => {
const travel = (regexp) =>
String.prototype.split
.call(path, regexp)
.filter(Boolean)
.reduce(
(res, key) => (res !== null && res !== undefined ? res[key] : res),
obj
);
const result = travel(/[,[\]]+?/) || travel(/[,[\].]+?/);
return result === undefined || result === obj ? defaultValue : result;
};
const filterSnapshot = (
field: { docPath: string; snapshot: any },
preservedKeys: string[]
) => {
return {
docPath: field.docPath,
...preservedKeys.reduce((acc: any, currentKey: string) => {
const value = get(field.snapshot, currentKey);
if (value) {
return { ...acc, snapshot: { [currentKey]: value, ...acc.snapshot } };
} else return acc;
}, {}),
};
};
// returns object of fieldsToSync
const rowReducer = (fieldsToSync, row) =>
fieldsToSync.reduce(
(
acc: any,
curr: string | { fieldName: string; snapshotFields: string[] }
) => {
if (typeof curr === "string") {
if (row[curr] && typeof row[curr].toDate === "function") {
return {
...acc,
[curr]: row[curr].toDate().getTime() / 1000,
};
} else if (row[curr] !== undefined && row[curr] !== null) {
return { ...acc, [curr]: row[curr] };
} else {
return acc;
}
} else {
if (row[curr.fieldName] && curr.snapshotFields) {
return {
...acc,
[curr.fieldName]: row[curr.fieldName].map((snapshot) =>
filterSnapshot(snapshot, curr.snapshotFields)
),
};
} else {
return acc;
}
}
},
{}
);
const significantDifference = (fieldsToSync, change) => {
const beforeData = change.before.data();
const afterData = change.after.data();
return fieldsToSync.reduce((acc, field) => {
const key = typeof field === "string" ? field : field.fieldName;
if (JSON.stringify(beforeData[key]) !== JSON.stringify(afterData[key]))
return true;
else return acc;
}, false);
};
const algoliaIndex = async (data, sparkContext) => {
const { row, objectID, index, fieldsToSync } = data;
const { triggerType, change } = sparkContext;
const record = rowReducer(fieldsToSync, row);
const algoliasearch = require("algoliasearch");
const { getSecret } = require("../utils");
const { appId, adminKey } = await getSecret("algolia");
console.log(`algolia app id : ${appId}`);
const client = algoliasearch(appId, adminKey);
const _index = client.initIndex(index); // initialize algolia index
switch (triggerType) {
case "delete":
await _index.deleteObject(objectID);
break;
case "update":
if (
significantDifference([...fieldsToSync, "_ft_forcedUpdateAt"], change)
) {
_index.saveObject({ ...record, objectID });
}
break;
case "create":
await _index.saveObject({ ...record, objectID });
break;
default:
break;
}
return true;
};
export default algoliaIndex;
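A hypothetical sparks entry that would drive this file, its shape inferred from the destructuring above (field names invented for illustration):

// Hypothetical algolia spark config, evaluated by src/sparks/index.ts.
const algoliaSpark = {
label: "Index posts",
type: "algoliaIndex",
triggers: ["create", "update", "delete"],
requiredFields: ["title"],
sparkBody: {
index: "posts",
objectID: ({ ref }) => ref.id,
fieldsToSync: ["title", "author", "publishedAt"],
row: ({ row }) => row,
},
};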


@@ -1,11 +0,0 @@
export const dependencies = {
"node-fetch": "2.6.1",
};
const api = async (args) => {
const { body, url, method, callback } = args;
const fetch = require("node-fetch");
return fetch(url, { method: method, body: body })
.then((res) => res.json())
.then((json) => callback(json));
};
export default api;


@@ -1,424 +0,0 @@
export const dependencies = {
"@google-cloud/bigquery": "^5.5.0",
};
const get = (obj, path, defaultValue = undefined) => {
const travel = (regexp) =>
String.prototype.split
.call(path, regexp)
.filter(Boolean)
.reduce(
(res, key) => (res !== null && res !== undefined ? res[key] : res),
obj
);
const result = travel(/[,[\]]+?/) || travel(/[,[\].]+?/);
return result === undefined || result === obj ? defaultValue : result;
};
const filterSnapshot = (
field: { docPath: string; snapshot: any },
preservedKeys: string[]
) => {
return {
docPath: field.docPath,
...preservedKeys.reduce((acc: any, currentKey: string) => {
const value = get(field.snapshot, currentKey);
if (value) {
return { ...acc, snapshot: { [currentKey]: value, ...acc.snapshot } };
} else return acc;
}, {}),
};
};
// returns object of fieldsToSync
const rowReducer = (fieldsToSync, row) =>
fieldsToSync.reduce(
(
acc: any,
curr: string | { fieldName: string; snapshotFields: string[] }
) => {
if (typeof curr === "string") {
if (row[curr] && typeof row[curr].toDate === "function") {
return {
...acc,
[curr]: row[curr].toDate().getTime() / 1000,
};
} else if (row[curr] !== undefined && row[curr] !== null) {
return { ...acc, [curr]: row[curr] };
} else {
return acc;
}
} else {
if (row[curr.fieldName] && curr.snapshotFields) {
return {
...acc,
[curr.fieldName]: row[curr.fieldName].map((snapshot) =>
filterSnapshot(snapshot, curr.snapshotFields)
),
};
} else {
return acc;
}
}
},
{}
);
const significantDifference = (fieldsToSync, change) => {
const beforeData = change.before.data();
const afterData = change.after.data();
return fieldsToSync.reduce((acc, field) => {
const key = typeof field === "string" ? field : field.fieldName;
if (JSON.stringify(beforeData[key]) !== JSON.stringify(afterData[key]))
return true;
else return acc;
}, false);
};
const transformToSQLData = (value: any, ftType: string) => {
if (value === null || value === undefined) {
return {
value: `null`,
type: "STRING",
};
}
const sanitise = (x: string) =>
x?.replace?.(/\"/g, '\\"')?.replace?.(/\n/g, "\\n") ?? "";
switch (ftType) {
case "SIMPLE_TEXT":
case "LONG_TEXT":
case "EMAIL":
case "PHONE_NUMBER":
case "CODE":
case "RICH_TEXT":
case "ID":
case "SINGLE_SELECT":
case "URL":
return {
value: `"${sanitise(value)}"`,
type: "STRING",
};
case "JSON": // JSON
case "FILE": // JSON
case "IMAGE": // JSON
case "USER": // JSON
case "COLOR": // JSON
case "DOCUMENT_SELECT":
case "SERVICE_SELECT":
case "ACTION":
case "AGGREGATE":
case "MULTI_SELECT": // array
return {
value: `"${sanitise(JSON.stringify(value))}"`,
type: "STRING",
};
case "CHECK_BOX":
return {
value: value ? `true` : `false`,
type: "BOOLEAN",
};
case "NUMBER":
case "PERCENTAGE":
case "RATING":
case "SLIDER":
return {
value: Number(value),
type: "NUMERIC",
};
case "DATE":
case "DATE_TIME":
case "DURATION":
if (!value?.toDate) {
return {
value: `null`,
type: "TIMESTAMP",
};
}
return {
value: `timestamp("${value?.toDate?.()}")`,
type: "TIMESTAMP",
};
case "LAST":
case "STATUS":
case "SUB_TABLE":
default:
// unknown or meaningless to sync
return {
value: `null`,
type: "STRING",
};
}
};
const transformToSQLValue = (ftValue: any, ftType: string) => {
const { value } = transformToSQLData(ftValue, ftType);
return value;
};
const transformToSQLType = (ftType: string) => {
const { type } = transformToSQLData("", ftType);
return type;
};
const bigqueryIndex = async (payload, sparkContext) => {
const { objectID, index, fieldsToSync, projectID, datasetLocation } = payload;
const { triggerType, change, fieldTypes } = sparkContext;
const record = rowReducer(fieldsToSync, sparkContext.row);
const { BigQuery } = require("@google-cloud/bigquery");
const bigquery = new BigQuery();
const _projectID = projectID ?? process.env.GCLOUD_PROJECT;
const tableFullName = `${_projectID}.firetable.${index}`;
console.log(
`projectID: ${_projectID}, index: ${index}, tableFullName: ${tableFullName}`
);
// create a dataset named exactly "firetable" if it does not already exist
async function preprocessDataset() {
const dataset = bigquery.dataset("firetable", {
location: datasetLocation ?? "US",
});
const res = await dataset.exists();
const exists = res[0];
if (!exists) {
console.log("Dataset 'firetable' does not exist, creating dataset...");
await dataset.create();
console.log("Dataset 'firetable' created.");
} else {
console.log("Dataset 'firetable' exists.");
}
}
async function preprocessTable() {
const dataset = bigquery.dataset("firetable");
const table = dataset.table(index);
const res = await table.exists();
const exists = res[0];
if (!exists) {
console.log(
`Table '${index}' does not exist in dataset 'firetable', creating table...`
);
await table.create();
console.log(`Table '${index}' created in dataset 'firetable'.`);
} else {
console.log(`Table ${index} exists in 'firetable'.`);
}
}
async function preprocessSchema() {
const dataset = bigquery.dataset("firetable");
const table = dataset.table(index);
const generatedTypes = Object.keys(fieldTypes)
.filter((field) => fieldsToSync.includes(field))
.reduce((acc, cur) => {
return {
[cur]: transformToSQLType(fieldTypes[cur]),
...acc,
};
}, {});
const generatedSchema = [
{ name: "objectID", type: "STRING", mode: "REQUIRED" },
...Object.keys(generatedTypes).map((key) => {
return {
name: key,
type: generatedTypes[key],
mode: "NULLABLE",
};
}),
];
const pushSchema = async () => {
console.log("pushing schema:", generatedSchema);
const metadata = {
schema: generatedSchema,
};
await table.setMetadata(metadata);
console.log("schema pushed.");
};
const existingRes = await table.getMetadata();
const existingSchema = existingRes[0].schema?.fields;
if (!existingSchema) {
console.log("Existing schema does not exist, pushing schema...");
await pushSchema();
return;
}
// check if schema update is needed
const objectIDFilter = (field) => field.name !== "objectID";
const schemaIdentical =
Object.keys(generatedTypes).length ===
existingSchema.filter(objectIDFilter).length &&
existingSchema
.filter(objectIDFilter)
.every((field) => generatedTypes[field.name] === field.type);
if (schemaIdentical) {
// no change to schema
console.log("Existing schema detected, no update needeed.");
return;
}
// check schema compatibility (only adding new fields is accepted)
const compatible =
Object.keys(generatedTypes).length >
existingSchema.filter(objectIDFilter).length &&
existingSchema
.filter(objectIDFilter)
.filter((field) => Object.keys(generatedTypes).includes(field.name))
.every((field) => generatedTypes[field.name] === field.type);
if (!compatible) {
const errorMessage =
"New update to field types is not compatible with existing schema. Please manually remove the current bigquery table or update spark index";
console.log(errorMessage);
throw new Error(errorMessage);
} else {
console.log(
"New field types detected and it is compatible with current schema."
);
}
// push schema
await pushSchema();
}
// returns whether the objectID already exists in the table
async function exist() {
const query = `SELECT objectID FROM ${tableFullName}
WHERE objectID="${objectID}"
;`;
console.log(query);
const res = await bigquery.query(query);
const rows = res?.[0];
return !!rows?.length;
}
function getTypeKnownRecord(data) {
const knownTypes = Object.keys(fieldTypes);
const givenKeys = Object.keys(data);
const knownKeys = givenKeys.filter((key) => knownTypes.includes(key));
const unknownKeys = givenKeys.filter((key) => !knownTypes.includes(key));
const knownRecord = Object.keys(data)
.filter((key) => knownKeys.includes(key))
.reduce((obj, key) => {
return {
...obj,
[key]: data[key],
};
}, {});
if (unknownKeys?.length > 0) {
console.log(
"The following fields do not exist in Firetable and are ignored.",
unknownKeys
);
}
return knownRecord;
}
async function insert(data) {
const keys = Object.keys(data).join(",");
const values = Object.keys(data)
.map((key) => transformToSQLValue(data[key], fieldTypes[key]))
.join(",");
const query = `INSERT INTO ${tableFullName}
(objectID, ${keys})
VALUES ("${objectID}", ${values})
;`;
console.log(query);
await executeQuery(query);
}
// Execute a query; if rate limited, sleep and retry until it succeeds.
// ATTENTION: the Cloud Function may hit its 60,000 ms execution timeout first.
const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
async function executeQuery(query, delayDepth = 1) {
try {
const res = await bigquery.query(query);
console.log(res);
} catch (error) {
if (
error?.errors?.length === 1 &&
(error?.errors?.[0]?.reason === "rateLimitExceeded" ||
error?.errors?.[0]?.reason === "quotaExceeded")
) {
const delay = Math.round(
Math.floor(Math.random() * 3_000 * (delayDepth % 20) + 1000)
);
console.log(`API rate limited, try again in ${delay}ms`);
await sleep(delay);
await executeQuery(query, delayDepth + 1);
} else {
console.log(error?.errors ?? error);
}
}
if (delayDepth === 1) {
console.log("Query finished.");
}
}
async function update(data) {
const values = Object.keys(data)
.map((key) => `${key}=${transformToSQLValue(data[key], fieldTypes[key])}`)
.join(",");
const query = `UPDATE ${tableFullName}
SET ${values}
WHERE objectID="${objectID}"
;`;
console.log(query);
await executeQuery(query);
}
async function insertOrUpdate(data) {
const objectExists = await exist();
if (objectExists) {
await update(data);
} else {
await insert(data);
}
}
async function remove() {
const query = `DELETE FROM ${tableFullName}
WHERE objectID="${objectID}"
;`;
console.log(query);
await executeQuery(query);
}
// preprocess before starting index logic
await preprocessDataset();
await preprocessTable();
await preprocessSchema();
// only proceed with fields that have known types
const typeKnownRecord = getTypeKnownRecord(record);
switch (triggerType) {
case "delete":
await remove();
break;
case "update":
if (
significantDifference([...fieldsToSync, "_ft_forcedUpdateAt"], change)
) {
await insertOrUpdate(typeKnownRecord);
} else {
console.log("significantDifference is false, no update needed.");
}
break;
case "create":
await insertOrUpdate(typeKnownRecord);
break;
default:
break;
}
return true;
};
export default bigqueryIndex;
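The switch above fixes the Firetable-to-BigQuery type mapping; a few sample values:

// Examples of transformToSQLType / transformToSQLValue behaviour.
transformToSQLType("SIMPLE_TEXT"); // "STRING"
transformToSQLType("CHECK_BOX"); // "BOOLEAN"
transformToSQLType("RATING"); // "NUMERIC"
transformToSQLType("DATE_TIME"); // "TIMESTAMP"
transformToSQLValue(null, "NUMBER"); // `null` (any missing value becomes SQL null)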


@@ -1,56 +0,0 @@
export const dependencies = {};
// returns object of fieldsToSync
const rowReducer = (fieldsToSync, row) =>
fieldsToSync.reduce((acc: any, curr: string) => {
if (row[curr] !== undefined && row[curr] !== null)
return { ...acc, [curr]: row[curr] };
else return acc;
}, {});
const significantDifference = (fieldsToSync, change) => {
const beforeData = change.before.data();
const afterData = change.after.data();
return fieldsToSync.reduce((acc, field) => {
if (JSON.stringify(beforeData[field]) !== JSON.stringify(afterData[field]))
return true;
else return acc;
}, false);
};
const docSync = async (data, sparkContext) => {
const { row, targetPath, fieldsToSync } = data;
const { triggerType, change } = sparkContext;
const record = rowReducer(fieldsToSync, row);
const { db } = require("../firebaseConfig");
switch (triggerType) {
case "delete":
try {
await db.doc(targetPath).delete();
}
catch (error) {
console.log(error);
}
break;
case "update":
if (
significantDifference([...fieldsToSync, "_ft_forcedUpdateAt"], change)
) {
try {
await db.doc(targetPath).update(record);
} catch (error) {
console.log(error);
}
}
break;
case "create":
await db.doc(targetPath).set(record, { merge: true });
break;
default:
break;
}
return true;
};
export default docSync;
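A hypothetical sparkBody for this docSync spark, mirroring two fields into a companion document (names invented for illustration):

// Hypothetical docSync configuration.
const docSyncBody = {
targetPath: ({ row }) => `publicProfiles/${row.uid}`,
fieldsToSync: ["displayName", "photoURL"],
row: ({ row }) => row,
};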


@@ -1,33 +0,0 @@
export const dependencies = {};
const significantDifference = (fieldsToSync, change) => {
const beforeData = change.before.data();
const afterData = change.after.data();
return fieldsToSync.reduce((acc, field) => {
if (JSON.stringify(beforeData[field]) !== JSON.stringify(afterData[field]))
return true;
else return acc;
}, false);
};
const historySnapshot = async (data, sparkContext) => {
const { trackedFields } = data;
const { triggerType, change } = sparkContext;
if (
(triggerType === "update" &&
significantDifference(trackedFields, change)) ||
triggerType === "delete"
) {
try {
await change.before.ref.collection("historySnapshots").add({
...change.before.data(),
archivedAt: new Date(),
archiveEvent: triggerType,
});
} catch (error) {
console.log(error);
}
}
return true;
};
export default historySnapshot;


@@ -1,25 +0,0 @@
export const dependencies = {
"mailchimp-api-v3": "1.15.0",
};
// method : 'get|post|put|patch|delete'
// path :`/lists/${listId}/members`
const mailchimp = async (data) => {
const { path, method, path_params, body, query } = data;
const mailchimpLib = require("mailchimp-api-v3");
const utilFns = require("../utils");
const mailchimpKey = await utilFns.getSecret("mailchimp");
const _mailchimp = new mailchimpLib(mailchimpKey);
return new Promise((resolve, reject) => {
_mailchimp.request(
{
method,
path,
path_params,
body,
query,
},
resolve
);
});
};
export default mailchimp;


@@ -1,131 +0,0 @@
export const dependencies = {
meilisearch: "^0.18.1",
};
const get = (obj, path, defaultValue = undefined) => {
const travel = (regexp) =>
String.prototype.split
.call(path, regexp)
.filter(Boolean)
.reduce(
(res, key) => (res !== null && res !== undefined ? res[key] : res),
obj
);
const result = travel(/[,[\]]+?/) || travel(/[,[\].]+?/);
return result === undefined || result === obj ? defaultValue : result;
};
const filterSnapshot = (
field: { docPath: string; snapshot: any },
preservedKeys: string[]
) => {
return {
docPath: field.docPath,
...preservedKeys.reduce((acc: any, currentKey: string) => {
const value = get(field.snapshot, currentKey);
if (value) {
return { ...acc, snapshot: { [currentKey]: value, ...acc.snapshot } };
} else return acc;
}, {}),
};
};
// returns object of fieldsToSync
const rowReducer = (fieldsToSync, row) =>
fieldsToSync.reduce(
(
acc: any,
curr: string | { fieldName: string; snapshotFields: string[] }
) => {
if (typeof curr === "string") {
if (row[curr] && typeof row[curr].toDate === "function") {
return {
...acc,
[curr]: row[curr].toDate().getTime() / 1000,
};
      } else if (row[curr] !== undefined && row[curr] !== null) {
return { ...acc, [curr]: row[curr] };
} else {
return acc;
}
} else {
if (row[curr.fieldName] && curr.snapshotFields) {
return {
...acc,
[curr.fieldName]: row[curr.fieldName].map((snapshot) =>
filterSnapshot(snapshot, curr.snapshotFields)
),
};
} else {
return acc;
}
}
},
{}
);
const significantDifference = (fieldsToSync, change) => {
const beforeData = change.before.data();
const afterData = change.after.data();
return fieldsToSync.reduce((acc, field) => {
const key = typeof field === "string" ? field : field.fieldName;
if (JSON.stringify(beforeData[key]) !== JSON.stringify(afterData[key]))
return true;
else return acc;
}, false);
};
const meiliIndex = async (data, sparkContext) => {
const { row, objectID, index, fieldsToSync } = data;
const { triggerType, change } = sparkContext;
const record = rowReducer(fieldsToSync, row);
const { MeiliSearch } = require("meilisearch");
const { getSecret } = require("../utils");
const meiliConfig = await getSecret("meilisearch");
console.log(`meilisearch host : ${meiliConfig.host}, index: ${index}`);
const client = new MeiliSearch(meiliConfig);
const _index = client.index(index);
let res;
switch (triggerType) {
case "delete":
console.log("Deleting...");
res = await _index.deleteDocument(objectID);
break;
case "update":
if (
significantDifference([...fieldsToSync, "_ft_forcedUpdateAt"], change)
) {
console.log("Updating...");
res = await _index.updateDocuments([
{
id: objectID,
...record,
},
]);
}
break;
case "create":
console.log("Creating...");
res = await _index.addDocuments([
{
id: objectID,
...record,
},
]);
break;
default:
console.log("No match.");
break;
}
console.log("Checking status...");
if (res?.updateId) {
console.log("Querying status...");
const status = await client.index(index).getUpdateStatus(res.updateId);
console.log("Status:", status);
}
return true;
};
export default meiliIndex;
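
A hypothetical payload sketch; fieldsToSync mixes a plain field, a Timestamp-like field, and a snapshot field, matching the shapes rowReducer handles above (all names and the index are illustrative):

meiliIndex(
  {
    row: {
      title: "Hello world",
      createdAt: { toDate: () => new Date() }, // synced as epoch seconds
      author: [{ docPath: "users/ada", snapshot: { name: "Ada", email: "a@x.io" } }],
    },
    objectID: "doc123",
    index: "posts",
    fieldsToSync: [
      "title",
      "createdAt",
      { fieldName: "author", snapshotFields: ["name"] }, // email is stripped
    ],
  },
  { triggerType: "create", change: undefined }
);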

View File

@@ -1,13 +0,0 @@
export const dependencies = {
"@sendgrid/mail": "^7.4.2",
};
const sendgridEmail = async (data) => {
const { msg } = data;
const sgMail = require("@sendgrid/mail");
const utilFns = require("../utils");
sgMail.setSubstitutionWrappers("{{", "}}");
const sendgridKey = await utilFns.getSecret("sendgrid");
sgMail.setApiKey(sendgridKey);
return sgMail.send(msg);
};
export default sendgridEmail;
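
A hypothetical msg payload; with the {{ }} substitution wrappers set above, legacy-template substitutions can ride along with the usual @sendgrid/mail fields (addresses are placeholders):

sendgridEmail({
  msg: {
    to: "ada@example.com",
    from: "noreply@example.com", // must be a verified sender
    subject: "Hello {{name}}",
    text: "Hi {{name}}, your table build finished.",
    substitutions: { name: "Ada" },
  },
});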

View File

@@ -1,92 +0,0 @@
/*
{ channels?: string[], emails?: string[], text?: string, blocks?: any, attachments?: any }
*/
export const dependencies = {
"@slack/web-api": "^6.0.0",
};
const initSlack = async () => {
const { getSecret } = require("../utils");
const { token } = await getSecret("slack");
const { WebClient } = require("@slack/web-api");
return new WebClient(token);
};
const messageByChannel = (slackClient) => async ({
text,
channel,
blocks,
attachments,
}: {
channel: string;
text: string;
blocks: any[];
attachments: any[];
}) =>
await slackClient.chat.postMessage({
text,
channel,
blocks,
attachments,
});
const messageByEmail = (slackClient) => async ({
email,
text,
blocks,
attachments,
}: {
email: string;
text: string;
blocks: any[];
attachments: any[];
}) => {
try {
const user = await slackClient.users.lookupByEmail({ email });
if (user.ok) {
const channel = user.user.id;
return await messageByChannel(slackClient)({
text,
blocks,
attachments,
channel,
});
} else {
      return false;
}
} catch (error) {
    console.log(`${error}: maybe ${email} is not on Slack`);
    return false;
}
};
const slackMessage = async (data) => {
const slackClient = await initSlack();
const { channels, emails, text, blocks, attachments } = data;
if (channels) {
const messages = channels.map((channel: string) =>
messageByChannel(slackClient)({
text,
blocks: blocks ?? [],
channel,
attachments,
})
);
await Promise.all(messages);
}
if (emails) {
const messages = emails.map((email: string) =>
messageByEmail(slackClient)({
        text,
blocks: blocks ?? [],
email,
attachments,
})
);
await Promise.all(messages);
}
return true;
};
export default slackMessage;
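
A hypothetical data payload matching the shape documented at the top of this spark; the channel and address are made up, and emails are resolved to DMs via users.lookupByEmail:

slackMessage({
  channels: ["#builds"],
  emails: ["ada@example.com"],
  text: "Firetable build finished",
  blocks: [],
  attachments: [],
});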

View File

@@ -1,7 +0,0 @@
export const dependencies = {};
const task = async (args) => {
const { promises } = args;
  // Normalize so a single promise can be passed bare.
  const result = await Promise.allSettled(
    Array.isArray(promises) ? promises : [promises]
  );
  return result;
};
export default task;
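
Because Promise.allSettled never rejects, the caller always gets one { status, value | reason } record per input; a quick sketch:

task({ promises: [Promise.resolve(1), Promise.reject(new Error("boom"))] }).then(
  (results) => console.log(results)
);
// [ { status: "fulfilled", value: 1 },
//   { status: "rejected", reason: Error: boom } ]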

View File

@@ -1,13 +0,0 @@
export const dependencies = {
twilio: "3.56.0",
};
const twilioMessage = async (data) => {
const utilFns = require("../utils");
const { accountSid, authToken } = await utilFns.getSecret("twilio");
const client = require("twilio")(accountSid, authToken);
const { body, from, to } = data;
return client.messages
.create({ body, from, to })
.then((message) => console.log(message.sid));
};
export default twilioMessage;
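
A hypothetical payload; both numbers are placeholders, and from must be a Twilio-owned number:

twilioMessage({
  body: "Your verification code is 123456",
  from: "+15005550006", // placeholder
  to: "+15551234567", // placeholder
});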

View File

@@ -1,20 +0,0 @@
{
"compilerOptions": {
"target": "es6",
"module": "commonjs",
"rootDir": "./",
"outDir": "./build",
"esModuleInterop": true,
"strict": true,
"noImplicitReturns": true,
"noUnusedLocals": false,
"sourceMap": true,
"noImplicitAny": false,
"resolveJsonModule": true,
"lib": ["ESNext"],
"strictNullChecks": false
},
"compileOnSave": true,
"exclude": ["functions", "build"],
"include": ["*.ts", "firebase.json", "sparksLib"]
}

View File

@@ -1,198 +0,0 @@
import { db } from "./firebaseConfig";
import admin from "firebase-admin";
function firetableUser(user: admin.auth.UserRecord) {
return {
displayName: user?.displayName,
email: user?.email,
uid: user?.uid,
emailVerified: user?.emailVerified,
photoURL: user?.photoURL,
timestamp: new Date(),
};
}
async function insertErrorRecordToDB(errorRecord: object) {
await db.collection("_FT_ERRORS").add(errorRecord);
}
async function insertErrorToStreamer(errorRecord: object, streamLogger) {
let errorString = "";
for (const key of [
"command",
"description",
"functionConfigTs",
"sparksConfig",
"stderr",
"errorStackTrace",
]) {
const value = errorRecord[key];
if (value) {
errorString += `\n\n${key}: ${value}`;
}
}
await streamLogger.error(errorString);
}
function commandErrorHandler(
meta: {
user: admin.auth.UserRecord;
description?: string;
functionConfigTs?: string;
sparksConfig?: string;
},
streamLogger
) {
return async function (error, stdout, stderr) {
await streamLogger.info(stdout);
if (!error) {
return;
}
const errorRecord = {
errorType: "commandError",
ranBy: firetableUser(meta.user),
createdAt: admin.firestore.FieldValue.serverTimestamp(),
stdout: stdout ?? "",
stderr: stderr ?? "",
errorStackTrace: error?.stack ?? "",
command: error?.cmd ?? "",
description: meta?.description ?? "",
functionConfigTs: meta?.functionConfigTs ?? "",
sparksConfig: meta?.sparksConfig ?? "",
};
await insertErrorToStreamer(errorRecord, streamLogger);
    await insertErrorRecordToDB(errorRecord);
};
}
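
The returned function has the (error, stdout, stderr) signature of Node's child_process.exec callback, so it can be passed straight through; a minimal sketch, assuming a user record and streamLogger are already in scope:

import { exec } from "child_process";
exec(
  "yarn --version", // illustrative command
  commandErrorHandler({ user, description: "checking yarn" }, streamLogger)
);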
async function logErrorToDB(
data: {
errorDescription: string;
errorExtraInfo?: string;
errorTraceStack?: string;
user: admin.auth.UserRecord;
sparksConfig?: string;
},
streamLogger?
) {
console.error(data.errorDescription);
const errorRecord = {
errorType: "codeError",
ranBy: firetableUser(data.user),
description: data.errorDescription,
createdAt: admin.firestore.FieldValue.serverTimestamp(),
sparksConfig: data?.sparksConfig ?? "",
errorExtraInfo: data?.errorExtraInfo ?? "",
errorStackTrace: data?.errorTraceStack ?? "",
};
if (streamLogger) {
await insertErrorToStreamer(errorRecord, streamLogger);
}
  await insertErrorRecordToDB(errorRecord);
}
function parseSparksConfig(
sparks: string | undefined,
user: admin.auth.UserRecord,
streamLogger
) {
if (sparks) {
try {
// remove leading "sparks.config(" and trailing ")"
      return sparks
        .replace(/^(\s*)sparks\.config\(/, "")
        .replace(/\);?\s*$/, "");
} catch (error) {
logErrorToDB(
{
errorDescription: "Sparks is not wrapped with sparks.config",
errorTraceStack: error.stack,
user,
sparksConfig: sparks,
},
streamLogger
);
}
}
return "[]";
}
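
A worked example of the unwrapping: only the array literal between the wrapper call's parentheses survives, still as a string.

const input = `sparks.config([
  { type: "slackMessage", triggers: ["create"] },
])`;
// parseSparksConfig(input, user, streamLogger) returns:
// "[\n  { type: \"slackMessage\", triggers: [\"create\"] },\n]"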
async function createStreamLogger(tableConfigPath: string) {
const startTimeStamp = Date.now();
const fullLog: {
log: string;
level: "info" | "error";
timestamp: number;
}[] = [];
const logRef = db
.doc(tableConfigPath)
.collection("ftBuildLogs")
.doc(startTimeStamp.toString());
await logRef.set({ startTimeStamp, status: "BUILDING" });
console.log(
`streamLogger created. tableConfigPath: ${tableConfigPath}, startTimeStamp: ${startTimeStamp}`
);
return {
info: async (log: string) => {
console.log(log);
fullLog.push({
log,
level: "info",
timestamp: Date.now(),
});
await logRef.update({
fullLog,
});
},
error: async (log: string) => {
console.error(log);
fullLog.push({
log,
level: "error",
timestamp: Date.now(),
});
await logRef.update({
fullLog,
});
},
end: async () => {
const logsDoc = await logRef.get();
const errorLog = logsDoc
.get("fullLog")
.filter((log) => log.level === "error");
if (errorLog.length !== 0) {
console.log("streamLogger marked as FAIL");
await logRef.update({
status: "FAIL",
failTimeStamp: Date.now(),
});
} else {
console.log("streamLogger marked as SUCCESS");
await logRef.update({
status: "SUCCESS",
successTimeStamp: Date.now(),
});
}
},
fail: async () => {
console.log("streamLogger marked as FAIL");
await logRef.update({
status: "FAIL",
failTimeStamp: Date.now(),
});
},
};
}
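
A usage sketch tying the logger together; the config path is illustrative. Every info/error call re-writes the accumulated fullLog array onto the log document, and end() derives the final status from whether any error-level entries were recorded:

const streamLogger = await createStreamLogger(
  "_FIRETABLE_/settings/schema/myTable" // illustrative tableConfigPath
);
await streamLogger.info("starting build...");
await streamLogger.error("tsc failed"); // any error entry => FAIL at end()
await streamLogger.end();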
export {
commandErrorHandler,
logErrorToDB,
parseSparksConfig,
createStreamLogger,
};

File diff suppressed because it is too large.

View File

@@ -1,20 +1,6 @@
<!DOCTYPE html>
<html lang="en">
<head>
<script
async
src="https://www.googletagmanager.com/gtag/js?id=UA-140647798-6"
></script>
<script>
window.dataLayer = window.dataLayer || [];
function gtag() {
dataLayer.push(arguments);
}
gtag("js", new Date());
gtag("config", "UA-140647798-6");
</script>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<meta name="theme-color" content="#000000" />

View File

@@ -20,6 +20,7 @@ import routes from "constants/routes";
import AuthView from "pages/Auth";
import SignOutView from "pages/Auth/SignOut";
import TestView from "pages/Test";
import { analytics } from "analytics";
const AuthSetupGuidePage = lazy(
() => import("pages/Auth/SetupGuide" /* webpackChunkName: "AuthSetupGuide" */)
);

www/src/analytics.ts Normal file
View File

@@ -0,0 +1,17 @@
import firebase from "firebase/app";
import "firebase/analytics";
const firebaseConfig = {
apiKey: "AIzaSyBwgfb-GmsCZ_d4B5kRElzWMoPWwjdKioM",
authDomain: "firetable-service.firebaseapp.com",
projectId: "firetable-service",
storageBucket: "firetable-service.appspot.com",
messagingSenderId: "831080389",
appId: "1:831080389:web:ab0bbacccdd887ab3b6dac",
measurementId: "G-K97G7PBDNT",
};
// Initialize Firebase
const firetableServiceApp = firebase.initializeApp(
firebaseConfig,
"firetable-service"
);
export const analytics = firebase.analytics(firetableServiceApp);

View File

@@ -8,7 +8,7 @@ import Modal from "components/Modal";
import { FieldType } from "constants/fields";
import FieldsDropdown from "./FieldsDropdown";
import { getFieldProp } from "components/fields";
import { analytics } from "analytics";
const useStyles = makeStyles((theme) =>
createStyles({
helperText: {
@@ -122,6 +122,10 @@ export default function FormDialog({
...data.initializeColumn,
});
} else handleClose();
analytics.logEvent("new_column", {
type,
origin: window.location.hostname,
});
},
disabled: !columnLabel || !fieldKey || !type,
children: requireConfiguration ? "Next" : "Add",

View File

@@ -3,7 +3,7 @@ import { useState } from "react";
import { IMenuModalProps } from ".";
import Modal from "components/Modal";
import FieldsDropdown from "./FieldsDropdown";
import { analytics } from "analytics";
export default function FormDialog({
fieldName,
type,
@@ -32,6 +32,11 @@ export default function FormDialog({
onClick: () => {
handleSave(fieldName, { type: newType });
handleClose();
analytics.logEvent("update_column_type", {
newType,
prevType: type,
origin: window.location.hostname,
});
},
children: "Update",
},

View File

@@ -4,4 +4,5 @@ export default {
databaseURL: `https://${process.env.REACT_APP_FIREBASE_PROJECT_ID}.firebaseio.com`,
projectId: process.env.REACT_APP_FIREBASE_PROJECT_ID,
storageBucket: `${process.env.REACT_APP_FIREBASE_PROJECT_ID}.appspot.com`,
appId: "x",
};