Merge branch 'develop' into dependabot/npm_and_yarn/cloud_functions/functions/lodash-4.17.21

Sidney Alcantara
2021-06-29 14:07:10 +10:00
committed by GitHub
176 changed files with 6624 additions and 9166 deletions

View File

@@ -1,31 +0,0 @@
steps:
  - name: node:14.9.0
    entrypoint: yarn
    args: ["install"]
    dir: "FT_functions/compiler"
  - name: node:14.9.0
    entrypoint: yarn
    args:
      - "compile"
      - "${_SCHEMA_PATH}"
    dir: "FT_functions/compiler"
  - name: node:14.9.0
    entrypoint: yarn
    args: ["install"]
    dir: "FT_functions/functions"
  - name: node:14.9.0
    entrypoint: yarn
    args:
      - "deployFT"
      - "--project"
      - "${_PROJECT_ID}"
      - "--token"
      - "${_FIREBASE_TOKEN}"
      - "--only"
      - "functions"
    dir: "FT_functions/functions"
substitutions:
  _PROJECT_ID: "project-id" # default value
options:
  machineType: "N1_HIGHCPU_8"

View File

@@ -1,66 +0,0 @@
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
firebase-debug.log*
firebase-debug.*.log*
# Firebase cache
.firebase/
# Firebase config
# Uncomment this if you'd like others to create their own Firebase project.
# For a team working on the same Firebase project(s), it is recommended to leave
# it commented so all members can deploy to the same project(s) in .firebaserc.
# .firebaserc
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
# nyc test coverage
.nyc_output
# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (http://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variables file
.env

View File

@@ -1,28 +0,0 @@
import { addPackages, addSparkLib } from "./terminal";
const fs = require("fs");
import { generateConfigFromTableSchema } from "./loader";

async function asyncForEach(array: any[], callback: Function) {
  for (let index = 0; index < array.length; index++) {
    await callback(array[index], index, array);
  }
}

generateConfigFromTableSchema(process.argv[2]).then(async () => {
  const configFile = fs.readFileSync(
    "../functions/src/functionConfig.ts",
    "utf-8"
  );
  const requiredDependencies = configFile.match(
    /(?<=(require\(("|'))).*?(?=("|')\))/g
  );
  if (requiredDependencies) {
    await addPackages(requiredDependencies.map((p) => ({ name: p })));
  }
  const { sparksConfig } = require("../functions/src/functionConfig");
  const requiredSparks = sparksConfig.map((s) => s.type);
  console.log({ requiredSparks });
  await asyncForEach(requiredSparks, async (s) => await addSparkLib(s));
});
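For reference, a minimal sketch of what the dependency-extraction regex above returns when run against a generated config; the module names here are hypothetical, not part of the repository:

// Illustrative only: applying the same lookbehind/lookahead regex to sample text.
const sample = `
  const fetch = require("node-fetch");
  const { WebClient } = require('@slack/web-api');
`;
const matches = sample.match(/(?<=(require\(("|'))).*?(?=("|')\))/g);
console.log(matches); // ["node-fetch", "@slack/web-api"]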

View File

@@ -1,153 +0,0 @@
const fs = require("fs");
const beautify = require("js-beautify").js;
// Initialize Firebase Admin
import * as admin from "firebase-admin";
//const serverTimestamp = admin.firestore.FieldValue.serverTimestamp;
admin.initializeApp();
//const serviceAccount = require("./antler-vc-firebase.json");
//admin.initializeApp({ credential: admin.credential.cert(serviceAccount) });
const db = admin.firestore();
export const generateConfigFromTableSchema = async (schemaDocPath) => {
const schemaDoc = await db.doc(schemaDocPath).get();
const schemaData = schemaDoc.data();
if (!schemaData) throw new Error("no schema found");
const derivativeColumns = Object.values(schemaData.columns).filter(
(col: any) => col.type === "DERIVATIVE"
);
const derivativesConfig = `[${derivativeColumns.reduce(
(acc, currColumn: any) => {
if (
!currColumn.config.listenerFields ||
currColumn.config.listenerFields.length === 0
)
throw new Error(
`${currColumn.key} derivative is missing listener fields`
);
if (currColumn.config.listenerFields.includes(currColumn.key))
throw new Error(
`${currColumn.key} derivative has its own key as a listener field`
);
return `${acc}{\nfieldName:'${
currColumn.key
}',evaluate:async ({row,ref,db,auth,utilFns}) =>{${
currColumn.config.script
}},\nlistenerFields:[${currColumn.config.listenerFields
.map((fieldKey) => `"${fieldKey}"`)
.join(",\n")}]},\n`;
},
""
)}]`;
const initializableColumns = Object.values(
schemaData.columns
).filter((col: any) => Boolean(col.config?.defaultValue));
console.log(JSON.stringify({ initializableColumns }));
const initializeConfig = `[${initializableColumns.reduce(
(acc, currColumn: any) => {
if (currColumn.config.defaultValue.type === "static") {
return `${acc}{\nfieldName:'${currColumn.key}',
type:"${currColumn.config.defaultValue.type}",
value:${
typeof currColumn.config.defaultValue.value === "string"
? `"${currColumn.config.defaultValue.value}"`
: currColumn.config.defaultValue.value
},
},\n`;
} else if (currColumn.config.defaultValue.type === "dynamic") {
return `${acc}{\nfieldName:'${currColumn.key}',
type:"${currColumn.config.defaultValue.type}",
script:async ({row,ref,db,auth,utilFns}) =>{${currColumn.config.defaultValue.script}},
},\n`;
} else {
return `${acc}{\nfieldName:'${currColumn.key}',
type:"${currColumn.config.defaultValue.type}"
},\n`;
}
},
""
)}]`;
const documentSelectColumns = Object.values(schemaData.columns).filter(
(col: any) => col.type === "DOCUMENT_SELECT" && col.config?.trackedFields
);
const documentSelectConfig = `[${documentSelectColumns.reduce(
(acc, currColumn: any) => {
return `${acc}{\nfieldName:'${
currColumn.key
}',\ntrackedFields:[${currColumn.config.trackedFields
.map((fieldKey) => `"${fieldKey}"`)
.join(",\n")}]},\n`;
},
""
)}]`;
const sparksConfig = schemaData.sparks ? schemaData.sparks : "[]";
const collectionType = schemaDocPath.includes("subTables")
? "subCollection"
: schemaDocPath.includes("groupSchema")
? "groupCollection"
: "collection";
let collectionId = "";
let functionName = "";
let triggerPath = "";
switch (collectionType) {
case "collection":
collectionId = schemaDocPath.split("/").pop();
functionName = `"${collectionId}"`;
triggerPath = `"${collectionId}/{docId}"`;
break;
case "subCollection":
let pathParentIncrement = 0;
triggerPath =
'"' +
schemaDocPath
.replace("_FIRETABLE_/settings/schema/", "")
.replace(/subTables/g, function () {
pathParentIncrement++;
return `{parentDoc${pathParentIncrement}}`;
}) +
"/{docId}" +
'"';
functionName =
'"' +
schemaDocPath
.replace("_FIRETABLE_/settings/schema/", "")
.replace(/\/subTables\//g, "_") +
'"';
break;
case "groupCollection":
collectionId = schemaDocPath.split("/").pop();
const triggerDepth = schemaData.triggerDepth
? schemaData.triggerDepth
: 1;
triggerPath = "";
for (let i = 1; i <= triggerDepth; i++) {
triggerPath = triggerPath + `{parentCol${i}}/{parentDoc${i}}/`;
}
triggerPath = '"' + triggerPath + collectionId + "/" + "{docId}" + '"';
functionName = `"CG_${collectionId}${
triggerDepth > 1 ? `_D${triggerDepth}` : ""
}"`;
break;
default:
break;
}
const exports = {
triggerPath,
functionName: functionName.replace(/-/g, "_"),
derivativesConfig,
initializeConfig,
documentSelectConfig,
sparksConfig,
};
const fileData = Object.keys(exports).reduce((acc, currKey) => {
return `${acc}\nexport const ${currKey} = ${exports[currKey]}`;
}, ``);
fs.writeFileSync(
"../functions/src/functionConfig.ts",
beautify(fileData, { indent_size: 2 })
);
};
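As a rough illustration of the file this loader writes, here is a hedged sketch of what ../functions/src/functionConfig.ts could look like for a hypothetical top-level "orders" table with a single derivative column; the column names and script body are invented for the example:

// Hypothetical generated output (illustrative only).
export const triggerPath = "orders/{docId}";
export const functionName = "orders";
export const derivativesConfig = [
  {
    fieldName: "total",
    evaluate: async ({ row, ref, db, auth, utilFns }) => row.quantity * row.unitPrice,
    listenerFields: ["quantity", "unitPrice"],
  },
];
export const initializeConfig = [];
export const documentSelectConfig = [];
export const sparksConfig = [];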

View File

@@ -1,30 +0,0 @@
{
"name": "firetable-functions-compiler",
"scripts": {
"compile": "ts-node index"
},
"engines": {
"node": "14"
},
"main": "lib/index.js",
"dependencies": {
"firebase-admin": "^9.2.0"
},
"devDependencies": {
"@types/node": "^14.14.11",
"firebase-tools": "^8.7.0",
"husky": "^4.2.5",
"js-beautify": "^1.13.0",
"prettier": "^2.1.1",
"pretty-quick": "^3.0.0",
"ts-node": "^8.6.2",
"tslint": "^6.1.0",
"typescript": "^4.1.2"
},
"husky": {
"hooks": {
"pre-commit": "pretty-quick --staged"
}
},
"private": true
}

View File

@@ -1,39 +0,0 @@
import * as child from "child_process";

function execute(command, callback) {
  child.exec(command, function (error, stdout, stderr) {
    console.log({ error, stdout, stderr });
    callback(stdout);
  });
}

export const addPackages = (packages: { name: string; version?: string }[]) =>
  new Promise((resolve, reject) => {
    //const command =`cd FT_functions/functions;yarn add ${packageName}@${version}`
    const packagesString = packages.reduce((acc, currPackage) => {
      return `${acc} ${currPackage.name}@${currPackage.version ?? "latest"}`;
    }, "");
    if (packagesString.trim().length !== 0) {
      execute("ls", function () {});
      const command = `cd ../functions;yarn add ${packagesString}`;
      console.log(command);
      execute(command, function () {
        resolve(true);
      });
    } else resolve(false);
  });

export const addSparkLib = (name: string) =>
  new Promise(async (resolve, reject) => {
    const { dependencies } = require(`../sparksLib/${name}`);
    const packages = Object.keys(dependencies).map((key) => ({
      name: key,
      version: dependencies[key],
    }));
    await addPackages(packages);
    const command = `cp ../sparksLib/${name}.ts ../functions/src/sparks/${name}.ts`;
    execute(command, function () {
      resolve(true);
    });
  });
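A hedged usage sketch for the two helpers above; the package and spark names are placeholders rather than real entries in this repository:

// Illustrative only: package and spark names are placeholders.
import { addPackages, addSparkLib } from "./terminal";

async function installExampleDeps() {
  await addPackages([
    { name: "node-fetch", version: "^2.6.1" },
    { name: "@slack/web-api" }, // no version given, falls back to @latest
  ]);
  // copies ../sparksLib/slackMessage.ts and installs its declared dependencies
  await addSparkLib("slackMessage");
}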

View File

@@ -1,69 +0,0 @@
import * as functions from "firebase-functions";
import * as admin from "firebase-admin";
import { db, auth } from "../firebaseConfig";
import utilFns from "../utils";

const shouldEvaluateReducer = (listeners, before, after) =>
  listeners.reduce((acc: Boolean, currField: string) => {
    if (acc) return true;
    else
      return (
        JSON.stringify(before[currField]) !== JSON.stringify(after[currField])
      );
  }, false);

const derivative = (
  functionConfig: {
    fieldName: string;
    listenerFields: string[];
    evaluate: (props: {
      row: any;
      ref: FirebaseFirestore.DocumentReference<FirebaseFirestore.DocumentData>;
      db: FirebaseFirestore.Firestore;
      auth: admin.auth.Auth;
      utilFns: any;
    }) => any;
  }[]
) => async (
  change: functions.Change<functions.firestore.DocumentSnapshot>
) => {
  try {
    const beforeData = change.before?.data();
    const afterData = change.after?.data();
    const ref = change.after ? change.after.ref : change.before.ref;
    const update = await functionConfig.reduce(
      async (accUpdates: any, currDerivative) => {
        const shouldEval = shouldEvaluateReducer(
          currDerivative.listenerFields,
          beforeData,
          afterData
        );
        if (shouldEval) {
          const newValue = await currDerivative.evaluate({
            row: afterData,
            ref,
            db,
            auth,
            utilFns,
          });
          if (
            newValue !== undefined &&
            newValue !== afterData[currDerivative.fieldName]
          ) {
            return {
              ...(await accUpdates),
              [currDerivative.fieldName]: newValue,
            };
          }
        }
        return await accUpdates;
      },
      {}
    );
    return update;
  } catch (error) {
    console.log(`Derivatives Error`, error);
    return {};
  }
};

export default derivative;

View File

@@ -1,12 +0,0 @@
export const cloudActionScript = async ({
row,
db,
ref,
auth,
utilFns,
actionParams,
context,
}: any) => {
const claims = context.auth.token;
return { message: `hi ${JSON.stringify(claims.name)}!`, success: true };
};

File diff suppressed because it is too large

View File

@@ -1,64 +0,0 @@
module.exports = {
env: {
browser: true,
es6: true,
node: true,
},
extends: ["plugin:import/errors", "plugin:import/warnings"],
parser: "@typescript-eslint/parser",
parserOptions: {
project: "tsconfig.json",
sourceType: "module",
},
plugins: ["@typescript-eslint", "import"],
rules: {
"@typescript-eslint/adjacent-overload-signatures": "error",
"@typescript-eslint/no-empty-function": "error",
"@typescript-eslint/no-empty-interface": "warn",
"@typescript-eslint/no-floating-promises": "error",
"@typescript-eslint/no-namespace": "error",
"@typescript-eslint/no-unnecessary-type-assertion": "error",
"@typescript-eslint/prefer-for-of": "warn",
"@typescript-eslint/triple-slash-reference": "error",
"@typescript-eslint/unified-signatures": "warn",
"comma-dangle": "warn",
"constructor-super": "error",
eqeqeq: ["warn", "always"],
"import/no-deprecated": "warn",
"import/no-extraneous-dependencies": "error",
"import/no-unassigned-import": "warn",
"no-cond-assign": "error",
"no-duplicate-case": "error",
"no-duplicate-imports": "error",
"no-empty": [
"error",
{
allowEmptyCatch: true,
},
],
"no-invalid-this": "error",
"no-new-wrappers": "error",
"no-param-reassign": "error",
"no-redeclare": "error",
"no-sequences": "error",
"no-shadow": [
"error",
{
hoist: "all",
},
],
"no-throw-literal": "error",
"no-unsafe-finally": "error",
"no-unused-labels": "error",
"no-var": "warn",
"no-void": "error",
"prefer-const": "warn",
},
settings: {
jsdoc: {
tagNamePreference: {
returns: "return",
},
},
},
};

View File

@@ -1,13 +0,0 @@
// Initialize Firebase Admin
import * as functions from "firebase-functions";
import * as admin from "firebase-admin";
admin.initializeApp();
// Initialize Cloud Firestore Database
export const db = admin.firestore();
// Initialize Auth
export const auth = admin.auth();
const settings = { timestampsInSnapshots: true };
db.settings(settings);
export const env = functions.config();

View File

@@ -1,153 +0,0 @@
import * as path from "path";
import * as os from "os";
import * as fs from "fs";
import * as request from "request";
import * as functions from "firebase-functions";
import { db, auth } from "./config";
import * as admin from "firebase-admin";
import utilFns from "./utils";
type ActionData = {
ref: {
id: string;
path: string;
parentId: string;
tablePath: string;
};
schemaDocPath: string;
row: any;
column: any;
action: "run" | "redo" | "undo";
actionParams: any;
};
// import {
// makeId,
// hasGoogleMailServer,
// hasMissingFields,
// } from "../utils";
import { hasAnyRole } from "./utils/auth";
const missingFieldsReducer = (data: any) => (acc: string[], curr: string) => {
if (data[curr] === undefined) {
return [...acc, curr];
} else return acc;
};
// const generateSchemaDocPath = (tablePath:string) => {
// const pathComponents = tablePath.split("/");
// return `_FIRETABLE_/settings/${
// pathComponents[1] === "table" ? "schema" : "groupSchema"
// }/${pathComponents[2]}`;
// };
const serverTimestamp = admin.firestore.FieldValue.serverTimestamp;
const scriptLoader = (uri: string, file: fs.WriteStream) =>
new Promise((resolve, reject) => {
request({
/* Here you should specify the exact link to the file you are trying to download */
uri,
})
.pipe(file)
.on("finish", () => {
console.log(`The file has finished downloading.`);
resolve();
})
.on("error", (error: any) => {
reject(error);
});
}).catch((error) => {
console.log(`Something happened: ${error}`);
});
export const actionScript = functions.https.onCall(
async (data: ActionData, context: functions.https.CallableContext) => {
try {
if (!context) {
throw Error(`You are unauthenticated`);
}
const { ref, actionParams, row, column, action, schemaDocPath } = data;
const schemaDoc = await db.doc(schemaDocPath).get();
const schemaDocData = schemaDoc.data();
if (!schemaDocData) {
return {
success: false,
message: "no schema found",
};
}
const config = schemaDocData.columns[column.key].config;
const { requiredRoles, requiredFields } = config;
if (!hasAnyRole(requiredRoles, context)) {
throw Error(`You don't have the required role permissions`);
}
const missingRequiredFields = requiredFields
? requiredFields.reduce(missingFieldsReducer(row), [])
: [];
if (missingRequiredFields.length > 0) {
throw new Error(
`Missing required fields: ${missingRequiredFields.join(", ")}`
);
}
//
// get auth
const scriptSource: string =
"https://gist.githubusercontent.com/shamsmosowi/3a0a93aec9faa0edba55fa228a9f9495/raw/3ffb8ed3a118caf0dd8140254794f523fa370cdd/sript.js";
const filePath = "script.js";
const baseFileName = path.basename(filePath, path.extname(filePath));
const scriptLocation = path.join(
os.tmpdir(),
baseFileName + path.extname(filePath)
);
const file = fs.createWriteStream(scriptLocation);
await scriptLoader(scriptSource, file);
const loadedScript: any = await import(scriptLocation);
const {
cloudActionScript,
}: { cloudActionScript: Function } = loadedScript;
const result: {
message: string;
status: string;
success: boolean;
} = await cloudActionScript({
row,
db,
ref,
auth,
utilFns,
actionParams,
context,
});
if (result.success)
return {
success: result.success,
message: result.message,
cellValue: {
redo: config["redo.enabled"],
status: result.status,
completedAt: serverTimestamp(),
meta: { ranBy: context.auth!.token.email },
undo: action !== "undo" && config["undo.enabled"],
},
undo: config["undo.enabled"],
redo: config["redo.enabled"],
};
else
return {
success: false,
message: result.message,
};
} catch (error) {
return {
success: false,
error,
message: error.message,
};
}
}
);

View File

@@ -1,12 +0,0 @@
export const cloudActionScript = async ({
row,
db,
ref,
auth,
utilFns,
actionParams,
context,
}: any) => {
const claims = context.auth.token;
return { message: `hi ${JSON.stringify(claims.name)}!`, success: true };
};

View File

@@ -1,66 +0,0 @@
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
firebase-debug.log*
firebase-debug.*.log*
# Firebase cache
.firebase/
# Firebase config
# Uncomment this if you'd like others to create their own Firebase project.
# For a team working on the same Firebase project(s), it is recommended to leave
# it commented so all members can deploy to the same project(s) in .firebaserc.
# .firebaserc
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
# nyc test coverage
.nyc_output
# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (http://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variables file
.env

View File

@@ -1,8 +0,0 @@
{
"functions": {
"predeploy": [
"npm --prefix \"$RESOURCE_DIR\" run lint",
"npm --prefix \"$RESOURCE_DIR\" run build"
]
}
}

View File

@@ -1,68 +0,0 @@
module.exports = {
env: {
browser: true,
es6: true,
node: true,
},
extends: [
"plugin:import/errors",
"plugin:import/warnings",
"plugin:import/typescript",
],
parser: "@typescript-eslint/parser",
parserOptions: {
project: "tsconfig.json",
sourceType: "module",
},
plugins: ["@typescript-eslint", "import"],
rules: {
"@typescript-eslint/adjacent-overload-signatures": "error",
"@typescript-eslint/no-empty-function": "error",
"@typescript-eslint/no-empty-interface": "warn",
"@typescript-eslint/no-floating-promises": "error",
"@typescript-eslint/no-namespace": "error",
"@typescript-eslint/no-unnecessary-type-assertion": "error",
"@typescript-eslint/prefer-for-of": "warn",
"@typescript-eslint/triple-slash-reference": "error",
"@typescript-eslint/unified-signatures": "warn",
"comma-dangle": ["error", "always-multiline"],
"constructor-super": "error",
eqeqeq: ["warn", "always"],
"import/no-deprecated": "warn",
"import/no-extraneous-dependencies": "error",
"import/no-unassigned-import": "warn",
"no-cond-assign": "error",
"no-duplicate-case": "error",
"no-duplicate-imports": "error",
"no-empty": [
"error",
{
allowEmptyCatch: true,
},
],
"no-invalid-this": "error",
"no-new-wrappers": "error",
"no-param-reassign": "error",
"no-redeclare": "error",
"no-sequences": "error",
"no-shadow": [
"error",
{
hoist: "all",
},
],
"no-throw-literal": "error",
"no-unsafe-finally": "error",
"no-unused-labels": "error",
"no-var": "warn",
"no-void": "error",
"prefer-const": "warn",
},
settings: {
jsdoc: {
tagNamePreference: {
returns: "return",
},
},
},
};

View File

@@ -1,12 +0,0 @@
# Compiled JavaScript files
**/*.js
**/*.js.map
# Except the ESLint config file
!.eslintrc.js
# TypeScript v1 declaration files
typings/
# Node.js dependency directory
node_modules/

View File

@@ -1,30 +0,0 @@
{
"name": "functions",
"scripts": {
"lint": "eslint \"src/**/*\"",
"build": "tsc",
"serve": "npm run build && firebase emulators:start --only functions",
"shell": "npm run build && firebase functions:shell",
"start": "npm run shell",
"deploy": "firebase deploy --only functions",
"logs": "firebase functions:log"
},
"engines": {
"node": "12"
},
"main": "lib/index.js",
"dependencies": {
"@google-cloud/cloudbuild": "^2.0.6",
"firebase-admin": "^9.2.0",
"firebase-functions": "^3.11.0"
},
"devDependencies": {
"@typescript-eslint/eslint-plugin": "^3.9.1",
"@typescript-eslint/parser": "^3.8.0",
"eslint": "^7.6.0",
"eslint-plugin-import": "^2.22.0",
"firebase-functions-test": "^0.2.0",
"typescript": "^3.8.0"
},
"private": true
}

View File

@@ -1,82 +0,0 @@
import * as functions from "firebase-functions";
import { hasAnyRole } from "./utils/auth";
//import { serverTimestamp } from "./utils";
import { db } from "./firebaseConfig";
const { CloudBuildClient } = require("@google-cloud/cloudbuild");
const cb = new CloudBuildClient();
export const FT_triggerCloudBuild = functions.https.onCall(
async (
data: {
schemaPath: string;
},
context: functions.https.CallableContext
) => {
try {
const authorized = hasAnyRole(["ADMIN"], context);
const { schemaPath } = data;
const firetableSettingsDoc = await db.doc("_FIRETABLE_/settings").get();
const firetableSettings = firetableSettingsDoc.data();
if (!firetableSettings) throw Error("Error: firetableSettings not found");
const { triggerId, branch } = firetableSettings.cloudBuild;
if (!context.auth || !authorized) {
console.warn(`unauthorized user: ${context.auth?.uid}`);
return {
success: false,
message: "you don't have permission to trigger a build",
};
}
// Starts a build against the branch provided.
const [resp] = await cb.runBuildTrigger({
projectId: process.env.GCLOUD_PROJECT, //project hosting cloud build
triggerId,
source: {
branchName: branch,
substitutions: {
_PROJECT_ID: process.env.GCLOUD_PROJECT,
_SCHEMA_PATH: schemaPath,
},
},
});
const buildId = resp.metadata.build.id;
const logUrl = resp.metadata.build.logUrl;
await db.doc(schemaPath).update({ cloudBuild: { logUrl, buildId } });
console.log({ buildId, logUrl });
if (buildId && logUrl) {
return {
message: "Deploying latest configuration",
success: true,
};
}
return false;
} catch (err) {
return {
message: err,
success: false,
};
}
}
);
export const FT_cloudBuildUpdates = functions.pubsub
.topic("cloud-builds")
.onPublish(async (message, context) => {
console.log(JSON.stringify(message));
const { buildId, status } = message.attributes;
console.log(JSON.stringify({ buildId, status }));
//message
//status: "SUCCESS"
//buildId: "1a6d7819-aa35-486c-a29c-fb67eb39430f"
const query = await db
.collection("_FIRETABLE_/settings/schema")
.where("cloudBuild.buildId", "==", buildId)
.get();
if (query.docs.length !== 0) {
await query.docs[0].ref.update({ "cloudBuild.status": status });
}
return true;
});
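A hedged sketch of how a Firebase client might invoke the callable above; the schema path is illustrative, and the app is assumed to already be initialized with the v8 web SDK:

import firebase from "firebase/app";
import "firebase/functions";

async function deploySchemaFunctions() {
  const triggerBuild = firebase.functions().httpsCallable("FT_triggerCloudBuild");
  const { data } = await triggerBuild({
    schemaPath: "_FIRETABLE_/settings/schema/orders",
  });
  // expected shape on success: { message: "Deploying latest configuration", success: true }
  console.log(data);
}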

View File

@@ -1,17 +0,0 @@
import * as functions from "firebase-functions";
export const hasAnyRole = (
authorizedRoles: string[],
context: functions.https.CallableContext
) => {
if (!context.auth || !context.auth.token.roles) return false;
const userRoles = context.auth.token.roles as string[];
const authorization = authorizedRoles.reduce(
(authorized: boolean, role: string) => {
if (userRoles.includes(role)) return true;
else return authorized;
},
false
);
return authorization;
};
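For context, a hedged sketch of one way the roles claim checked above could be set, using the Admin SDK's custom-claims API; the uid and role list are illustrative:

import * as admin from "firebase-admin";

async function grantRoles() {
  // makes context.auth.token.roles available after the user's next ID token refresh
  await admin.auth().setCustomUserClaims("some-user-uid", { roles: ["ADMIN"] });
}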

View File

@@ -1,57 +0,0 @@
import * as admin from "firebase-admin";
export const serverTimestamp = admin.firestore.FieldValue.serverTimestamp;
// import { sendEmail } from "./email";
// import { hasAnyRole } from "./auth";
// import { SecretManagerServiceClient } from "@google-cloud/secret-manager";
// const secrets = new SecretManagerServiceClient();
// export const getSecret = async (name: string, v: string = "latest") => {
// const [version] = await secrets.accessSecretVersion({
// name: `projects/${process.env.GCLOUD_PROJECT}/secrets/${name}/versions/${v}`,
// });
// const payload = version.payload?.data?.toString();
// if (payload && payload[0] === "{") {
// return JSON.parse(payload);
// } else {
// return payload;
// }
// };
// const characters =
// "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz";
// export function generateId(length: number): string {
// let result = "";
// const charactersLength = characters.length;
// for (let i = 0; i < length; i++) {
// result += characters.charAt(Math.floor(Math.random() * charactersLength));
// }
// return result;
// }
// export const hasRequiredFields = (requiredFields: string[], data: any) =>
// requiredFields.reduce((acc: boolean, currField: string) => {
// if (data[currField] === undefined || data[currField] === null) return false;
// else return acc;
// }, true);
// async function asyncForEach(array: any[], callback: Function) {
// for (let index = 0; index < array.length; index++) {
// await callback(array[index], index, array);
// }
// }
// export const getTriggerType = (change) =>
// Boolean(change.after.data()) && Boolean(change.before.data())
// ? "update"
// : Boolean(change.after.data())
// ? "create"
// : "delete";
// export default {
// getSecret,
// hasRequiredFields,
// generateId,
// sendEmail,
// serverTimestamp,
// hasAnyRole,
// asyncForEach,
// };

View File

@@ -1,13 +0,0 @@
{
"compilerOptions": {
"module": "commonjs",
"noImplicitReturns": true,
"noUnusedLocals": true,
"outDir": "lib",
"sourceMap": true,
"strict": true,
"target": "es2017"
},
"compileOnSave": true,
"include": ["src"]
}

View File

@@ -17,17 +17,17 @@
"main": "lib/index.js",
"dependencies": {
"@google-cloud/cloudbuild": "^2.0.6",
"@google-cloud/firestore": "^4.9.1",
"@google-cloud/firestore": "^4.9.7",
"@google-cloud/pubsub": "^2.5.0",
"@google-cloud/storage": "^5.1.2",
"@sendgrid/mail": "^7.4.2",
"@slack/web-api": "^6.0.0",
"algoliasearch": "^4.6.0",
"algoliasearch": "^4.8.6",
"firebase-admin": "^9.4.2",
"firebase-functions": "^3.13.1",
"imagemin": "^7.0.1",
"imagemin-mozjpeg": "^9.0.0",
"imagemin-pngquant": "^9.0.0",
"imagemin-pngquant": "^9.0.2",
"lodash": "^4.17.21",
"sharp": "^0.25.4"
},

View File

@@ -0,0 +1,51 @@
import algoliasearch from "algoliasearch";
import * as functions from "firebase-functions";
import { env } from "./config";

const algoliaClient = algoliasearch(env.algolia.app, env.algolia.key);

export const getAlgoliaSearchKey = functions.https.onCall(
  async (data: { index: string }, context: functions.https.CallableContext) => {
    const requestedIndex = data.index;
    try {
      if (!context.auth || !context.auth.token)
        throw new Error("Unauthenticated");
      // you can add more roles here that need access to all algolia indices
      const allIndicesRoles = ["ADMIN", "TEAM"];
      const rolesIndicesAccess: Record<string, string[]> = {
        ROLE: ["index_1", "index_2"],
      };
      const userRoles = (context.auth.token.roles ?? []) as string[];
      if (
        userRoles.some(
          (role) =>
            allIndicesRoles.includes(role) ||
            rolesIndicesAccess[role]?.includes(requestedIndex)
        )
      ) {
        const validUntil = Math.floor(Date.now() / 1000) + 3600;
        const key = algoliaClient.generateSecuredApiKey(env.algolia.search, {
          filters: "",
          validUntil,
          restrictIndices: [requestedIndex],
          userToken: context.auth.uid,
        });
        return {
          data: key,
          success: true,
        };
      } else {
        return {
          message: "Missing required roles for this index",
          success: false,
        };
      }
    } catch (error) {
      return {
        success: false,
        error,
        message: error.message,
      };
    }
  }
);
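A hedged sketch of consuming this callable from a client and searching with the returned secured key; the index name and Algolia app ID are placeholders:

import algoliasearch from "algoliasearch/lite";
import firebase from "firebase/app";
import "firebase/functions";

async function searchWithScopedKey(query: string) {
  const getKey = firebase.functions().httpsCallable("getAlgoliaSearchKey");
  const { data } = await getKey({ index: "index_1" });
  if (!data.success) throw new Error(data.message);
  // data.data is a time-limited key restricted to the requested index
  const client = algoliasearch("YOUR_ALGOLIA_APP_ID", data.data);
  return client.initIndex("index_1").search(query);
}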

View File

@@ -0,0 +1,3 @@
export const collectionPath = ''
export const functionName = ''
export default {}

View File

@@ -1,32 +1,33 @@
export { triggerCloudBuild, cloudBuildUpdates } from "./buildTriggers"; // a callable used for triggering cloudbuild to build and deploy configurable cloud functions
export {
scheduledFirestoreBackup, // callableFirestoreBackup
} from "./backup";
import * as callableFns from "./callable";
// export { triggerCloudBuild, cloudBuildUpdates } from "./buildTriggers"; // a callable used for triggering cloudbuild to build and deploy configurable cloud functions
// export {
// scheduledFirestoreBackup, // callableFirestoreBackup
// } from "./backup";
// import * as callableFns from "./callable";
export const callable = callableFns;
// export const callable = callableFns;
// all the cloud functions below are deployed using the triggerCloudBuild callable function
// these functions are designed to be built and deployed based on the configuration passed through the callable
// // all the cloud functions below are deployed using the triggerCloudBuild callable function
// // these functions are designed to be built and deployed based on the configuration passed through the callable
export { FT_aggregates } from "./aggregates";
export { FT_subTableStats } from "./subTableStats";
// export { FT_aggregates } from "./aggregates";
// export { FT_subTableStats } from "./subTableStats";
export { actionScript } from "./actionScript";
// export { actionScript } from "./actionScript";
export { webhook } from "./webhooks";
// export { webhook } from "./webhooks";
export { FT_snapshotSync } from "./snapshotSync";
// export { FT_snapshotSync } from "./snapshotSync";
export { FT_compressedThumbnail } from "./compressedThumbnail";
// export { FT_compressedThumbnail } from "./compressedThumbnail";
export { slackBotMessageOnCreate } from "./slackOnTrigger/trigger";
export { getAlgoliaSearchKey } from "./algoliaSearchKey";
//deprecated, moved to FT_functions folder and used within sparks table functions
export { FT_derivatives } from "./derivatives";
export { FT_algolia } from "./algolia";
export { FT_email } from "./emailOnTrigger";
export { FT_slack } from "./slackOnTrigger";
export { FT_sync } from "./collectionSync";
export { FT_spark } from "./sparks";
export { FT_history } from "./history";
//deprecated, updated implementation moved to FT_build folder and used within sparks table functions
// export { FT_derivatives } from "./derivatives";
// export { FT_algolia } from "./algolia";
// export { FT_email } from "./emailOnTrigger";
// export { FT_slack } from "./slackOnTrigger";
// export { FT_sync } from "./collectionSync";
// export { FT_spark } from "./sparks";
// export { FT_history } from "./history";
// export { slackBotMessageOnCreate } from "./slackOnTrigger/trigger";

View File

@@ -1,88 +0,0 @@
import * as functions from "firebase-functions";
import { hasRequiredFields } from "../utils";
const { PubSub } = require("@google-cloud/pubsub");
const pubSubClient = new PubSub();
import { db } from "../config";
import config, { collectionPath } from "../functionConfig";
// generated using generateConfig.ts
const functionConfig: any = config;
const sparkTrigger = async (
change: functions.Change<functions.firestore.DocumentSnapshot>,
context: functions.EventContext
) => {
const beforeData = change.before?.data();
const afterData = change.after?.data();
const ref = change.after ? change.after.ref : change.before.ref;
const triggerType =
Boolean(beforeData) && Boolean(afterData)
? "update"
: Boolean(afterData)
? "create"
: "delete";
try {
const sparkPromises = functionConfig.map(async (sparkConfig) => {
const {
topic,
triggers,
shouldRun,
requiredFields,
sparkBody,
} = sparkConfig;
const sparkContext = {
row: triggerType === "delete" ? beforeData : afterData,
ref,
db,
change,
triggerType,
sparkConfig,
};
if (!triggers.includes(triggerType)) return false; // check if trigger type is included in the spark
if (
triggerType !== "delete" &&
requiredFields &&
requiredFields.length !== 0 &&
!hasRequiredFields(requiredFields, afterData)
)
return false; // check if it has the required fields for the spark to run
const dontRun = shouldRun ? !(await shouldRun(sparkContext)) : false;
if (dontRun) return false;
const sparkData = await Object.keys(sparkBody).reduce(
async (acc, key) => ({
[key]: await sparkBody[key](sparkContext),
...(await acc),
}),
{}
);
console.log(JSON.stringify(sparkData));
const messageBuffer = Buffer.from(JSON.stringify(sparkData), "utf8");
const messageId = await pubSubClient.topic(topic).publish(messageBuffer);
console.log(`Message ${messageId} published.`);
return true;
});
await Promise.all(sparkPromises);
return true;
} catch (err) {
console.error(err);
return Promise.reject(err);
}
};
const subCollectionTriggerPath = (schemaPath) => {
const subtables = schemaPath.match(/\/subTables\//g);
if (subtables === null) return schemaPath;
const collection = schemaPath.split("/").pop();
return `${subtables
.map((_, i) => `{col${i}}/{doc${i}}/`)
.join("")}${collection}`;
};
export const FT_spark = {
[collectionPath
.replace(/\/subTables\//g, "_sub_")
.replace(/-/g, "_")]: functions.firestore
.document(`${subCollectionTriggerPath(collectionPath)}/{docId}`)
.onWrite(sparkTrigger),
};
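For orientation, a hedged sketch of a single spark entry in the shape sparkTrigger above consumes; the topic, fields, and predicates are invented for the example:

// Illustrative only: one entry of the generated functionConfig array.
const exampleSpark = {
  topic: "sendEmail",
  triggers: ["create", "update"],
  requiredFields: ["email"],
  shouldRun: async ({ row }: any) => row.subscribed === true,
  sparkBody: {
    to: async ({ row }: any) => row.email,
    subject: async () => "Welcome to the waitlist!",
  },
};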

View File

@@ -2,109 +2,109 @@
# yarn lockfile v1
"@algolia/cache-browser-local-storage@4.8.4":
version "4.8.4"
resolved "https://registry.yarnpkg.com/@algolia/cache-browser-local-storage/-/cache-browser-local-storage-4.8.4.tgz#6a03ffc6b0b5b5aa7f74732bf8091a0f3d2b0986"
integrity sha512-qSS3VMP3oMhcLrYIFveRyt3F5XB6MqWogF4Vooj8KvOvqv6jBmYwkAueSXCF5pkJEaA72VL9+9NbBpfC8ez2ww==
"@algolia/cache-browser-local-storage@4.8.6":
version "4.8.6"
resolved "https://registry.yarnpkg.com/@algolia/cache-browser-local-storage/-/cache-browser-local-storage-4.8.6.tgz#6be9644b68efbbc231ac3f0a4cfa985ef31eade9"
integrity sha512-Bam7otzjIEgrRXWmk0Amm1+B3ROI5dQnUfJEBjIy0YPM0kMahEoJXCw6160tGKxJLl1g6icoC953nGshQKO7cA==
dependencies:
"@algolia/cache-common" "4.8.4"
"@algolia/cache-common" "4.8.6"
"@algolia/cache-common@4.8.4":
version "4.8.4"
resolved "https://registry.yarnpkg.com/@algolia/cache-common/-/cache-common-4.8.4.tgz#b105bdfe3fa0ba15db936177c4db420befed2ab7"
integrity sha512-5+dLmj6qFy4WOtnNQuFRfWTIIDdpUigv+dXaKMFplNPBvZHGFy3hcRjWqYzGcqaeLqcXbN8cU5r75mvrlJIxcw==
"@algolia/cache-common@4.8.6":
version "4.8.6"
resolved "https://registry.yarnpkg.com/@algolia/cache-common/-/cache-common-4.8.6.tgz#dff1697a0fe3d7856630071559661ec5ad90f31c"
integrity sha512-eGQlsXU5G7n4RvV/K6qe6lRAeL6EKAYPT3yZDBjCW4pAh7JWta+77a7BwUQkTqXN1MEQWZXjex3E4z/vFpzNrg==
"@algolia/cache-in-memory@4.8.4":
version "4.8.4"
resolved "https://registry.yarnpkg.com/@algolia/cache-in-memory/-/cache-in-memory-4.8.4.tgz#e978935dd8c4bbd555820e9b9fc863a24f3d38dd"
integrity sha512-PBN4YKxn/L+HjVKqUE5rtLiFKqzm4qnUoF7QvCFFmFAViCdYwZSMFVmDobstqWY3KULfsEqaeD4eU4jxZbKhEA==
"@algolia/cache-in-memory@4.8.6":
version "4.8.6"
resolved "https://registry.yarnpkg.com/@algolia/cache-in-memory/-/cache-in-memory-4.8.6.tgz#9a100a1be05e700a253ef4bdabd3bd45df2f67d4"
integrity sha512-kbJrvCFANxL/l5Pq1NFyHLRphKDwmqcD/OJga0IbNKEulRGDPkt1+pC7/q8d2ikP12adBjLLg2CVias9RJpIaw==
dependencies:
"@algolia/cache-common" "4.8.4"
"@algolia/cache-common" "4.8.6"
"@algolia/client-account@4.8.4":
version "4.8.4"
resolved "https://registry.yarnpkg.com/@algolia/client-account/-/client-account-4.8.4.tgz#a0af429e3587b33a988fec98ce0c739fd16143aa"
integrity sha512-mrsOnGV4O2b+t1CumUH72+Psw9d9qwngBEp2le7IMSceJQywQvNCyJ4B4qyoozHsIGapXfcVAOhRxqUsNQ6U6g==
"@algolia/client-account@4.8.6":
version "4.8.6"
resolved "https://registry.yarnpkg.com/@algolia/client-account/-/client-account-4.8.6.tgz#050cfd6a6d3e06a5a8e1029f24d6d50524d186c6"
integrity sha512-FQVJE/BgCb78jtG7V0r30sMl9P5JKsrsOacGtGF2YebqI0YF25y8Z1nO39lbdjahxUS3QkDw2d0P2EVMj65g2Q==
dependencies:
"@algolia/client-common" "4.8.4"
"@algolia/client-search" "4.8.4"
"@algolia/transporter" "4.8.4"
"@algolia/client-common" "4.8.6"
"@algolia/client-search" "4.8.6"
"@algolia/transporter" "4.8.6"
"@algolia/client-analytics@4.8.4":
version "4.8.4"
resolved "https://registry.yarnpkg.com/@algolia/client-analytics/-/client-analytics-4.8.4.tgz#77c81b699909b50ecd9bf97997f014cfeb358fc3"
integrity sha512-Xy70njSUgG/QTv5+rPjsTIzBF/bjxseS5h9SawrQGzovTosbJbu9JBlg4YwVJnYvjovzpr7S39+gPIPc8M7+Rg==
"@algolia/client-analytics@4.8.6":
version "4.8.6"
resolved "https://registry.yarnpkg.com/@algolia/client-analytics/-/client-analytics-4.8.6.tgz#ac644cfc9d87a085b9e53c71a42ef6e90d828501"
integrity sha512-ZBYFUlzNaWDFtt0rYHI7xbfVX0lPWU9lcEEXI/BlnkRgEkm247H503tNatPQFA1YGkob52EU18sV1eJ+OFRBLA==
dependencies:
"@algolia/client-common" "4.8.4"
"@algolia/client-search" "4.8.4"
"@algolia/requester-common" "4.8.4"
"@algolia/transporter" "4.8.4"
"@algolia/client-common" "4.8.6"
"@algolia/client-search" "4.8.6"
"@algolia/requester-common" "4.8.6"
"@algolia/transporter" "4.8.6"
"@algolia/client-common@4.8.4":
version "4.8.4"
resolved "https://registry.yarnpkg.com/@algolia/client-common/-/client-common-4.8.4.tgz#a1b35645253c7f96925bfe91bac486e755329b77"
integrity sha512-sQlRa+KWFn+D8AOEZb4kj6RE/i6DnPwVOF4AnNf9IjNB0mUUhLWw96cQN6GDx0KE4lhW67t+qR39ZuuDBgR9ww==
"@algolia/client-common@4.8.6":
version "4.8.6"
resolved "https://registry.yarnpkg.com/@algolia/client-common/-/client-common-4.8.6.tgz#c8b81af250ed8beb741a0e5cfdd3236bb4292c94"
integrity sha512-8dI+K3Nvbes2YRZm2LY7bdCUD05e60BhacrMLxFuKxnBGuNehME1wbxq/QxcG1iNFJlxLIze5TxIcNN3+pn76g==
dependencies:
"@algolia/requester-common" "4.8.4"
"@algolia/transporter" "4.8.4"
"@algolia/requester-common" "4.8.6"
"@algolia/transporter" "4.8.6"
"@algolia/client-recommendation@4.8.4":
version "4.8.4"
resolved "https://registry.yarnpkg.com/@algolia/client-recommendation/-/client-recommendation-4.8.4.tgz#1aaa9735e96865ff06321a8bc850829445c945d1"
integrity sha512-CE0CVqLGWotVOaUXyU33FVD9FZ/7rqcbwFPH5MgSjVdE0B1YWVedhR0s2BNKodXLcIGVLVYfXR05CLdvOlTw+A==
"@algolia/client-recommendation@4.8.6":
version "4.8.6"
resolved "https://registry.yarnpkg.com/@algolia/client-recommendation/-/client-recommendation-4.8.6.tgz#2518a09bfbeaec78b0d7a4213107f0899f80f9ac"
integrity sha512-Kg8DpjwvaWWujNx6sAUrSL+NTHxFe/UNaliCcSKaMhd3+FiPXN+CrSkO0KWR7I+oK2qGBTG/2Y0BhFOJ5/B/RA==
dependencies:
"@algolia/client-common" "4.8.4"
"@algolia/requester-common" "4.8.4"
"@algolia/transporter" "4.8.4"
"@algolia/client-common" "4.8.6"
"@algolia/requester-common" "4.8.6"
"@algolia/transporter" "4.8.6"
"@algolia/client-search@4.8.4":
version "4.8.4"
resolved "https://registry.yarnpkg.com/@algolia/client-search/-/client-search-4.8.4.tgz#0320c4a109d2cc220a9d1002f9ec64655a4494dc"
integrity sha512-eH2tRPnDU3tqpp0BSqP6coRRQe8fceqsupuf/1ho+Mcs5DM13mEuFmNOyPywHRlYLVPmbbCPRhDr5rB8QoN7XQ==
"@algolia/client-search@4.8.6":
version "4.8.6"
resolved "https://registry.yarnpkg.com/@algolia/client-search/-/client-search-4.8.6.tgz#1ca3f28c04ef4120b0563a293b30fcfe1b3fd1d0"
integrity sha512-vXLS6umL/9G3bwqc6pkrS9K5/s8coq55mpfRARL+bs0NsToOf77WSTdwzlxv/KdbVF7dHjXgUpBvJ6RyR4ZdAw==
dependencies:
"@algolia/client-common" "4.8.4"
"@algolia/requester-common" "4.8.4"
"@algolia/transporter" "4.8.4"
"@algolia/client-common" "4.8.6"
"@algolia/requester-common" "4.8.6"
"@algolia/transporter" "4.8.6"
"@algolia/logger-common@4.8.4":
version "4.8.4"
resolved "https://registry.yarnpkg.com/@algolia/logger-common/-/logger-common-4.8.4.tgz#42ecab3c92388a0d81b8532cefb47670da46cdd3"
integrity sha512-6hOaFG75Onmant9adcaeCZgvPYfnif7n0H1ycbixm6/WH3SmxqPMG+CMiW8mTNTRrrAEceQVrq6tDHD8jdnOOw==
"@algolia/logger-common@4.8.6":
version "4.8.6"
resolved "https://registry.yarnpkg.com/@algolia/logger-common/-/logger-common-4.8.6.tgz#8c44a4f550e12418b0ec8d76a068e4f1c64206d1"
integrity sha512-FMRxZGdDxSzd0/Mv0R1021FvUt0CcbsQLYeyckvSWX8w+Uk4o0lcV6UtZdERVR5XZsGOqoXLMIYDbR2vkbGbVw==
"@algolia/logger-console@4.8.4":
version "4.8.4"
resolved "https://registry.yarnpkg.com/@algolia/logger-console/-/logger-console-4.8.4.tgz#adfac58df84848443bff1326986a0ca98db866b9"
integrity sha512-+9T3t/eB9vseANFz9YbFHG0cHjzVP/DVfGqzTAkeSlvMHP69JzJga9Wb0Ai6J3xXE3d4k9K+k6t+kkjCQjzEqg==
"@algolia/logger-console@4.8.6":
version "4.8.6"
resolved "https://registry.yarnpkg.com/@algolia/logger-console/-/logger-console-4.8.6.tgz#77176570fa6532fa846c7cfa2c6280935b1a3a06"
integrity sha512-TYw9lwUCjvApC6Z0zn36T6gkCl7hbfJmnU+Z/D8pFJ3Yp7lz06S3oWGjbdrULrYP1w1VOhjd0X7/yGNsMhzutQ==
dependencies:
"@algolia/logger-common" "4.8.4"
"@algolia/logger-common" "4.8.6"
"@algolia/requester-browser-xhr@4.8.4":
version "4.8.4"
resolved "https://registry.yarnpkg.com/@algolia/requester-browser-xhr/-/requester-browser-xhr-4.8.4.tgz#30c5c9d129fafd863b7c9c7a988c36ec5754b973"
integrity sha512-BYa8O/pht0UL2bcm0ZkLZiyC+5dHrbc6gvKIo+OgqxmDb/K4KrVo6RIof3BVpR8fgcfxQJohjNVHKXHxEUhBCQ==
"@algolia/requester-browser-xhr@4.8.6":
version "4.8.6"
resolved "https://registry.yarnpkg.com/@algolia/requester-browser-xhr/-/requester-browser-xhr-4.8.6.tgz#dbcb5906d10c619d7f08fced2f68fa09abffe5fd"
integrity sha512-omh6uJ3CJXOmcrU9M3/KfGg8XkUuGJGIMkqEbkFvIebpBJxfs6TVs0ziNeMFAcAfhi8/CGgpLbDSgJtWdGQa6w==
dependencies:
"@algolia/requester-common" "4.8.4"
"@algolia/requester-common" "4.8.6"
"@algolia/requester-common@4.8.4":
version "4.8.4"
resolved "https://registry.yarnpkg.com/@algolia/requester-common/-/requester-common-4.8.4.tgz#670f5e43e4d09ff9a3b9bfda7ac0c03476a9e4b1"
integrity sha512-br3LXb6srfAy7F04axwExmrkPOlXCDckgTFoLFv/RT9Oo28SpoyvHqktyBovQLdzdTs+Laglf+LtOHr0iUrZJg==
"@algolia/requester-common@4.8.6":
version "4.8.6"
resolved "https://registry.yarnpkg.com/@algolia/requester-common/-/requester-common-4.8.6.tgz#37ea1f9ecc1afcd91532b9f9c952c62fdef42bca"
integrity sha512-r5xJqq/D9KACkI5DgRbrysVL5DUUagikpciH0k0zjBbm+cXiYfpmdflo/h6JnY6kmvWgjr/4DoeTjKYb/0deAQ==
"@algolia/requester-node-http@4.8.4":
version "4.8.4"
resolved "https://registry.yarnpkg.com/@algolia/requester-node-http/-/requester-node-http-4.8.4.tgz#8af2cceb45e5bb2b9e7ed3b7daa34f3c2580912a"
integrity sha512-o5Cc4UxYPn3IBHQSDBNFFhq1LQLv40eYvCvK0FPJ8xZkrnNXhjPvaLCu/lQTHpk/HX7DaE6fQ/KboU0OSPKevQ==
"@algolia/requester-node-http@4.8.6":
version "4.8.6"
resolved "https://registry.yarnpkg.com/@algolia/requester-node-http/-/requester-node-http-4.8.6.tgz#e966293224f3bd1ba32ce4f9bc0fdada5d8e69ec"
integrity sha512-TB36OqTVOKyHCOtdxhn/IJyI/NXi/BWy8IEbsiWwwZWlL79NWHbetj49jXWFolEYEuu8PgDjjZGpRhypSuO9XQ==
dependencies:
"@algolia/requester-common" "4.8.4"
"@algolia/requester-common" "4.8.6"
"@algolia/transporter@4.8.4":
version "4.8.4"
resolved "https://registry.yarnpkg.com/@algolia/transporter/-/transporter-4.8.4.tgz#09452334e380ff0706676303e6642e76b72ae0bf"
integrity sha512-EvXFYICxrr9QEO6m6awUeNOBstOxePQ2Fy0jtYlS1v9TY2P5HqKRzkxmaZjeYRBsXOImpVjgQIzTzj1Au4br2w==
"@algolia/transporter@4.8.6":
version "4.8.6"
resolved "https://registry.yarnpkg.com/@algolia/transporter/-/transporter-4.8.6.tgz#b605dcd971aed374bdd95dd8938b93b9df650109"
integrity sha512-NRb31J0TP7EPoVMpXZ4yAtr61d26R8KGaf6qdULknvq5sOVHuuH4PwmF08386ERfIsgnM/OBhl+uzwACdCIjSg==
dependencies:
"@algolia/cache-common" "4.8.4"
"@algolia/logger-common" "4.8.4"
"@algolia/requester-common" "4.8.4"
"@algolia/cache-common" "4.8.6"
"@algolia/logger-common" "4.8.6"
"@algolia/requester-common" "4.8.6"
"@apidevtools/json-schema-ref-parser@^9.0.3":
version "9.0.7"
@@ -217,14 +217,15 @@
retry-request "^4.1.1"
teeny-request "^7.0.0"
"@google-cloud/firestore@^4.5.0", "@google-cloud/firestore@^4.9.1":
version "4.9.1"
resolved "https://registry.yarnpkg.com/@google-cloud/firestore/-/firestore-4.9.1.tgz#ce56e3df9c10aaa32522efd6c871b476bc44f651"
integrity sha512-lmg9xXGq56uS/T2PxyYWZCJ4FeCMgWPwx5iNU/IjLvru90qXRlxVcT4pg2acxng+MC7RntnZJQaExWvJR7Jgsg==
"@google-cloud/firestore@^4.5.0", "@google-cloud/firestore@^4.9.7":
version "4.9.7"
resolved "https://registry.yarnpkg.com/@google-cloud/firestore/-/firestore-4.9.7.tgz#8fb9080ba0f6e074013412835b60db926515d139"
integrity sha512-s5W6rRxD5y3Oe3KJUNztIy4eIi9dBwJU36jd/QM3L8frpCuSh1fn6z0BD8IAV0AirQAg6aOzSlcwAwd/yeXCkw==
dependencies:
fast-deep-equal "^3.1.1"
functional-red-black-tree "^1.0.1"
google-gax "^2.9.2"
protobufjs "^6.8.6"
"@google-cloud/paginator@^3.0.0":
version "3.0.5"
@@ -720,25 +721,25 @@ ajv@^6.12.2, ajv@^6.12.3:
json-schema-traverse "^0.4.1"
uri-js "^4.2.2"
algoliasearch@^4.6.0:
version "4.8.4"
resolved "https://registry.yarnpkg.com/algoliasearch/-/algoliasearch-4.8.4.tgz#ac2fc9335dfe06f55b9bd4faf6050ea0c8e0feea"
integrity sha512-QbXpFvBKj/QhKWE7xBoqaWOWyw7ni6W6THSuFJHOcADRrInhjFCBYjrv+YsIhv9huCepKXWpfV4UJup9BslVhQ==
algoliasearch@^4.8.6:
version "4.8.6"
resolved "https://registry.yarnpkg.com/algoliasearch/-/algoliasearch-4.8.6.tgz#8d6d7d2315bb052705a8ef5c8dbf57a19d357c2b"
integrity sha512-G8IA3lcgaQB4r9HuQ4G+uSFjjz0Wv2OgEPiQ8emA+G2UUlroOfMl064j1bq/G+QTW0LmTQp9JwrFDRWxFM9J7w==
dependencies:
"@algolia/cache-browser-local-storage" "4.8.4"
"@algolia/cache-common" "4.8.4"
"@algolia/cache-in-memory" "4.8.4"
"@algolia/client-account" "4.8.4"
"@algolia/client-analytics" "4.8.4"
"@algolia/client-common" "4.8.4"
"@algolia/client-recommendation" "4.8.4"
"@algolia/client-search" "4.8.4"
"@algolia/logger-common" "4.8.4"
"@algolia/logger-console" "4.8.4"
"@algolia/requester-browser-xhr" "4.8.4"
"@algolia/requester-common" "4.8.4"
"@algolia/requester-node-http" "4.8.4"
"@algolia/transporter" "4.8.4"
"@algolia/cache-browser-local-storage" "4.8.6"
"@algolia/cache-common" "4.8.6"
"@algolia/cache-in-memory" "4.8.6"
"@algolia/client-account" "4.8.6"
"@algolia/client-analytics" "4.8.6"
"@algolia/client-common" "4.8.6"
"@algolia/client-recommendation" "4.8.6"
"@algolia/client-search" "4.8.6"
"@algolia/logger-common" "4.8.6"
"@algolia/logger-console" "4.8.6"
"@algolia/requester-browser-xhr" "4.8.6"
"@algolia/requester-common" "4.8.6"
"@algolia/requester-node-http" "4.8.6"
"@algolia/transporter" "4.8.6"
ansi-align@^3.0.0:
version "3.0.0"
@@ -2886,9 +2887,9 @@ github-from-package@0.0.0:
integrity sha1-l/tdlr/eiXMxPyDoKI75oWf6ZM4=
glob-parent@^5.1.0, glob-parent@~5.1.0:
version "5.1.1"
resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.1.tgz#b6c1ef417c4e5663ea498f1c45afac6916bbc229"
integrity sha512-FnI+VGOpnlGHWZxthPGR+QhR78fuiK0sNLkHQv+bL9fQi57lNNdquIbna/WrfROrolq8GK5Ek6BiMwqL/voRYQ==
version "5.1.2"
resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4"
integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==
dependencies:
is-glob "^4.0.1"
@@ -3127,9 +3128,9 @@ home-dir@^1.0.0:
integrity sha1-KRfrRL3JByztqUJXlUOEfjAX/k4=
hosted-git-info@^2.1.4:
version "2.8.8"
resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.8.tgz#7539bd4bc1e0e0a895815a2e0262420b12858488"
integrity sha512-f/wzC2QaWBs7t9IYqB4T3sR1xviIViXJRJTWBlx2Gf3g0Xi5vI7Yy4koXQ1c9OYDGHN9sBy1DQ2AB8fqZBWhUg==
version "2.8.9"
resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.9.tgz#dffc0bf9a21c02209090f2aa69429e1414daf3f9"
integrity sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==
http-cache-semantics@3.8.1:
version "3.8.1"
@@ -3241,10 +3242,10 @@ imagemin-mozjpeg@^9.0.0:
is-jpg "^2.0.0"
mozjpeg "^7.0.0"
imagemin-pngquant@^9.0.0:
version "9.0.1"
resolved "https://registry.yarnpkg.com/imagemin-pngquant/-/imagemin-pngquant-9.0.1.tgz#ecf22f522bdb734a503ecc21bdd7bc3d0230edcc"
integrity sha512-PYyo9G/xwddf+Qqlqe3onz5ZH7p6vHYVVkiuuczUjxZmfekyY77RXaOA/AR6FnVoeQxGa/pDtEK5xUKOcVo+sA==
imagemin-pngquant@^9.0.2:
version "9.0.2"
resolved "https://registry.yarnpkg.com/imagemin-pngquant/-/imagemin-pngquant-9.0.2.tgz#38155702b0cc4f60f671ba7c2b086ea3805d9567"
integrity sha512-cj//bKo8+Frd/DM8l6Pg9pws1pnDUjgb7ae++sUX1kUVdv2nrngPykhiUOgFeE0LGY/LmUbCf4egCHC4YUcZSg==
dependencies:
execa "^4.0.0"
is-png "^2.0.0"
@@ -6498,9 +6499,9 @@ write-file-atomic@^3.0.0:
typedarray-to-buffer "^3.1.5"
ws@^7.2.3:
version "7.4.2"
resolved "https://registry.yarnpkg.com/ws/-/ws-7.4.2.tgz#782100048e54eb36fe9843363ab1c68672b261dd"
integrity sha512-T4tewALS3+qsrpGI/8dqNMLIVdq/g/85U98HPMa6F0m6xTbvhXU6RCQLqPH3+SlomNV/LdY6RXEbBpMH6EOJnA==
version "7.4.6"
resolved "https://registry.yarnpkg.com/ws/-/ws-7.4.6.tgz#5654ca8ecdeee47c33a9a4bf6d28e2be2980377c"
integrity sha512-YmhHDO4MzaDLB+M9ym/mDA5z0naX8j7SIlT8f8z+I0VtzsRbekxEutHSme7NPS2qE8StCYQNUnfWdXta/Yu85A==
xdg-basedir@^4.0.0:
version "4.0.0"

View File

@@ -280,6 +280,11 @@ buffer-from@^1.0.0:
resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef"
integrity sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==
commander@^6.1.0:
version "6.2.1"
resolved "https://registry.yarnpkg.com/commander/-/commander-6.2.1.tgz#0792eb682dfbc325999bb2b84fddddba110ac73c"
integrity sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==
compressible@^2.0.12:
version "2.0.18"
resolved "https://registry.yarnpkg.com/compressible/-/compressible-2.0.18.tgz#af53cca6b070d4c3c0750fbd77286a6d7cc46fba"
@@ -324,13 +329,11 @@ crypto-random-string@^2.0.0:
resolved "https://registry.yarnpkg.com/crypto-random-string/-/crypto-random-string-2.0.0.tgz#ef2a7a966ec11083388369baa02ebead229b30d5"
integrity sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA==
date-and-time@^0.14.2:
version "0.14.2"
resolved "https://registry.yarnpkg.com/date-and-time/-/date-and-time-0.14.2.tgz#a4266c3dead460f6c231fe9674e585908dac354e"
integrity sha512-EFTCh9zRSEpGPmJaexg7HTuzZHh6cnJj1ui7IGCFNXzd2QdpsNh05Db5TF3xzJm30YN+A8/6xHSuRcQqoc3kFA==
debug@4, debug@^4.1.1:
version "4.2.0"
resolved "https://registry.yarnpkg.com/debug/-/debug-4.2.0.tgz#7f150f93920e94c58f5574c2fd01a3110effe7f1"
@@ -1170,9 +1173,9 @@ write-file-atomic@^3.0.0:
typedarray-to-buffer "^3.1.5"
ws@^7.3.1:
version "7.3.1"
resolved "https://registry.yarnpkg.com/ws/-/ws-7.3.1.tgz#d0547bf67f7ce4f12a72dfe31262c68d7dc551c8"
integrity sha512-D3RuNkynyHmEJIpD2qrgVkc9DQ23OrN/moAwZX4L8DfvszsJxpjQuUq3LMx6HoYji9fbIOBY18XWBsAux1ZZUA==
version "7.4.6"
resolved "https://registry.yarnpkg.com/ws/-/ws-7.4.6.tgz#5654ca8ecdeee47c33a9a4bf6d28e2be2980377c"
integrity sha512-YmhHDO4MzaDLB+M9ym/mDA5z0naX8j7SIlT8f8z+I0VtzsRbekxEutHSme7NPS2qE8StCYQNUnfWdXta/Yu85A==
xdg-basedir@^4.0.0:
version "4.0.0"

View File

@@ -10,14 +10,12 @@
"logs": "firebase functions:log"
},
"engines": {
"node": "12"
"node": "14"
},
"main": "lib/index.js",
"dependencies": {
"@types/request": "^2.48.5",
"firebase-admin": "^9.2.0",
"firebase-functions": "^3.11.0",
"request": "^2.88.2"
"firebase-admin": "^9.10.0",
"firebase-functions": "^3.14.1"
},
"devDependencies": {
"firebase-tools": "^8.7.0",

View File

@@ -8,6 +8,6 @@ export const db = admin.firestore();
// Initialize Auth
export const auth = admin.auth();
const settings = { timestampsInSnapshots: true };
const settings = { timestampsInSnapshots: true, ignoreUndefinedProperties: true };
db.settings(settings);
export const env = functions.config();

View File

@@ -0,0 +1,113 @@
import * as functions from "firebase-functions";
import * as _ from "lodash";
import { db, auth } from "./firebaseConfig";
import * as admin from "firebase-admin";
import utilFns from "./utils";
type ActionData = {
ref: {
id: string;
path: string;
parentId: string;
tablePath: string;
};
schemaDocPath?: string;
column: any;
action: "run" | "redo" | "undo";
actionParams: any;
};
const missingFieldsReducer = (data: any) => (acc: string[], curr: string) => {
if (data[curr] === undefined) {
return [...acc, curr];
} else return acc;
};
const generateSchemaDocPath = (tablePath) => {
const pathComponents = tablePath.split("/");
return `_FIRETABLE_/settings/${
pathComponents[1] === "table" ? "schema" : "groupSchema"
}/${pathComponents[2]}`;
};
const serverTimestamp = admin.firestore.FieldValue.serverTimestamp;
export const actionScript = functions.https.onCall(
async (data: ActionData, context: functions.https.CallableContext) => {
try {
if (!context) {
throw Error(`You are unauthenticated`);
}
const { ref, actionParams, column, action, schemaDocPath } = data;
const _schemaDocPath =
schemaDocPath ?? generateSchemaDocPath(ref.tablePath);
const [schemaDoc, rowQuery] = await Promise.all([
db.doc(_schemaDocPath).get(),
db.doc(ref.path).get(),
]);
const row = rowQuery.data();
const schemaDocData = schemaDoc.data();
if (!schemaDocData) {
return {
success: false,
message: "no schema found",
};
}
const config = schemaDocData.columns[column.key].config;
const { script, requiredRoles, requiredFields } = config;
if (!requiredRoles || requiredRoles.length === 0) {
throw Error(`You need to specify at least one role to run this script`);
}
if (!utilFns.hasAnyRole(requiredRoles, context)) {
throw Error(`You don't have the required role permissions`);
}
const missingRequiredFields = requiredFields
? requiredFields.reduce(missingFieldsReducer(row), [])
: [];
if (missingRequiredFields.length > 0) {
throw new Error(
`Missing required fields: ${missingRequiredFields.join(", ")}`
);
}
const result: {
message: string;
status: string;
success: boolean;
} = await eval(
`async({row,db, ref,auth,utilFns,actionParams,context})=>{${
action === "undo" ? config["undo.script"] : script
}}`
)({ row, db, auth, utilFns, ref, actionParams, context });
if (result.success) {
const cellValue = {
redo: config["redo.enabled"],
status: result.status,
completedAt: serverTimestamp(),
ranBy: context.auth!.token.email,
undo: config["undo.enabled"],
};
const userDoc = await db.collection("_FT_USERS").doc(context.auth!.uid).get();
const user = userDoc?.get("user");
await db.doc(ref.path).update({
[column.key]: cellValue,
_ft_updatedBy: user
? { ...user, ...context.auth!, timestamp: new Date() }
: null,
});
return {
...result,
cellValue,
};
} else {
return {
success: false,
message: result.message,
};
}
} catch (error) {
return {
success: false,
error,
message: error.message,
};
}
}
);
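A hedged sketch of what an action column's config.script string might contain; actionScript above evaluates it with row, db, ref, auth, utilFns, actionParams, and context in scope and expects a { success, status, message } result. The collection and field names are invented:

// Illustrative only: this string lives in the column config, not in this file.
const exampleActionScript = `
  await db.collection("emailQueue").add({ to: row.email, subject: "Approved" });
  return { success: true, status: "Approved", message: "Email queued" };
`;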

View File

@@ -41,55 +41,55 @@
enabled "2.0.x"
kuler "^2.0.0"
"@firebase/app-types@0.6.1":
version "0.6.1"
resolved "https://registry.yarnpkg.com/@firebase/app-types/-/app-types-0.6.1.tgz#dcbd23030a71c0c74fc95d4a3f75ba81653850e9"
integrity sha512-L/ZnJRAq7F++utfuoTKX4CLBG5YR7tFO3PLzG1/oXXKEezJ0kRL3CMRoueBEmTCzVb/6SIs2Qlaw++uDgi5Xyg==
"@firebase/app-types@0.6.2":
version "0.6.2"
resolved "https://registry.yarnpkg.com/@firebase/app-types/-/app-types-0.6.2.tgz#8578cb1061a83ced4570188be9e225d54e0f27fb"
integrity sha512-2VXvq/K+n8XMdM4L2xy5bYp2ZXMawJXluUIDzUBvMthVR+lhxK4pfFiqr1mmDbv9ydXvEAuFsD+6DpcZuJcSSw==
"@firebase/auth-interop-types@0.1.5":
version "0.1.5"
resolved "https://registry.yarnpkg.com/@firebase/auth-interop-types/-/auth-interop-types-0.1.5.tgz#9fc9bd7c879f16b8d1bb08373a0f48c3a8b74557"
integrity sha512-88h74TMQ6wXChPA6h9Q3E1Jg6TkTHep2+k63OWg3s0ozyGVMeY+TTOti7PFPzq5RhszQPQOoCi59es4MaRvgCw==
"@firebase/auth-interop-types@0.1.6":
version "0.1.6"
resolved "https://registry.yarnpkg.com/@firebase/auth-interop-types/-/auth-interop-types-0.1.6.tgz#5ce13fc1c527ad36f1bb1322c4492680a6cf4964"
integrity sha512-etIi92fW3CctsmR9e3sYM3Uqnoq861M0Id9mdOPF6PWIg38BXL5k4upCNBggGUpLIS0H1grMOvy/wn1xymwe2g==
"@firebase/component@0.1.19":
version "0.1.19"
resolved "https://registry.yarnpkg.com/@firebase/component/-/component-0.1.19.tgz#bd2ac601652c22576b574c08c40da245933dbac7"
integrity sha512-L0S3g8eqaerg8y0zox3oOHSTwn/FE8RbcRHiurnbESvDViZtP5S5WnhuAPd7FnFxa8ElWK0z1Tr3ikzWDv1xdQ==
"@firebase/component@0.5.3":
version "0.5.3"
resolved "https://registry.yarnpkg.com/@firebase/component/-/component-0.5.3.tgz#1ccd4d0814f9c1d7f179deab2122374f74571315"
integrity sha512-/TzwmlK35Mnr31zA9D4X0Obln7waAtV7nDLuNVtWhlXl0sSYRxnGES4dOhSXi0yWRneaNr+OiRBZ2gsc9PWWRg==
dependencies:
"@firebase/util" "0.3.2"
tslib "^1.11.1"
"@firebase/util" "1.1.0"
tslib "^2.1.0"
"@firebase/database-types@0.5.2", "@firebase/database-types@^0.5.2":
version "0.5.2"
resolved "https://registry.yarnpkg.com/@firebase/database-types/-/database-types-0.5.2.tgz#23bec8477f84f519727f165c687761e29958b63c"
integrity sha512-ap2WQOS3LKmGuVFKUghFft7RxXTyZTDr0Xd8y2aqmWsbJVjgozi0huL/EUMgTjGFrATAjcf2A7aNs8AKKZ2a8g==
"@firebase/database-types@0.7.2", "@firebase/database-types@^0.7.2":
version "0.7.2"
resolved "https://registry.yarnpkg.com/@firebase/database-types/-/database-types-0.7.2.tgz#449c4b36ec59a1ad9089797b540e2ba1c0d4fcbf"
integrity sha512-cdAd/dgwvC0r3oLEDUR+ULs1vBsEvy0b27nlzKhU6LQgm9fCDzgaH9nFGv8x+S9dly4B0egAXkONkVoWcOAisg==
dependencies:
"@firebase/app-types" "0.6.1"
"@firebase/app-types" "0.6.2"
"@firebase/database@^0.6.10":
version "0.6.13"
resolved "https://registry.yarnpkg.com/@firebase/database/-/database-0.6.13.tgz#b96fe0c53757dd6404ee085fdcb45c0f9f525c17"
integrity sha512-NommVkAPzU7CKd1gyehmi3lz0K78q0KOfiex7Nfy7MBMwknLm7oNqKovXSgQV1PCLvKXvvAplDSFhDhzIf9obA==
"@firebase/database@^0.10.0":
version "0.10.5"
resolved "https://registry.yarnpkg.com/@firebase/database/-/database-0.10.5.tgz#99de469642768766fdefcc560d04a091d1390de2"
integrity sha512-/KAFZGSvvL3J4EytZsl5kgqhZwEV+ZTz6mCS3VPigkkECzT1E/JRm9h8DY5/VWmoyfqc5O2F3kqrrLf7AovoHg==
dependencies:
"@firebase/auth-interop-types" "0.1.5"
"@firebase/component" "0.1.19"
"@firebase/database-types" "0.5.2"
"@firebase/auth-interop-types" "0.1.6"
"@firebase/component" "0.5.3"
"@firebase/database-types" "0.7.2"
"@firebase/logger" "0.2.6"
"@firebase/util" "0.3.2"
"@firebase/util" "1.1.0"
faye-websocket "0.11.3"
tslib "^1.11.1"
tslib "^2.1.0"
"@firebase/logger@0.2.6":
version "0.2.6"
resolved "https://registry.yarnpkg.com/@firebase/logger/-/logger-0.2.6.tgz#3aa2ca4fe10327cabf7808bd3994e88db26d7989"
integrity sha512-KIxcUvW/cRGWlzK9Vd2KB864HlUnCfdTH0taHE0sXW5Xl7+W68suaeau1oKNEqmc3l45azkd4NzXTCWZRZdXrw==
"@firebase/util@0.3.2":
version "0.3.2"
resolved "https://registry.yarnpkg.com/@firebase/util/-/util-0.3.2.tgz#87de27f9cffc2324651cabf6ec133d0a9eb21b52"
integrity sha512-Dqs00++c8rwKky6KCKLLY2T1qYO4Q+X5t+lF7DInXDNF4ae1Oau35bkD+OpJ9u7l1pEv7KHowP6CUKuySCOc8g==
"@firebase/util@1.1.0":
version "1.1.0"
resolved "https://registry.yarnpkg.com/@firebase/util/-/util-1.1.0.tgz#add2d57d0b2307a932520abdee303b66be0ac8b0"
integrity sha512-lfuSASuPKNdfebuFR8rjFamMQUPH9iiZHcKS755Rkm/5gRT0qC7BMhCh3ZkHf7NVbplzIc/GhmX2jM+igDRCag==
dependencies:
tslib "^1.11.1"
tslib "^2.1.0"
"@google-cloud/common@^3.3.0":
version "3.4.1"
@@ -106,14 +106,15 @@
retry-request "^4.1.1"
teeny-request "^7.0.0"
"@google-cloud/firestore@^4.0.0":
version "4.4.0"
resolved "https://registry.yarnpkg.com/@google-cloud/firestore/-/firestore-4.4.0.tgz#6cdbd462f32a8f94e138c57ef81195156c79e680"
integrity sha512-nixsumd4C7eL+hHEgyihspzhBBNe3agsvNFRX0xfqO3uR/6ro4CUj9XdcCvdnSSd3yTyqKfdBSRK2fEj1jIbYg==
"@google-cloud/firestore@^4.5.0":
version "4.12.3"
resolved "https://registry.yarnpkg.com/@google-cloud/firestore/-/firestore-4.12.3.tgz#eef62aceec5b1193385cfe3a2f39b628db353484"
integrity sha512-FTty3+paAj73KEfTJEpDxG9apLp9K3DySTeeewLLdljusRjZFgJ3jIiqi7tAKJjVsKOiXY4NRk4/0rpEQhHitQ==
dependencies:
fast-deep-equal "^3.1.1"
functional-red-black-tree "^1.0.1"
google-gax "^2.2.0"
google-gax "^2.12.0"
protobufjs "^6.8.6"
"@google-cloud/paginator@^2.0.0":
version "2.0.3"
@@ -188,7 +189,7 @@
arrify "^2.0.0"
compressible "^2.0.12"
concat-stream "^2.0.0"
date-and-time "^0.14.2"
date-and-time "^0.14.0"
duplexify "^3.5.0"
extend "^3.0.2"
gaxios "^3.0.0"
@@ -217,15 +218,12 @@
dependencies:
semver "^6.2.0"
"@grpc/grpc-js@~1.1.1":
version "1.1.7"
resolved "https://registry.yarnpkg.com/@grpc/grpc-js/-/grpc-js-1.1.7.tgz#d3d71c6da95397e2d63895ccc4a05e7572f7b7e6"
integrity sha512-EuxMstI0u778dp0nk6Fe3gHXYPeV6FYsWOe0/QFwxv1NQ6bc5Wl/0Yxa4xl9uBlKElL6AIxuASmSfu7KEJhqiw==
"@grpc/grpc-js@~1.3.0":
version "1.3.4"
resolved "https://registry.yarnpkg.com/@grpc/grpc-js/-/grpc-js-1.3.4.tgz#5c4f5df717cd10cc5ebbc7523504008d1ff7b322"
integrity sha512-AxtZcm0mArQhY9z8T3TynCYVEaSKxNCa9mVhVwBCUnsuUEe8Zn94bPYYKVQSLt+hJJ1y0ukr3mUvtWfcATL/IQ==
dependencies:
"@grpc/proto-loader" "^0.6.0-pre14"
"@types/node" "^12.12.47"
google-auth-library "^6.0.0"
semver "^6.2.0"
"@types/node" ">=12.12.47"
"@grpc/proto-loader@^0.5.1":
version "0.5.5"
@@ -235,22 +233,27 @@
lodash.camelcase "^4.3.0"
protobufjs "^6.8.6"
"@grpc/proto-loader@^0.6.0-pre14":
version "0.6.0-pre9"
resolved "https://registry.yarnpkg.com/@grpc/proto-loader/-/proto-loader-0.6.0-pre9.tgz#0c6fe42f6c5ef9ce1b3cef7be64d5b09d6fe4d6d"
integrity sha512-oM+LjpEjNzW5pNJjt4/hq1HYayNeQT+eGrOPABJnYHv7TyNPDNzkQ76rDYZF86X5swJOa4EujEMzQ9iiTdPgww==
"@grpc/proto-loader@^0.6.1":
version "0.6.3"
resolved "https://registry.yarnpkg.com/@grpc/proto-loader/-/proto-loader-0.6.3.tgz#9d3617e514295b2a6fe7e7ce28177af4783b6b12"
integrity sha512-AtMWwb7kY8DdtwIQh2hC4YFM1MzZ22lMA+gjbnCYDgICt14vX2tCa59bDrEjFyOI4LvORjpvT/UhHUdKvsX8og==
dependencies:
"@types/long" "^4.0.1"
lodash.camelcase "^4.3.0"
long "^4.0.0"
protobufjs "^6.9.0"
yargs "^15.3.1"
protobufjs "^6.10.0"
yargs "^16.1.1"
"@jsdevtools/ono@^7.1.3":
version "7.1.3"
resolved "https://registry.yarnpkg.com/@jsdevtools/ono/-/ono-7.1.3.tgz#9df03bbd7c696a5c58885c34aa06da41c8543796"
integrity sha512-4JQNk+3mVzK3xh2rqd6RB4J46qUR19azEHBneZyTZM+c456qOrbbM/5xcR8huNCCcbVt7+UmizG6GuUvPvKUYg==
"@panva/asn1.js@^1.0.0":
version "1.0.0"
resolved "https://registry.yarnpkg.com/@panva/asn1.js/-/asn1.js-1.0.0.tgz#dd55ae7b8129e02049f009408b97c61ccf9032f6"
integrity sha512-UdkG3mLEqXgnlKsWanWcgb6dOjUzJ+XC5f+aWw30qrtjxeNUSfKX1cd5FBzOaXQumoe9nIqeZUvrRJS03HCCtw==
"@protobufjs/aspromise@^1.1.1", "@protobufjs/aspromise@^1.1.2":
version "1.1.2"
resolved "https://registry.yarnpkg.com/@protobufjs/aspromise/-/aspromise-1.1.2.tgz#9b8b0cc663d669a7d8f6f5d0893a14d348f30fbf"
@@ -329,11 +332,6 @@
"@types/connect" "*"
"@types/node" "*"
"@types/caseless@*":
version "0.12.2"
resolved "https://registry.yarnpkg.com/@types/caseless/-/caseless-0.12.2.tgz#f65d3d6389e01eeb458bd54dc8f52b95a9463bc8"
integrity sha512-6ckxMjBBD8URvjB6J3NcnuAn5Pkl7t3TizAg+xdlzzQGSPSmBcXf8KoIH0ua/i+tio+ZRUHEXp0HEmvaR4kt0w==
"@types/connect@*":
version "3.4.33"
resolved "https://registry.yarnpkg.com/@types/connect/-/connect-3.4.33.tgz#31610c901eca573b8713c3330abc6e6b9f588546"
@@ -348,6 +346,14 @@
dependencies:
"@types/node" "*"
"@types/express-jwt@0.0.42":
version "0.0.42"
resolved "https://registry.yarnpkg.com/@types/express-jwt/-/express-jwt-0.0.42.tgz#4f04e1fadf9d18725950dc041808a4a4adf7f5ae"
integrity sha512-WszgUddvM1t5dPpJ3LhWNH8kfNN8GPIBrAGxgIYXVCEGx6Bx4A036aAuf/r5WH9DIEdlmp7gHOYvSM6U87B0ag==
dependencies:
"@types/express" "*"
"@types/express-unless" "*"
"@types/express-serve-static-core@*":
version "4.17.13"
resolved "https://registry.yarnpkg.com/@types/express-serve-static-core/-/express-serve-static-core-4.17.13.tgz#d9af025e925fc8b089be37423b8d1eac781be084"
@@ -357,6 +363,32 @@
"@types/qs" "*"
"@types/range-parser" "*"
"@types/express-serve-static-core@^4.17.18":
version "4.17.21"
resolved "https://registry.yarnpkg.com/@types/express-serve-static-core/-/express-serve-static-core-4.17.21.tgz#a427278e106bca77b83ad85221eae709a3414d42"
integrity sha512-gwCiEZqW6f7EoR8TTEfalyEhb1zA5jQJnRngr97+3pzMaO1RKoI1w2bw07TK72renMUVWcWS5mLI6rk1NqN0nA==
dependencies:
"@types/node" "*"
"@types/qs" "*"
"@types/range-parser" "*"
"@types/express-unless@*":
version "0.5.1"
resolved "https://registry.yarnpkg.com/@types/express-unless/-/express-unless-0.5.1.tgz#4f440b905e42bbf53382b8207bc337dc5ff9fd1f"
integrity sha512-5fuvg7C69lemNgl0+v+CUxDYWVPSfXHhJPst4yTLcqi4zKJpORCxnDrnnilk3k0DTq/WrAUdvXFs01+vUqUZHw==
dependencies:
"@types/express" "*"
"@types/express@*":
version "4.17.12"
resolved "https://registry.yarnpkg.com/@types/express/-/express-4.17.12.tgz#4bc1bf3cd0cfe6d3f6f2853648b40db7d54de350"
integrity sha512-pTYas6FrP15B1Oa0bkN5tQMNqOcVXa9j4FTFtO8DWI9kppKib+6NJtfTOOLcwxuuYvcX2+dVG6et1SxW/Kc17Q==
dependencies:
"@types/body-parser" "*"
"@types/express-serve-static-core" "^4.17.18"
"@types/qs" "*"
"@types/serve-static" "*"
"@types/express@4.17.3":
version "4.17.3"
resolved "https://registry.yarnpkg.com/@types/express/-/express-4.17.3.tgz#38e4458ce2067873b09a73908df488870c303bd9"
@@ -393,15 +425,10 @@
resolved "https://registry.yarnpkg.com/@types/node/-/node-14.11.10.tgz#8c102aba13bf5253f35146affbf8b26275069bef"
integrity sha512-yV1nWZPlMFpoXyoknm4S56y2nlTAuFYaJuQtYRAOU7xA/FJ9RY0Xm7QOkaYMMmr8ESdHIuUb6oQgR/0+2NqlyA==
"@types/node@^10.10.0":
version "10.17.40"
resolved "https://registry.yarnpkg.com/@types/node/-/node-10.17.40.tgz#8a50e47daff15fd4a89dc56f5221b3729e506be6"
integrity sha512-3hZT2z2/531A5pc8hYhn1gU5Qb1SIRSgMLQ6zuHA5xtt16lWAxUGprtr8lJuc9zNJMXEIIBWfSnzqBP/4mglpA==
"@types/node@^12.12.47":
version "12.12.68"
resolved "https://registry.yarnpkg.com/@types/node/-/node-12.12.68.tgz#dd5acf4a52a458ff1d9ef4fd66406fba0afbbb33"
integrity sha512-3RW2s24ewB7F9dAHvgb9FRvNHn6nO9IK6Eaknbz7HTOe2a5GVne5XbUh5+YA+kcCn67glyHhClUUdFP73LWrgQ==
"@types/node@>=12.12.47", "@types/node@>=13.7.0":
version "15.12.5"
resolved "https://registry.yarnpkg.com/@types/node/-/node-15.12.5.tgz#9a78318a45d75c9523d2396131bd3cca54b2d185"
integrity sha512-se3yX7UHv5Bscf8f1ERKvQOD6sTyycH3hdaoozvaLxgUiY5lIGEeH37AD0G0Qi9kPqihPn0HOfd2yaIEN9VwEg==
"@types/node@^13.7.0":
version "13.13.26"
@@ -423,16 +450,6 @@
resolved "https://registry.yarnpkg.com/@types/range-parser/-/range-parser-1.2.3.tgz#7ee330ba7caafb98090bece86a5ee44115904c2c"
integrity sha512-ewFXqrQHlFsgc09MK5jP5iR7vumV/BYayNC6PgJO2LPe8vrnNFyjQjSppfEngITi0qvfKtzFvgKymGheFM9UOA==
"@types/request@^2.48.5":
version "2.48.5"
resolved "https://registry.yarnpkg.com/@types/request/-/request-2.48.5.tgz#019b8536b402069f6d11bee1b2c03e7f232937a0"
integrity sha512-/LO7xRVnL3DxJ1WkPGDQrp4VTV1reX9RkC85mJ+Qzykj2Bdw+mG15aAfDahc76HtknjzE16SX/Yddn6MxVbmGQ==
dependencies:
"@types/caseless" "*"
"@types/node" "*"
"@types/tough-cookie" "*"
form-data "^2.5.0"
"@types/serve-static@*":
version "1.13.5"
resolved "https://registry.yarnpkg.com/@types/serve-static/-/serve-static-1.13.5.tgz#3d25d941a18415d3ab092def846e135a08bbcf53"
@@ -441,11 +458,6 @@
"@types/express-serve-static-core" "*"
"@types/mime" "*"
"@types/tough-cookie@*":
version "4.0.0"
resolved "https://registry.yarnpkg.com/@types/tough-cookie/-/tough-cookie-4.0.0.tgz#fef1904e4668b6e5ecee60c52cc6a078ffa6697d"
integrity sha512-I99sngh224D0M7XgW1s120zxCt3VYQ3IQsuw3P3jbq5GG4yc79+ZjyKznyOGIQrflfylLgcfekeZW/vk0yng6A==
JSONStream@^1.2.1:
version "1.3.5"
resolved "https://registry.yarnpkg.com/JSONStream/-/JSONStream-1.3.5.tgz#3208c1f08d3a4d99261ab64f92302bc15e111ca0"
@@ -884,7 +896,7 @@ callsites@^3.0.0:
resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73"
integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==
camelcase@^5.0.0, camelcase@^5.3.1:
camelcase@^5.3.1:
version "5.3.1"
resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320"
integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==
@@ -1033,14 +1045,14 @@ cli-width@^2.0.0:
resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-2.2.1.tgz#b0433d0b4e9c847ef18868a4ef16fd5fc8271c48"
integrity sha512-GRMWDxpOB6Dgk2E5Uo+3eEBvtOOlimMmpbFiKuLFnQzYDavtLFY3K5ona41jgN/WdRZtG7utuVSVTL4HbZHGkw==
cliui@^6.0.0:
version "6.0.0"
resolved "https://registry.yarnpkg.com/cliui/-/cliui-6.0.0.tgz#511d702c0c4e41ca156d7d0e96021f23e13225b1"
integrity sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==
cliui@^7.0.2:
version "7.0.4"
resolved "https://registry.yarnpkg.com/cliui/-/cliui-7.0.4.tgz#a0265ee655476fc807aea9df3df8df7783808b4f"
integrity sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==
dependencies:
string-width "^4.2.0"
strip-ansi "^6.0.0"
wrap-ansi "^6.2.0"
wrap-ansi "^7.0.0"
clone-response@^1.0.2:
version "1.0.2"
@@ -1340,7 +1352,7 @@ dashdash@^1.12.0:
dependencies:
assert-plus "^1.0.0"
date-and-time@^0.14.2:
date-and-time@^0.14.0:
version "0.14.2"
resolved "https://registry.yarnpkg.com/date-and-time/-/date-and-time-0.14.2.tgz#a4266c3dead460f6c231fe9674e585908dac354e"
integrity sha512-EFTCh9zRSEpGPmJaexg7HTuzZHh6cnJj1ui7IGCFNXzd2QdpsNh05Db5TF3xzJm30YN+A8/6xHSuRcQqoc3kFA==
@@ -1373,10 +1385,12 @@ debug@^3.1.1:
dependencies:
ms "^2.1.1"
decamelize@^1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290"
integrity sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=
debug@^4.1.0:
version "4.3.1"
resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.1.tgz#f0d229c505e0c6d8c49ac553d1b13dc183f6b2ee"
integrity sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==
dependencies:
ms "2.1.2"
decompress-response@^3.3.0:
version "3.3.0"
@@ -1593,6 +1607,11 @@ es6-weak-map@^2.0.2:
es6-iterator "^2.0.3"
es6-symbol "^3.1.1"
escalade@^3.1.1:
version "3.1.1"
resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40"
integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==
escape-goat@^2.0.0:
version "2.1.1"
resolved "https://registry.yarnpkg.com/escape-goat/-/escape-goat-2.1.1.tgz#1b2dc77003676c457ec760b2dc68edb648188675"
@@ -1767,7 +1786,7 @@ fast-safe-stringify@^2.0.4:
resolved "https://registry.yarnpkg.com/fast-safe-stringify/-/fast-safe-stringify-2.0.7.tgz#124aa885899261f68aedb42a7c080de9da608743"
integrity sha512-Utm6CdzT+6xsDk2m8S6uL8VHxNwI6Jub+e9NYTcAms28T84pTa25GJQV9j0CY0N1rM8hK4x6grpF2BQf+2qwVA==
fast-text-encoding@^1.0.0:
fast-text-encoding@^1.0.0, fast-text-encoding@^1.0.3:
version "1.0.3"
resolved "https://registry.yarnpkg.com/fast-text-encoding/-/fast-text-encoding-1.0.3.tgz#ec02ac8e01ab8a319af182dae2681213cfe9ce53"
integrity sha512-dtm4QZH9nZtcDt8qJiOH9fcQd1NAgi+K1O2DbE6GG1PPCK/BWfOH3idCTRQ4ImXRUOyopDEgDEnVEE7Y/2Wrig==
@@ -1838,25 +1857,26 @@ find-versions@^3.2.0:
dependencies:
semver-regex "^2.0.0"
firebase-admin@^9.2.0:
version "9.2.0"
resolved "https://registry.yarnpkg.com/firebase-admin/-/firebase-admin-9.2.0.tgz#df5176e2d0c5711df6dbf7012320492a703538ea"
integrity sha512-LhnMYl71B4gP1FlTLfwaYlOWhBCAcNF+byb2CPTfaW/T4hkp4qlXOgo2bws/zbAv5X9GTFqGir3KexMslVGsIA==
firebase-admin@^9.10.0:
version "9.10.0"
resolved "https://registry.yarnpkg.com/firebase-admin/-/firebase-admin-9.10.0.tgz#7705003c5b01c01503b6a6cb0af9ef5a533143ed"
integrity sha512-4mB15zkzSpnLxpBrWJr7ad68ydYB/MMkS53N2XxfFwgz9QuFVCyHhznAno6FP7v+BtZkEJPdVd36nbH1yKS1UQ==
dependencies:
"@firebase/database" "^0.6.10"
"@firebase/database-types" "^0.5.2"
"@types/node" "^10.10.0"
"@firebase/database" "^0.10.0"
"@firebase/database-types" "^0.7.2"
"@types/node" ">=12.12.47"
dicer "^0.3.0"
jsonwebtoken "^8.5.1"
jwks-rsa "^2.0.2"
node-forge "^0.10.0"
optionalDependencies:
"@google-cloud/firestore" "^4.0.0"
"@google-cloud/firestore" "^4.5.0"
"@google-cloud/storage" "^5.3.0"
firebase-functions@^3.11.0:
version "3.11.0"
resolved "https://registry.yarnpkg.com/firebase-functions/-/firebase-functions-3.11.0.tgz#92f5a6af6a10641da6dc9b41b29974658b621a7b"
integrity sha512-i1uMhZ/M6i5SCI3ulKo7EWX0/LD+I5o6N+sk0HbOWfzyWfOl0iJTvQkR3BVDcjrlhPVC4xG1bDTLxd+DTkLqaw==
firebase-functions@^3.14.1:
version "3.14.1"
resolved "https://registry.yarnpkg.com/firebase-functions/-/firebase-functions-3.14.1.tgz#3ac5bc70989365874f41d06bca3b42a233dd6039"
integrity sha512-hL/qm+i5i1qKYmAFMlQ4mwRngDkP+3YT3F4E4Nd5Hj2QKeawBdZiMGgEt6zqTx08Zq04vHiSnSM0z75UJRSg6Q==
dependencies:
"@types/express" "4.17.3"
cors "^2.8.5"
@@ -1941,15 +1961,6 @@ forever-agent@~0.6.1:
resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91"
integrity sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=
form-data@^2.5.0:
version "2.5.1"
resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.5.1.tgz#f2cbec57b5e59e23716e128fe44d4e5dd23895f4"
integrity sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA==
dependencies:
asynckit "^0.4.0"
combined-stream "^1.0.6"
mime-types "^2.1.12"
form-data@~2.3.2:
version "2.3.3"
resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.3.tgz#dcce52c05f644f298c6a7ab936bd724ceffbf3a6"
@@ -2075,6 +2086,17 @@ gaxios@^3.0.0:
is-stream "^2.0.0"
node-fetch "^2.3.0"
gaxios@^4.0.0:
version "4.3.0"
resolved "https://registry.yarnpkg.com/gaxios/-/gaxios-4.3.0.tgz#ad4814d89061f85b97ef52aed888c5dbec32f774"
integrity sha512-pHplNbslpwCLMyII/lHPWFQbJWOX0B3R1hwBEOvzYi1GmdKZruuEHK4N9V6f7tf1EaPYyF80mui1+344p6SmLg==
dependencies:
abort-controller "^3.0.0"
extend "^3.0.2"
https-proxy-agent "^5.0.0"
is-stream "^2.0.0"
node-fetch "^2.3.0"
gcp-metadata@^3.4.0:
version "3.5.0"
resolved "https://registry.yarnpkg.com/gcp-metadata/-/gcp-metadata-3.5.0.tgz#6d28343f65a6bbf8449886a0c0e4a71c77577055"
@@ -2091,6 +2113,14 @@ gcp-metadata@^4.1.0:
gaxios "^3.0.0"
json-bigint "^1.0.0"
gcp-metadata@^4.2.0:
version "4.3.0"
resolved "https://registry.yarnpkg.com/gcp-metadata/-/gcp-metadata-4.3.0.tgz#0423d06becdbfb9cbb8762eaacf14d5324997900"
integrity sha512-L9XQUpvKJCM76YRSmcxrR4mFPzPGsgZUH+GgHMxAET8qc6+BhRJq63RLhWakgEO2KKVgeSDVfyiNjkGSADwNTA==
dependencies:
gaxios "^4.0.0"
json-bigint "^1.0.0"
gcs-resumable-upload@^3.1.0:
version "3.1.1"
resolved "https://registry.yarnpkg.com/gcs-resumable-upload/-/gcs-resumable-upload-3.1.1.tgz#67c766a0555d6a352f9651b7603337207167d0de"
@@ -2104,7 +2134,7 @@ gcs-resumable-upload@^3.1.0:
pumpify "^2.0.0"
stream-events "^1.0.4"
get-caller-file@^2.0.1:
get-caller-file@^2.0.5:
version "2.0.5"
resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e"
integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==
@@ -2200,6 +2230,21 @@ google-auth-library@^6.0.0, google-auth-library@^6.1.1:
jws "^4.0.0"
lru-cache "^6.0.0"
google-auth-library@^7.0.2:
version "7.1.2"
resolved "https://registry.yarnpkg.com/google-auth-library/-/google-auth-library-7.1.2.tgz#29fc0fe8b6d5a59b93b7cb561b1a28bcc93360b7"
integrity sha512-FMipHgfe2u1LzWsf2n9zEB9KsJ8M3n8OYTHbHtlkzPCyo7IknXQR5X99nfvwUHGuX+iEpihUZxDuPm7+qBYeXg==
dependencies:
arrify "^2.0.0"
base64-js "^1.3.0"
ecdsa-sig-formatter "^1.0.11"
fast-text-encoding "^1.0.0"
gaxios "^4.0.0"
gcp-metadata "^4.2.0"
gtoken "^5.0.4"
jws "^4.0.0"
lru-cache "^6.0.0"
google-gax@^1.14.2:
version "1.15.3"
resolved "https://registry.yarnpkg.com/google-gax/-/google-gax-1.15.3.tgz#e88cdcbbd19c7d88cc5fd7d7b932c4d1979a5aca"
@@ -2221,20 +2266,22 @@ google-gax@^1.14.2:
semver "^6.0.0"
walkdir "^0.4.0"
google-gax@^2.2.0:
version "2.9.0"
resolved "https://registry.yarnpkg.com/google-gax/-/google-gax-2.9.0.tgz#84edef8715d82c0f91a6e5485b8f2803d2690f00"
integrity sha512-MFMwA7Fb8PEwjnYwfGXjZMidCNyMl3gSnvS/+kS8TQioJZQDpzK+W3dmwyNyig/U13+kbABqDnbkkAXJ5NiUkw==
google-gax@^2.12.0:
version "2.17.0"
resolved "https://registry.yarnpkg.com/google-gax/-/google-gax-2.17.0.tgz#363791efaca3bd242e34e92295937e9215666d17"
integrity sha512-Ze/Oq0atVNKyKvDzQFU8B82V9w36GELQruXGsiY1jnySbieZ9vS75v98V/Z10PktmSVqis4sQ+FwK2gkgwIiiw==
dependencies:
"@grpc/grpc-js" "~1.1.1"
"@grpc/proto-loader" "^0.5.1"
"@grpc/grpc-js" "~1.3.0"
"@grpc/proto-loader" "^0.6.1"
"@types/long" "^4.0.0"
abort-controller "^3.0.0"
duplexify "^4.0.0"
google-auth-library "^6.0.0"
fast-text-encoding "^1.0.3"
google-auth-library "^7.0.2"
is-stream-ended "^0.1.4"
node-fetch "^2.6.1"
protobufjs "^6.9.0"
object-hash "^2.1.1"
protobufjs "^6.10.2"
retry-request "^4.0.0"
google-gax@~1.12.0:
@@ -2262,7 +2309,7 @@ google-p12-pem@^2.0.0:
resolved "https://registry.yarnpkg.com/google-p12-pem/-/google-p12-pem-2.0.4.tgz#036462394e266472632a78b685f0cc3df4ef337b"
integrity sha512-S4blHBQWZRnEW44OcR7TL9WR+QCqByRvhNDZ/uuQfpxywfupikf/miba8js1jZi6ZOGv5slgSuoshCWh6EMDzg==
dependencies:
node-forge "^0.10.0"
node-forge "^0.9.0"
google-p12-pem@^3.0.3:
version "3.0.3"
@@ -2712,6 +2759,13 @@ join-path@^1.1.1:
url-join "0.0.1"
valid-url "^1"
jose@^2.0.5:
version "2.0.5"
resolved "https://registry.yarnpkg.com/jose/-/jose-2.0.5.tgz#29746a18d9fff7dcf9d5d2a6f62cb0c7cd27abd3"
integrity sha512-BAiDNeDKTMgk4tvD0BbxJ8xHEHBZgpeRZ1zGPPsitSyMgjoMWiLGYAE7H7NpP5h0lPppQajQs871E8NHUrzVPA==
dependencies:
"@panva/asn1.js" "^1.0.0"
js-tokens@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499"
@@ -2844,6 +2898,17 @@ jwa@^2.0.0:
ecdsa-sig-formatter "1.0.11"
safe-buffer "^5.0.1"
jwks-rsa@^2.0.2:
version "2.0.3"
resolved "https://registry.yarnpkg.com/jwks-rsa/-/jwks-rsa-2.0.3.tgz#4059f25e27f1d9cb5681dd12a98e46f8aa39fcbd"
integrity sha512-/rkjXRWAp0cS00tunsHResw68P5iTQru8+jHufLNv3JHc4nObFEndfEUSuPugh09N+V9XYxKUqi7QrkmCHSSSg==
dependencies:
"@types/express-jwt" "0.0.42"
debug "^4.1.0"
jose "^2.0.5"
limiter "^1.1.5"
lru-memoizer "^2.1.2"
jws@^3.2.2:
version "3.2.2"
resolved "https://registry.yarnpkg.com/jws/-/jws-3.2.2.tgz#001099f3639468c9414000e99995fa52fb478304"
@@ -2898,6 +2963,11 @@ leven@^3.1.0:
resolved "https://registry.yarnpkg.com/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2"
integrity sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==
limiter@^1.1.5:
version "1.1.5"
resolved "https://registry.yarnpkg.com/limiter/-/limiter-1.1.5.tgz#8f92a25b3b16c6131293a0cc834b4a838a2aa7c2"
integrity sha512-FWWMIEOxz3GwUI4Ts/IvgVy6LPvoMPgjMdQ185nN6psJyBJ4yOpzqm695/h5umdLJg2vW3GR5iG11MAkR2AzJA==
lines-and-columns@^1.1.6:
version "1.1.6"
resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.1.6.tgz#1c00c743b433cd0a4e80758f7b64a57440d9ff00"
@@ -2942,6 +3012,11 @@ lodash.camelcase@^4.3.0:
resolved "https://registry.yarnpkg.com/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz#b28aa6288a2b9fc651035c7711f65ab6190331a6"
integrity sha1-soqmKIorn8ZRA1x3EfZathkDMaY=
lodash.clonedeep@^4.5.0:
version "4.5.0"
resolved "https://registry.yarnpkg.com/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz#e23f3f9c4f8fbdde872529c1071857a086e5ccef"
integrity sha1-4j8/nE+Pvd6HJSnBBxhXoIblzO8=
lodash.defaults@^4.2.0:
version "4.2.0"
resolved "https://registry.yarnpkg.com/lodash.defaults/-/lodash.defaults-4.2.0.tgz#d09178716ffea4dde9e5fb7b37f6f0802274580c"
@@ -3102,6 +3177,22 @@ lru-cache@^6.0.0:
dependencies:
yallist "^4.0.0"
lru-cache@~4.0.0:
version "4.0.2"
resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.0.2.tgz#1d17679c069cda5d040991a09dbc2c0db377e55e"
integrity sha1-HRdnnAac2l0ECZGgnbwsDbN35V4=
dependencies:
pseudomap "^1.0.1"
yallist "^2.0.0"
lru-memoizer@^2.1.2:
version "2.1.4"
resolved "https://registry.yarnpkg.com/lru-memoizer/-/lru-memoizer-2.1.4.tgz#b864d92b557f00b1eeb322156a0409cb06dafac6"
integrity sha512-IXAq50s4qwrOBrXJklY+KhgZF+5y98PDaNo0gi/v2KQBFLyWr+JyFvijZXkGKjQj/h9c0OwoE+JZbwUXce76hQ==
dependencies:
lodash.clonedeep "^4.5.0"
lru-cache "~4.0.0"
lru-queue@0.1:
version "0.1.0"
resolved "https://registry.yarnpkg.com/lru-queue/-/lru-queue-0.1.0.tgz#2738bd9f0d3cf4f84490c5736c48699ac632cda3"
@@ -3367,6 +3458,11 @@ node-forge@^0.10.0:
resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.10.0.tgz#32dea2afb3e9926f02ee5ce8794902691a676bf3"
integrity sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==
node-forge@^0.9.0:
version "0.9.2"
resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.9.2.tgz#b35a44c28889b2ea55cabf8c79e3563f9676190a"
integrity sha512-naKSScof4Wn+aoHU6HBsifh92Zeicm1GDQKd1vp3Y/kOi8ub0DozCa9KpvYNCXslFHYRmLNiqRopGdTGwNLpNw==
node-gyp@^7.1.2:
version "7.1.2"
resolved "https://registry.yarnpkg.com/node-gyp/-/node-gyp-7.1.2.tgz#21a810aebb187120251c3bcec979af1587b188ae"
@@ -3432,6 +3528,11 @@ object-assign@^4, object-assign@^4.1.0:
resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863"
integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=
object-hash@^2.1.1:
version "2.2.0"
resolved "https://registry.yarnpkg.com/object-hash/-/object-hash-2.2.0.tgz#5ad518581eefc443bd763472b8ff2e9c2c0d54a5"
integrity sha512-gScRMn0bS5fH+IuwyIFgnh9zBdo4DV+6GhygmWM9HyNJSgS0hScp1f5vjtm7oIIOiT9trXrShAkLFSc2IqKNgw==
on-finished@^2.2.0, on-finished@~2.3.0:
version "2.3.0"
resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.3.0.tgz#20f1336481b083cd75337992a16971aa2d906947"
@@ -3700,7 +3801,26 @@ promise-breaker@^5.0.0:
resolved "https://registry.yarnpkg.com/promise-breaker/-/promise-breaker-5.0.0.tgz#58e8541f1619554057da95a211794d7834d30c1d"
integrity sha512-mgsWQuG4kJ1dtO6e/QlNDLFtMkMzzecsC69aI5hlLEjGHFNpHrvGhFi4LiK5jg2SMQj74/diH+wZliL9LpGsyA==
protobufjs@^6.8.1, protobufjs@^6.8.6, protobufjs@^6.8.8, protobufjs@^6.8.9, protobufjs@^6.9.0:
protobufjs@^6.10.0, protobufjs@^6.10.2:
version "6.11.2"
resolved "https://registry.yarnpkg.com/protobufjs/-/protobufjs-6.11.2.tgz#de39fabd4ed32beaa08e9bb1e30d08544c1edf8b"
integrity sha512-4BQJoPooKJl2G9j3XftkIXjoC9C0Av2NOrWmbLWT1vH32GcSUHjM0Arra6UfTsVyfMAuFzaLucXn1sadxJydAw==
dependencies:
"@protobufjs/aspromise" "^1.1.2"
"@protobufjs/base64" "^1.1.2"
"@protobufjs/codegen" "^2.0.4"
"@protobufjs/eventemitter" "^1.1.0"
"@protobufjs/fetch" "^1.1.0"
"@protobufjs/float" "^1.0.2"
"@protobufjs/inquire" "^1.1.0"
"@protobufjs/path" "^1.1.2"
"@protobufjs/pool" "^1.1.0"
"@protobufjs/utf8" "^1.1.0"
"@types/long" "^4.0.1"
"@types/node" ">=13.7.0"
long "^4.0.0"
protobufjs@^6.8.1, protobufjs@^6.8.6, protobufjs@^6.8.8, protobufjs@^6.8.9:
version "6.10.1"
resolved "https://registry.yarnpkg.com/protobufjs/-/protobufjs-6.10.1.tgz#e6a484dd8f04b29629e9053344e3970cccf13cd2"
integrity sha512-pb8kTchL+1Ceg4lFd5XUpK8PdWacbvV5SK2ULH2ebrYtl4GjJmS24m6CKME67jzV53tbJxHlnNOSqQHbTsR9JQ==
@@ -3727,6 +3847,11 @@ proxy-addr@~2.0.5:
forwarded "~0.1.2"
ipaddr.js "1.9.1"
pseudomap@^1.0.1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3"
integrity sha1-8FKijacOYYkX7wqKw0wa5aaChrM=
psl@^1.1.28:
version "1.8.0"
resolved "https://registry.yarnpkg.com/psl/-/psl-1.8.0.tgz#9326f8bcfb013adcc005fdff056acce020e51c24"
@@ -3923,11 +4048,6 @@ require-directory@^2.1.1:
resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42"
integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I=
require-main-filename@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b"
integrity sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==
resolve-from@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6"
@@ -4083,7 +4203,7 @@ serve-static@1.14.1:
parseurl "~1.3.3"
send "0.17.1"
set-blocking@^2.0.0, set-blocking@~2.0.0:
set-blocking@~2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7"
integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc=
@@ -4529,7 +4649,7 @@ ts-node@^8.6.2:
source-map-support "^0.5.17"
yn "3.1.1"
tslib@^1.11.1, tslib@^1.13.0, tslib@^1.8.1, tslib@^1.9.0:
tslib@^1.13.0, tslib@^1.8.1, tslib@^1.9.0:
version "1.14.1"
resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00"
integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==
@@ -4539,6 +4659,11 @@ tslib@^2.0.0:
resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.0.3.tgz#8e0741ac45fc0c226e58a17bfc3e64b9bc6ca61c"
integrity sha512-uZtkfKblCEQtZKBF6EBXVZeQNl82yqtDQdv+eck8u7tdPxjLu2/lp5/uPW+um2tpuxINHWy3GhiccY7QgEaVHQ==
tslib@^2.1.0:
version "2.3.0"
resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.3.0.tgz#803b8cdab3e12ba581a4ca41c8839bbb0dacb09e"
integrity sha512-N82ooyxVNm6h1riLCoyS9e3fuJ3AMG2zIZs2Gd1ATcSFjSA23Q0fzjjZeh0jbJvWVDZ0cJT8yaNNaaXHzueNjg==
tslint@^6.1.0:
version "6.1.3"
resolved "https://registry.yarnpkg.com/tslint/-/tslint-6.1.3.tgz#5c23b2eccc32487d5523bd3a470e9aa31789d904"
@@ -4770,11 +4895,6 @@ websocket-extensions@>=0.1.1:
resolved "https://registry.yarnpkg.com/websocket-extensions/-/websocket-extensions-0.1.4.tgz#7f8473bc839dfd87608adb95d7eb075211578a42"
integrity sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg==
which-module@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a"
integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=
which-pm-runs@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/which-pm-runs/-/which-pm-runs-1.0.0.tgz#670b3afbc552e0b55df6b7780ca74615f23ad1cb"
@@ -4831,10 +4951,10 @@ winston@^3.0.0:
triple-beam "^1.3.0"
winston-transport "^4.4.0"
wrap-ansi@^6.2.0:
version "6.2.0"
resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-6.2.0.tgz#e9393ba07102e6c91a3b221478f0257cd2856e53"
integrity sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==
wrap-ansi@^7.0.0:
version "7.0.0"
resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43"
integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==
dependencies:
ansi-styles "^4.0.0"
string-width "^4.1.0"
@@ -4880,10 +5000,15 @@ xtend@~4.0.0:
resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54"
integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==
y18n@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/y18n/-/y18n-4.0.0.tgz#95ef94f85ecc81d007c264e190a120f0a3c8566b"
integrity sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w==
y18n@^5.0.5:
version "5.0.8"
resolved "https://registry.yarnpkg.com/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55"
integrity sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==
yallist@^2.0.0:
version "2.1.2"
resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52"
integrity sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI=
yallist@^3.0.0, yallist@^3.0.2, yallist@^3.0.3:
version "3.1.1"
@@ -4900,30 +5025,23 @@ yaml@^1.10.0:
resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.10.0.tgz#3b593add944876077d4d683fee01081bd9fff31e"
integrity sha512-yr2icI4glYaNG+KWONODapy2/jDdMSDnrONSjblABjD9B4Z5LgiircSt8m8sRZFNi08kG9Sm0uSHtEmP3zaEGg==
yargs-parser@^18.1.2:
version "18.1.3"
resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-18.1.3.tgz#be68c4975c6b2abf469236b0c870362fab09a7b0"
integrity sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==
dependencies:
camelcase "^5.0.0"
decamelize "^1.2.0"
yargs-parser@^20.2.2:
version "20.2.9"
resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.9.tgz#2eb7dc3b0289718fc295f362753845c41a0c94ee"
integrity sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==
yargs@^15.3.1:
version "15.4.1"
resolved "https://registry.yarnpkg.com/yargs/-/yargs-15.4.1.tgz#0d87a16de01aee9d8bec2bfbf74f67851730f4f8"
integrity sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==
yargs@^16.1.1:
version "16.2.0"
resolved "https://registry.yarnpkg.com/yargs/-/yargs-16.2.0.tgz#1c82bf0f6b6a66eafce7ef30e376f49a12477f66"
integrity sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==
dependencies:
cliui "^6.0.0"
decamelize "^1.2.0"
find-up "^4.1.0"
get-caller-file "^2.0.1"
cliui "^7.0.2"
escalade "^3.1.1"
get-caller-file "^2.0.5"
require-directory "^2.1.1"
require-main-filename "^2.0.0"
set-blocking "^2.0.0"
string-width "^4.2.0"
which-module "^2.0.0"
y18n "^4.0.0"
yargs-parser "^18.1.2"
y18n "^5.0.5"
yargs-parser "^20.2.2"
yn@3.1.1:
version "3.1.1"

2
ft_build/.dockerignore Normal file
View File

@@ -0,0 +1,2 @@
antler*.json
.gitignore

1
ft_build/.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
build/

20
ft_build/Dockerfile Normal file
View File

@@ -0,0 +1,20 @@
# Use the official lightweight Node.js image.
# https://hub.docker.com/_/node
FROM node:14-slim
# Create and change to the app directory.
WORKDIR /workdir
# Copy local code to the container image.
# If you've run yarn install locally, node_modules will be copied into the
# container's work directory, saving the time of installing the same packages again.
COPY . ./
# Install any production dependencies missing after the copy above.
# If you add a package-lock.json, speed up your build by switching to 'npm ci'.
# RUN npm ci --only=production
RUN yarn
# Run the web service on container startup.
CMD [ "yarn", "start" ]

View File

@@ -0,0 +1,80 @@
import { addPackages, addSparkLib, asyncExecute } from "./terminal";
const fs = require("fs");
import { generateConfigFromTableSchema } from "./loader";
import { commandErrorHandler } from "../utils";
const path = require("path");
import admin from "firebase-admin";
export default async function generateConfig(
schemaPath: string,
user: admin.auth.UserRecord,
streamLogger
) {
return await generateConfigFromTableSchema(
schemaPath,
user,
streamLogger
).then(async (success) => {
if (!success) {
await streamLogger.info(
`generateConfigFromTableSchema failed to complete`
);
return false;
}
await streamLogger.info(`generateConfigFromTableSchema done`);
const configFile = fs.readFileSync(
path.resolve(__dirname, "../functions/src/functionConfig.ts"),
"utf-8"
);
await streamLogger.info(`configFile: ${JSON.stringify(configFile)}`);
const requiredDependencies = configFile.match(
/(?<=(require\(("|'))).*?(?=("|')\))/g
);
if (requiredDependencies) {
const packagesAdded = await addPackages(
requiredDependencies.map((p: any) => ({ name: p })),
user,
streamLogger
);
if (!packagesAdded) {
return false;
}
}
await streamLogger.info(
`requiredDependencies: ${JSON.stringify(requiredDependencies)}`
);
const isFunctionConfigValid = await asyncExecute(
"cd build/functions/src; tsc functionConfig.ts",
commandErrorHandler(
{
user,
functionConfigTs: configFile,
description: `Invalid compiled functionConfig.ts`,
},
streamLogger
)
);
await streamLogger.info(
`isFunctionConfigValid: ${JSON.stringify(isFunctionConfigValid)}`
);
if (!isFunctionConfigValid) {
return false;
}
const { sparksConfig } = require("../functions/src/functionConfig.js");
const requiredSparks = sparksConfig.map((s: any) => s.type);
await streamLogger.info(
`requiredSparks: ${JSON.stringify(requiredSparks)}`
);
for (const lib of requiredSparks) {
const success = await addSparkLib(lib, user, streamLogger);
if (!success) {
return false;
}
}
return true;
});
}

207
ft_build/compiler/loader.ts Normal file
View File

@@ -0,0 +1,207 @@
import { db } from "../firebaseConfig";
const fs = require("fs");
const beautify = require("js-beautify").js;
import admin from "firebase-admin";
import { parseSparksConfig } from "../utils";
export const generateConfigFromTableSchema = async (
schemaDocPath: string,
user: admin.auth.UserRecord,
streamLogger
) => {
await streamLogger.info("getting schema...");
const schemaDoc = await db.doc(schemaDocPath).get();
const schemaData = schemaDoc.data();
try {
if (!schemaData) throw new Error("no schema found");
// Temporarily disabled because this is super long
// await streamLogger.info(`schemaData: ${JSON.stringify(schemaData)}`);
const derivativeColumns = Object.values(schemaData.columns).filter(
(col: any) => col.type === "DERIVATIVE"
);
await streamLogger.info(
`derivativeColumns: ${JSON.stringify(derivativeColumns)}`
);
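// Build the source text for the derivatives config: each derivative column becomes an entry
// with its listener fields and an async evaluate() that wraps the column's script.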
const derivativesConfig = `[${derivativeColumns.reduce(
(acc, currColumn: any) => {
if (
!currColumn.config.listenerFields ||
currColumn.config.listenerFields.length === 0
)
throw new Error(
`${currColumn.key} derivative is missing listener fields`
);
if (currColumn.config.listenerFields.includes(currColumn.key))
throw new Error(
`${currColumn.key} derivative has its own key as a listener field`
);
return `${acc}{\nfieldName:'${
currColumn.key
}',evaluate:async ({row,ref,db,auth,storage,utilFns}) =>{${
currColumn.config.script
}},\nlistenerFields:[${currColumn.config.listenerFields
.map((fieldKey: string) => `"${fieldKey}"`)
.join(",\n")}]},\n`;
},
""
)}]`;
await streamLogger.info(
`derivativesConfig: ${JSON.stringify(derivativesConfig)}`
);
const initializableColumns = Object.values(
schemaData.columns
).filter((col: any) => Boolean(col.config?.defaultValue));
await streamLogger.info(
`initializableColumns: ${JSON.stringify(initializableColumns)}`
);
const initializeConfig = `[${initializableColumns.reduce(
(acc, currColumn: any) => {
if (currColumn.config.defaultValue.type === "static") {
return `${acc}{\nfieldName:'${currColumn.key}',
type:"${currColumn.config.defaultValue.type}",
value:${
typeof currColumn.config.defaultValue.value === "string"
? `"${currColumn.config.defaultValue.value}"`
: JSON.stringify(currColumn.config.defaultValue.value)
},
},\n`;
} else if (currColumn.config.defaultValue.type === "dynamic") {
return `${acc}{\nfieldName:'${currColumn.key}',
type:"${currColumn.config.defaultValue.type}",
script:async ({row,ref,db,auth,utilFns}) =>{${currColumn.config.defaultValue.script}},
},\n`;
} else {
return `${acc}{\nfieldName:'${currColumn.key}',
type:"${currColumn.config.defaultValue.type}"
},\n`;
}
},
""
)}]`;
await streamLogger.info(
`initializeConfig: ${JSON.stringify(initializeConfig)}`
);
const documentSelectColumns = Object.values(schemaData.columns).filter(
(col: any) => col.type === "DOCUMENT_SELECT" && col.config?.trackedFields
);
const documentSelectConfig = `[${documentSelectColumns.reduce(
(acc, currColumn: any) => {
return `${acc}{\nfieldName:'${
currColumn.key
}',\ntrackedFields:[${currColumn.config.trackedFields
.map((fieldKey: string) => `"${fieldKey}"`)
.join(",\n")}]},\n`;
},
""
)}]`;
await streamLogger.info(
`documentSelectColumns: ${JSON.stringify(documentSelectColumns)}`
);
const sparksConfig = parseSparksConfig(
schemaData.sparks,
user,
streamLogger
);
await streamLogger.info(`sparksConfig: ${JSON.stringify(sparksConfig)}`);
const collectionType = schemaDocPath.includes("subTables")
? "subCollection"
: schemaDocPath.includes("groupSchema")
? "groupCollection"
: "collection";
let collectionId = "";
let functionName = "";
let triggerPath = "";
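// Derive the Cloud Function name and Firestore trigger path from the schema doc path,
// depending on whether it describes a collection, a sub-collection, or a collection group.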
switch (collectionType) {
case "collection":
collectionId = schemaDocPath.split("/").pop() ?? "";
functionName = `"${collectionId}"`;
triggerPath = `"${collectionId}/{docId}"`;
break;
case "subCollection":
let pathParentIncrement = 0;
triggerPath =
'"' +
schemaDocPath
.replace("_FIRETABLE_/settings/schema/", "")
.replace(/subTables/g, function () {
pathParentIncrement++;
return `{parentDoc${pathParentIncrement}}`;
}) +
"/{docId}" +
'"';
functionName =
'"' +
schemaDocPath
.replace("_FIRETABLE_/settings/schema/", "")
.replace(/\/subTables\//g, "_") +
'"';
break;
case "groupCollection":
collectionId = schemaDocPath.split("/").pop() ?? "";
const triggerDepth = schemaData.triggerDepth
? schemaData.triggerDepth
: 1;
triggerPath = "";
for (let i = 1; i <= triggerDepth; i++) {
triggerPath = triggerPath + `{parentCol${i}}/{parentDoc${i}}/`;
}
triggerPath = '"' + triggerPath + collectionId + "/" + "{docId}" + '"';
functionName = `"CG_${collectionId}${
triggerDepth > 1 ? `_D${triggerDepth}` : ""
}"`;
break;
default:
break;
}
await streamLogger.info(
`collectionType: ${JSON.stringify(collectionType)}`
);
// generate field types from table meta data
const fieldTypes = JSON.stringify(
Object.keys(schemaData.columns).reduce((acc, cur) => {
const field = schemaData.columns[cur];
let fieldType = field.type;
if (fieldType === "DERIVATIVE") {
fieldType = field.config.renderFieldType;
}
return {
[cur]: fieldType,
...acc,
};
}, {})
);
await streamLogger.info(`fieldTypes: ${JSON.stringify(fieldTypes)}`);
const exports: any = {
fieldTypes,
triggerPath,
functionName: functionName.replace(/-/g, "_"),
derivativesConfig,
initializeConfig,
documentSelectConfig,
sparksConfig,
};
await streamLogger.info(`exports: ${JSON.stringify(exports)}`);
const fileData = Object.keys(exports).reduce((acc, currKey) => {
return `${acc}\nexport const ${currKey} = ${exports[currKey]}`;
}, ``);
await streamLogger.info(`fileData: ${JSON.stringify(fileData)}`);
const path = require("path");
fs.writeFileSync(
path.resolve(__dirname, "../functions/src/functionConfig.ts"),
beautify(fileData, { indent_size: 2 })
);
return true;
} catch (error) {
streamLogger.error(error.message);
return false;
}
};

View File

@@ -0,0 +1,83 @@
import * as child from "child_process";
import admin from "firebase-admin";
import { commandErrorHandler, logErrorToDB } from "../utils";
function execute(command: string, callback: any) {
console.log(command);
child.exec(command, function (error, stdout, stderr) {
console.log({ error, stdout, stderr });
callback(stdout);
});
}
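// Runs a shell command, passes (error, stdout, stderr) to the callback, and resolves to true on success.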
export const asyncExecute = async (command: string, callback: any) =>
new Promise(async (resolve, reject) => {
child.exec(command, async function (error, stdout, stderr) {
console.log({ error, stdout, stderr });
await callback(error, stdout, stderr);
resolve(!error);
});
});
export const addPackages = async (
packages: { name: string; version?: string }[],
user: admin.auth.UserRecord,
streamLogger
) => {
const packagesString = packages.reduce((acc, currPackage) => {
return `${acc} ${currPackage.name}@${currPackage.version ?? "latest"}`;
}, "");
if (packagesString.trim().length !== 0) {
const success = await asyncExecute(
`cd build/functions;yarn add ${packagesString}`,
commandErrorHandler(
{
user,
description: "Error adding packages",
},
streamLogger
)
);
return success;
}
return true;
};
export const addSparkLib = async (
name: string,
user: admin.auth.UserRecord,
streamLogger
) => {
try {
const { dependencies } = require(`../sparksLib/${name}`);
const packages = Object.keys(dependencies).map((key) => ({
name: key,
version: dependencies[key],
}));
const success = await addPackages(packages, user, streamLogger);
if (!success) {
return false;
}
} catch (error) {
logErrorToDB(
{
user,
errorDescription: "Error parsing dependencies",
},
streamLogger
);
return false;
}
const success = await asyncExecute(
`cp build/sparksLib/${name}.ts build/functions/src/sparks/${name}.ts`,
commandErrorHandler(
{
user,
description: "Error copying sparksLib",
},
streamLogger
)
);
return success;
};

View File

@@ -14,5 +14,5 @@
},
"compileOnSave": true,
"include": ["src", "generateConfig.ts"],
"ignore": ["sparks"]
"ignore": ["sparks", "sparksLib"]
}

29
ft_build/deploy.sh Executable file
View File

@@ -0,0 +1,29 @@
#!/bin/bash
helpFunction()
{
echo "Usage: ./deploy.sh --project-id [YOUR GCLOUD PROJECT ID]"
exit 0
}
while test $# -gt 0; do
case "$1" in
--project-id)
shift
project_id=$1
shift
;;
*)
echo "$1 is not a recognized flag!"
exit 1;
;;
esac
done
if [[ -z "$project_id" ]];
then
helpFunction
fi
gcloud config set project $project_id
gcloud builds submit --tag gcr.io/$project_id/ft-builder
gcloud run deploy ft-builder --image gcr.io/$project_id/ft-builder --platform managed --memory 4Gi --allow-unauthenticated --set-env-vars="_PROJECT_ID=$project_id" --region=australia-southeast1

View File

@@ -0,0 +1,10 @@
// Initialize Firebase Admin
import * as admin from "firebase-admin";
admin.initializeApp();
const db = admin.firestore();
const auth = admin.auth();
db.settings({ timestampsInSnapshots: true, ignoreUndefinedProperties: true });
export { db, admin, auth };

View File

@@ -1,5 +1,6 @@
{
"name": "functions",
"version": "0.0.1",
"scripts": {
"lint": "tslint --project tsconfig.json",
"build": "tsc",
@@ -20,11 +21,11 @@
},
"devDependencies": {
"@types/node": "^14.14.11",
"firebase-tools": "^9.2.2",
"husky": "^4.2.5",
"prettier": "^2.1.1",
"pretty-quick": "^3.0.0",
"ts-node": "^8.6.2",
"tsc": "^1.20150623.0",
"tslint": "^6.1.0",
"typescript": "^4.1.2"
},

View File

@@ -0,0 +1,63 @@
import * as functions from "firebase-functions";
import * as admin from "firebase-admin";
import { db, auth, storage } from "../firebaseConfig";
import utilFns from "../utils";
const derivative = (
functionConfig: {
fieldName: string;
listenerFields: string[];
evaluate: (props: {
row: any;
ref: FirebaseFirestore.DocumentReference<FirebaseFirestore.DocumentData>;
db: FirebaseFirestore.Firestore;
auth: admin.auth.Auth;
storage: admin.storage.Storage;
utilFns: any;
}) => any;
}[]
) => async (change: functions.Change<functions.firestore.DocumentSnapshot>) => {
try {
const row = change.after?.data();
const ref = change.after ? change.after.ref : change.before.ref;
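// Evaluate each derivative whose listener fields (or the forced-update flag) changed,
// accumulating the new field values into a single update object.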
const update = await functionConfig.reduce(
async (accUpdates: any, currDerivative) => {
const shouldEval = utilFns.hasChanged(change)([
...currDerivative.listenerFields,
"_ft_forcedUpdateAt",
]);
if (shouldEval) {
try {
const newValue = await currDerivative.evaluate({
row,
ref,
db,
auth,
storage,
utilFns,
});
if (
newValue !== undefined &&
newValue !== row[currDerivative.fieldName]
) {
return {
...(await accUpdates),
[currDerivative.fieldName]: newValue,
};
}
} catch (error) {
console.log(error);
}
}
return await accUpdates;
},
{}
);
return update;
} catch (error) {
console.log(`Derivatives Error`, error);
return {};
}
};
export default derivative;

View File

@@ -8,6 +8,12 @@ export const db = admin.firestore();
// Initialize Auth
export const auth = admin.auth();
const settings = { timestampsInSnapshots: true };
// Initialize Storage
export const storage = admin.storage();
const settings = {
timestampsInSnapshots: true,
ignoreUndefinedProperties: true,
};
db.settings(settings);
export const env = functions.config();

View File

@@ -8,6 +8,7 @@ import {
documentSelectConfig,
sparksConfig,
initializeConfig,
fieldTypes,
} from "./functionConfig";
import { getTriggerType, changedDocPath } from "./utils";
@@ -21,7 +22,7 @@ export const FT = {
let promises: Promise<any>[] = [];
const sparkPromises = sparksConfig
.filter((sparkConfig) => sparkConfig.triggers.includes(triggerType))
.map((sparkConfig) => spark(sparkConfig)(change, context));
.map((sparkConfig) => spark(sparkConfig, fieldTypes)(change, context));
console.log(
`#${
sparkPromises.length

View File

@@ -1,6 +1,6 @@
import * as functions from "firebase-functions";
import utilFns from "../utils";
import { db, auth } from "../firebaseConfig";
import { db, auth, storage } from "../firebaseConfig";
const initializedDoc = (
columns: { fieldName: string; type: string; value?: any; script?: any }[]
) => async (snapshot: functions.firestore.DocumentSnapshot) =>
@@ -21,6 +21,7 @@ const initializedDoc = (
ref: snapshot.ref,
db,
auth,
storage,
utilFns,
}),
};

View File

@@ -51,7 +51,7 @@ const updateLinks = (
);
return Promise.all([...addPromises, ...removePromises]);
} else {
return false
return false;
}
};
export default function propagate(
@@ -59,16 +59,16 @@ export default function propagate(
config: { fieldName: string; trackedFields: string[] }[],
triggerType: "delete" | "create" | "update"
) {
const promises = []
if (["delete","update"].includes(triggerType)){
const promises = [];
if (["delete", "update"].includes(triggerType)) {
const propagateChangesPromise = propagateChangesOnTrigger(
change,
triggerType
);
promises.push(propagateChangesPromise)
};
if(config.length > 0){
promises.push(propagateChangesPromise);
}
if (config.length > 0) {
if (triggerType === "delete") {
config.forEach((c) =>
promises.push(removeRefsOnTargetDelete(change.before.ref, c.fieldName))

View File

@@ -1,8 +1,8 @@
import * as functions from "firebase-functions";
import { hasRequiredFields, getTriggerType } from "../utils";
import { db, auth } from "../firebaseConfig";
import utilFns, { hasRequiredFields, getTriggerType } from "../utils";
import { db, auth, storage } from "../firebaseConfig";
const spark = (sparkConfig) => async (
const spark = (sparkConfig, fieldTypes) => async (
change: functions.Change<functions.firestore.DocumentSnapshot>,
context: functions.EventContext
) => {
@@ -12,6 +12,7 @@ const spark = (sparkConfig) => async (
const triggerType = getTriggerType(change);
try {
const {
label,
type,
triggers,
shouldRun,
@@ -26,6 +27,9 @@ const spark = (sparkConfig) => async (
change,
triggerType,
sparkConfig,
utilFns,
fieldTypes,
storage,
};
if (!triggers.includes(triggerType)) return false; //check if trigger type is included in the spark
if (
@@ -43,7 +47,7 @@ const spark = (sparkConfig) => async (
: shouldRun)
: false; //
console.log("type is ", type, "dontRun value is", dontRun);
console.log(label, "type is ", type, "dontRun value is", dontRun);
if (dontRun) return false;
const sparkData = await Object.keys(sparkBody).reduce(
@@ -61,6 +65,10 @@ const spark = (sparkConfig) => async (
await sparkFn(sparkData, sparkContext);
return true;
} catch (err) {
const { label, type } = sparkConfig;
console.log(
`error in ${label} spark of type ${type}, on ${context.eventType} in Doc ${context.resource.name}`
);
console.error(err);
return Promise.reject(err);
}

View File

@@ -57,7 +57,16 @@ export const rowReducer = (fieldsToSync, row) =>
else return acc;
}, {});
const hasChanged = (change: functions.Change<functions.firestore.DocumentSnapshot>) => (
  trackedFields: string[]
) => {
  const before = change.before?.data();
  const after = change.after?.data();
  if (!before && after) return true;
  else if (before && !after) return false;
  else
    return trackedFields.some(
      (trackedField) =>
        JSON.stringify(before[trackedField]) !== JSON.stringify(after[trackedField])
    );
};
export default {
hasChanged,
getSecret,
hasRequiredFields,
generateId,

140
ft_build/index.ts Normal file
View File

@@ -0,0 +1,140 @@
const express = require("express");
const bodyParser = require("body-parser");
const cors = require("cors");
import { asyncExecute } from "./compiler/terminal";
import { createStreamLogger } from "./utils";
import generateConfig from "./compiler";
import { auth } from "./firebaseConfig";
import meta from "./package.json";
import { commandErrorHandler, logErrorToDB } from "./utils";
import firebase from "firebase-admin";
const app = express();
const jsonParser = bodyParser.json();
app.use(cors());
app.get("/", async (req: any, res: any) => {
res.send(`Firetable cloud function builder version ${meta.version}`);
});
app.post("/", jsonParser, async (req: any, res: any) => {
let user: firebase.auth.UserRecord;
const userToken = req?.body?.token;
if (!userToken) {
console.log("missing auth token");
res.send({
success: false,
reason: "missing auth token",
});
return;
}
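// Verify the Firebase ID token and require the ADMIN role before running a build.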
try {
const decodedToken = await auth.verifyIdToken(userToken);
const uid = decodedToken.uid;
user = await auth.getUser(uid);
const roles = user?.customClaims?.roles;
if (!roles || !Array.isArray(roles) || !roles?.includes("ADMIN")) {
await logErrorToDB({
errorDescription: `user is not admin`,
user,
});
res.send({
success: false,
reason: `user is not admin`,
});
return;
}
console.log("successfully authenticated");
} catch (error) {
await logErrorToDB({
errorDescription: `error verifying auth token: ${error}`,
user,
});
res.send({
success: false,
reason: `error verifying auth token: ${error}`,
});
return;
}
const configPath = req?.body?.configPath;
console.log("configPath:", configPath);
if (!configPath) {
await logErrorToDB({
errorDescription: `Invalid configPath (${configPath})`,
user,
});
res.send({
success: false,
reason: "invalid configPath",
});
return;
}
const streamLogger = await createStreamLogger(configPath);
await streamLogger.info("streamLogger created");
const success = await generateConfig(configPath, user, streamLogger);
if (!success) {
await streamLogger.error("generateConfig failed to complete");
await streamLogger.fail();
res.send({
success: false,
reason: `generateConfig failed to complete`,
});
return;
}
await streamLogger.info("generateConfig success");
let hasEnvError = false;
if (!process.env._PROJECT_ID) {
await logErrorToDB(
{
errorDescription: `Invalid env: _PROJECT_ID (${process.env._PROJECT_ID})`,
user,
},
streamLogger
);
hasEnvError = true;
}
if (hasEnvError) {
await streamLogger.error("Invalid env:_PROJECT_ID");
await streamLogger.fail();
res.send({
success: false,
reason: "Invalid env:_PROJECT_ID",
});
return;
}
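// Install the generated functions' dependencies, then deploy only the functions to the target project.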
await asyncExecute(
`cd build/functions; \
yarn install`,
commandErrorHandler({ user }, streamLogger)
);
await asyncExecute(
`cd build/functions; \
yarn deployFT \
--project ${process.env._PROJECT_ID} \
--only functions`,
commandErrorHandler({ user }, streamLogger)
);
await streamLogger.end();
res.send({
success: true,
});
});
const port = process.env.PORT || 8080;
app.listen(port, () => {
console.log(
`Firetable cloud function builder ${meta.version}: listening on port ${port}`
);
});

34
ft_build/package.json Normal file
View File

@@ -0,0 +1,34 @@
{
"name": "ft-functions-builder",
"description": "Manages the build and deployment of Firetable cloud functions",
"version": "1.0.0",
"private": true,
"main": "index.js",
"scripts": {
"start": "yarn build && node build",
"build": "rm -rf build && tsc --project ./ && cp -r functions build && cp -r sparksLib build",
"deploy": "./deploy.sh"
},
"engines": {
"node": "14"
},
"dependencies": {
"body-parser": "^1.19.0",
"cors": "^2.8.5",
"express": "^4.17.1",
"firebase-admin": "^9.2.0",
"firebase-functions": "^3.11.0"
},
"devDependencies": {
"@types/express": "^4.17.11",
"@types/node": "^14.14.33",
"firebase-tools": "^8.7.0",
"husky": "^4.2.5",
"js-beautify": "^1.13.0",
"prettier": "^2.1.1",
"pretty-quick": "^3.0.0",
"ts-node": "^9.1.1",
"tslint": "^6.1.0",
"typescript": "^4.2.3"
}
}

View File

@@ -0,0 +1,422 @@
export const dependencies = {
"@google-cloud/bigquery": "^5.5.0",
};
const get = (obj, path, defaultValue = undefined) => {
const travel = (regexp) =>
String.prototype.split
.call(path, regexp)
.filter(Boolean)
.reduce(
(res, key) => (res !== null && res !== undefined ? res[key] : res),
obj
);
const result = travel(/[,[\]]+?/) || travel(/[,[\].]+?/);
return result === undefined || result === obj ? defaultValue : result;
};
const filterSnapshot = (
field: { docPath: string; snapshot: any },
preservedKeys: string[]
) => {
return {
docPath: field.docPath,
...preservedKeys.reduce((acc: any, currentKey: string) => {
const value = get(field.snapshot, currentKey);
if (value) {
return { ...acc, snapshot: { [currentKey]: value, ...acc.snapshot } };
} else return acc;
}, {}),
};
};
// returns object of fieldsToSync
const rowReducer = (fieldsToSync, row) =>
fieldsToSync.reduce(
(
acc: any,
curr: string | { fieldName: string; snapshotFields: string[] }
) => {
if (typeof curr === "string") {
if (row[curr] && typeof row[curr].toDate === "function") {
return {
...acc,
[curr]: row[curr].toDate().getTime() / 1000,
};
} else if (row[curr] !== undefined && row[curr] !== null) {
return { ...acc, [curr]: row[curr] };
} else {
return acc;
}
} else {
if (row[curr.fieldName] && curr.snapshotFields) {
return {
...acc,
[curr.fieldName]: row[curr.fieldName].map((snapshot) =>
filterSnapshot(snapshot, curr.snapshotFields)
),
};
} else {
return acc;
}
}
},
{}
);
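// Returns true if any of the synced fields differ between the before and after snapshots.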
const significantDifference = (fieldsToSync, change) => {
const beforeData = change.before.data();
const afterData = change.after.data();
return fieldsToSync.reduce((acc, field) => {
const key = typeof field === "string" ? field : field.fieldName;
if (JSON.stringify(beforeData[key]) !== JSON.stringify(afterData[key]))
return true;
else return acc;
}, false);
};
const transformToSQLData = (value: any, ftType: string) => {
if (value === null || value === undefined) {
return {
value: `null`,
type: "STRING",
};
}
const sanitise = (x: string) =>
x?.replace?.(/\"/g, '\\"')?.replace?.(/\n/g, "\\n") ?? "";
switch (ftType) {
case "SIMPLE_TEXT":
case "LONG_TEXT":
case "EMAIL":
case "PHONE_NUMBER":
case "CODE":
case "RICH_TEXT":
case "ID":
case "SINGLE_SELECT":
case "URL":
return {
value: `"${sanitise(value)}"`,
type: "STRING",
};
case "JSON": // JSON
case "FILE": // JSON
case "IMAGE": // JSON
case "USER": // JSON
case "COLOR": // JSON
case "DOCUMENT_SELECT":
case "SERVICE_SELECT":
case "ACTION":
case "AGGREGATE":
case "MULTI_SELECT": // array
return {
value: `"${sanitise(JSON.stringify(value))}"`,
type: "STRING",
};
case "CHECK_BOX":
return {
value: value ? `true` : `false`,
type: "BOOLEAN",
};
case "NUMBER":
case "PERCENTAGE":
case "RATING":
case "SLIDER":
return {
value: Number(value),
type: "NUMERIC",
};
case "DATE":
case "DATE_TIME":
case "DURATION":
if (!value?.toDate) {
return {
value: `null`,
type: "TIMESTAMP",
};
}
return {
value: `timestamp("${value?.toDate?.()}")`,
type: "TIMESTAMP",
};
case "LAST":
case "STATUS":
case "SUB_TABLE":
default:
// unknown or meaningless to sync
return {
value: `null`,
type: "STRING",
};
}
};
const transformToSQLValue = (ftValue: any, ftType: string) => {
const { value } = transformToSQLData(ftValue, ftType);
return value;
};
const transformToSQLType = (ftType: string) => {
const { type } = transformToSQLData("", ftType);
return type;
};
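// Spark body: mirrors a Firetable row into a BigQuery table, creating the dataset, table,
// and schema on demand before writing the row.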
const bigqueryIndex = async (payload, sparkContext) => {
const { objectID, index, fieldsToSync, projectID } = payload;
const { triggerType, change, fieldTypes } = sparkContext;
const record = rowReducer(fieldsToSync, sparkContext.row);
const { BigQuery } = require("@google-cloud/bigquery");
const bigquery = new BigQuery();
const _projectID = projectID ?? process.env.GCLOUD_PROJECT;
const tableFullName = `${_projectID}.firetable.${index}`;
console.log(
`projectID: ${_projectID}, index: ${index}, tableFullName: ${tableFullName}`
);
// create dataset with exact name "firetable" if not exists
async function preprocessDataset() {
const dataset = bigquery.dataset("firetable");
const res = await dataset.exists();
const exists = res[0];
if (!exists) {
console.log("Dataset 'firetable' does not exist, creating dataset...");
await dataset.create();
console.log("Dataset 'firetable' created.");
} else {
console.log("Dataset 'firetable' exists.");
}
}
async function preprocessTable() {
const dataset = bigquery.dataset("firetable");
const table = dataset.table(index);
const res = await table.exists();
const exists = res[0];
if (!exists) {
console.log(
`Table '${index}' does not exist in dataset 'firetable', creating table...`
);
await table.create();
console.log(`Table '${index}' created in dataset 'firetable'.`);
} else {
console.log(`Table ${index} exists in 'firetable'.`);
}
}
async function preprocessSchema() {
const dataset = bigquery.dataset("firetable");
const table = dataset.table(index);
const generatedTypes = Object.keys(fieldTypes)
.filter((field) => fieldsToSync.includes(field))
.reduce((acc, cur) => {
return {
[cur]: transformToSQLType(fieldTypes[cur]),
...acc,
};
}, {});
const generatedSchema = [
{ name: "objectID", type: "STRING", mode: "REQUIRED" },
...Object.keys(generatedTypes).map((key) => {
return {
name: key,
type: generatedTypes[key],
mode: "NULLABLE",
};
}),
];
const pushSchema = async () => {
console.log("pushing schema:", generatedSchema);
const metadata = {
schema: generatedSchema,
};
await table.setMetadata(metadata);
console.log("schema pushed.");
};
const existingRes = await table.getMetadata();
const existingSchema = existingRes[0].schema?.fields;
if (!existingSchema) {
console.log("Existing schema does not exist, pushing schema...");
await pushSchema();
return;
}
// check if schema update is needed
const objectIDFilter = (field) => field.name !== "objectID";
const schemaIdentical =
Object.keys(generatedTypes).length ===
existingSchema.filter(objectIDFilter).length &&
existingSchema
.filter(objectIDFilter)
.every((field) => generatedTypes[field.name] === field.type);
if (schemaIdentical) {
// no change to schema
console.log("Existing schema detected, no update needeed.");
return;
}
// check schema compatibility (only adding new fields is accepted)
const compatible =
Object.keys(generatedTypes).length >
existingSchema.filter(objectIDFilter).length &&
existingSchema
.filter(objectIDFilter)
.filter((field) => Object.keys(generatedTypes).includes(field.name))
.every((field) => generatedTypes[field.name] === field.type);
if (!compatible) {
const errorMessage =
"The updated field types are not compatible with the existing schema. Please manually remove the current BigQuery table or update the spark index.";
console.log(errorMessage);
throw new Error(errorMessage);
} else {
console.log(
"New field types detected and it is compatible with current schema."
);
}
// push schema
await pushSchema();
}
// returns whether the objectID already exists in the table (boolean)
async function exist() {
const query = `SELECT objectID FROM ${tableFullName}
WHERE objectID="${objectID}"
;`;
console.log(query);
const res = await bigquery.query(query);
const rows = res?.[0];
return !!rows?.length;
}
function getTypeKnownRecord(data) {
const knownTypes = Object.keys(fieldTypes);
const givenKeys = Object.keys(data);
const knownKeys = givenKeys.filter((key) => knownTypes.includes(key));
const unknownKeys = givenKeys.filter((key) => !knownTypes.includes(key));
const knownRecord = Object.keys(data)
.filter((key) => knownKeys.includes(key))
.reduce((obj, key) => {
return {
...obj,
[key]: data[key],
};
}, {});
if (unknownKeys?.length > 0) {
console.log(
"The following fields do not exist in Firetable and are ignored.",
unknownKeys
);
}
return knownRecord;
}
async function insert(data) {
const keys = Object.keys(data).join(",");
const values = Object.keys(data)
.map((key) => transformToSQLValue(data[key], fieldTypes[key]))
.join(",");
const query = `INSERT INTO ${tableFullName}
(objectID, ${keys})
VALUES ("${objectID}", ${values})
;`;
console.log(query);
await executeQuery(query);
}
// execute a query; if rate limited, sleep and try again until it succeeds
// ATTENTION: the Cloud Function may hit its execution timeout (60,000 ms) while retrying
const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
async function executeQuery(query, delayDepth = 1) {
try {
const res = await bigquery.query(query);
console.log(res);
} catch (error) {
if (
error?.errors?.length === 1 &&
(error?.errors?.[0]?.reason === "rateLimitExceeded" ||
error?.errors?.[0]?.reason === "quotaExceeded")
) {
const delay = Math.round(
Math.floor(Math.random() * 3_000 * (delayDepth % 20) + 1000)
);
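// e.g. at delayDepth 1 this yields a random delay between 1,000 and ~4,000 ms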
console.log(`API rate limited, try again in ${delay}ms`);
await sleep(delay);
await executeQuery(query, delayDepth + 1);
} else {
console.log(error?.errors ?? error);
}
}
if (delayDepth === 1) {
console.log("Query finished.");
}
}
async function update(data) {
const values = Object.keys(data)
.map((key) => `${key}=${transformToSQLValue(data[key], fieldTypes[key])}`)
.join(",");
const query = `UPDATE ${tableFullName}
SET ${values}
WHERE objectID="${objectID}"
;`;
console.log(query);
await executeQuery(query);
}
async function insertOrUpdate(data) {
const objectExists = await exist();
if (objectExists) {
await update(data);
} else {
await insert(data);
}
}
async function remove() {
const query = `DELETE FROM ${tableFullName}
WHERE objectID="${objectID}"
;`;
console.log(query);
await executeQuery(query);
}
// preprocess before starting index logic
await preprocessDataset();
await preprocessTable();
await preprocessSchema();
// only proceed with fields that have known types
const typeKnownRecord = getTypeKnownRecord(record);
switch (triggerType) {
case "delete":
await remove();
break;
case "update":
if (
significantDifference([...fieldsToSync, "_ft_forcedUpdateAt"], change)
) {
await insertOrUpdate(typeKnownRecord);
} else {
console.log("significantDifference is false, no update needed.");
}
break;
case "create":
await insertOrUpdate(typeKnownRecord);
break;
default:
break;
}
return true;
};
export default bigqueryIndex;
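
For reference, a minimal, hypothetical direct invocation of this spark. The payload and context field names come from the destructuring above; in Firetable these arguments are assembled by the generated function code, and the values below are made up purely to illustrate the expected shapes:

await bigqueryIndex(
  {
    objectID: "doc123",
    index: "users", // synced to table `<projectID>.firetable.users`
    fieldsToSync: ["firstName", "email"],
    projectID: undefined, // falls back to process.env.GCLOUD_PROJECT
  },
  {
    triggerType: "create",
    row: { firstName: "Ada", email: "ada@example.com" },
    change: null, // only inspected for "update" triggers
    fieldTypes: { firstName: "SIMPLE_TEXT", email: "EMAIL" },
  }
);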

View File

@@ -26,7 +26,12 @@ const docSync = async (data, sparkContext) => {
switch (triggerType) {
case "delete":
try {
await db.doc(targetPath).delete();
}
catch (error) {
console.log(error);
}
break;
case "update":
if (

View File

@@ -0,0 +1,25 @@
export const dependencies = {
"mailchimp-api-v3": "1.15.0",
};
// method: 'get|post|put|patch|delete'
// path: `/lists/${listId}/members`
const mailchimp = async (data) => {
const { path, method, path_params, body, query } = data;
const mailchimpLib = require("mailchimp-api-v3");
const utilFns = require("../utils");
const mailchimpKey = await utilFns.getSecret("mailchimp");
const _mailchimp = new mailchimpLib(mailchimpKey);
return new Promise((resolve, reject) => {
_mailchimp.request(
{
method,
path,
path_params,
body,
query,
},
// the callback receives (error, result); reject on error instead of
// resolving with the error object
(error, result) => {
if (error) reject(error);
else resolve(result);
}
);
});
};
export default mailchimp;
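
A hypothetical call for illustration; the list ID and member fields are made-up examples following Mailchimp's marketing API, not values taken from this repo:

await mailchimp({
  method: "post",
  path: "/lists/abc123/members", // "abc123" is a made-up audience/list ID
  path_params: {},
  body: { email_address: "someone@example.com", status: "subscribed" },
  query: {},
});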

View File

@@ -0,0 +1,131 @@
export const dependencies = {
meilisearch: "^0.18.1",
};
const get = (obj, path, defaultValue = undefined) => {
const travel = (regexp) =>
String.prototype.split
.call(path, regexp)
.filter(Boolean)
.reduce(
(res, key) => (res !== null && res !== undefined ? res[key] : res),
obj
);
const result = travel(/[,[\]]+?/) || travel(/[,[\].]+?/);
return result === undefined || result === obj ? defaultValue : result;
};
const filterSnapshot = (
field: { docPath: string; snapshot: any },
preservedKeys: string[]
) => {
return {
docPath: field.docPath,
...preservedKeys.reduce((acc: any, currentKey: string) => {
const value = get(field.snapshot, currentKey);
if (value) {
return { ...acc, snapshot: { [currentKey]: value, ...acc.snapshot } };
} else return acc;
}, {}),
};
};
// returns object of fieldsToSync
const rowReducer = (fieldsToSync, row) =>
fieldsToSync.reduce(
(
acc: any,
curr: string | { fieldName: string; snapshotFields: string[] }
) => {
if (typeof curr === "string") {
if (row[curr] && typeof row[curr].toDate === "function") {
return {
...acc,
[curr]: row[curr].toDate().getTime() / 1000,
};
} else if (row[curr] !== undefined && row[curr] !== null) {
return { ...acc, [curr]: row[curr] };
} else {
return acc;
}
} else {
if (row[curr.fieldName] && curr.snapshotFields) {
return {
...acc,
[curr.fieldName]: row[curr.fieldName].map((snapshot) =>
filterSnapshot(snapshot, curr.snapshotFields)
),
};
} else {
return acc;
}
}
},
{}
);
const significantDifference = (fieldsToSync, change) => {
const beforeData = change.before.data();
const afterData = change.after.data();
return fieldsToSync.reduce((acc, field) => {
const key = typeof field === "string" ? field : field.fieldName;
if (JSON.stringify(beforeData[key]) !== JSON.stringify(afterData[key]))
return true;
else return acc;
}, false);
};
const meiliIndex = async (data, sparkContext) => {
const { row, objectID, index, fieldsToSync } = data;
const { triggerType, change } = sparkContext;
const record = rowReducer(fieldsToSync, row);
const { MeiliSearch } = require("meilisearch");
const { getSecret } = require("../utils");
const meiliConfig = await getSecret("meilisearch");
console.log(`meilisearch host : ${meiliConfig.host}, index: ${index}`);
const client = new MeiliSearch(meiliConfig);
const _index = client.index(index);
let res;
switch (triggerType) {
case "delete":
console.log("Deleting...");
res = await _index.deleteDocument(objectID);
break;
case "update":
if (
significantDifference([...fieldsToSync, "_ft_forcedUpdateAt"], change)
) {
console.log("Updating...");
res = await _index.updateDocuments([
{
id: objectID,
...record,
},
]);
}
break;
case "create":
console.log("Creating...");
res = await _index.addDocuments([
{
id: objectID,
...record,
},
]);
break;
default:
console.log("No match.");
break;
}
console.log("Checking status...");
if (res?.updateId) {
console.log("Querying status...");
const status = await client.index(index).getUpdateStatus(res.updateId);
console.log("Status:", status);
}
return true;
};
export default meiliIndex;
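
For context, a "create" trigger adds a single document shaped as { id: objectID, ...record } to the index; an illustrative example of what ends up in MeiliSearch (values are made up):

[{ id: "doc123", firstName: "Ada", email: "ada@example.com" }]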

View File

@@ -0,0 +1,7 @@
export const dependencies = {};
const task = async (args) => {
const { promises } = args;
const result = await Promise.allSettled(
Array.isArray(promises) ? promises : [promises]
);
return result;
};
export default task;
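
A hypothetical use of this spark, settling a mix of promises and reporting each outcome:

const results = await task({
  promises: [Promise.resolve(1), Promise.reject(new Error("boom"))],
});
// -> [{ status: "fulfilled", value: 1 }, { status: "rejected", reason: Error("boom") }]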

20
ft_build/tsconfig.json Normal file
View File

@@ -0,0 +1,20 @@
{
"compilerOptions": {
"target": "es6",
"module": "commonjs",
"rootDir": "./",
"outDir": "./build",
"esModuleInterop": true,
"strict": true,
"noImplicitReturns": true,
"noUnusedLocals": false,
"sourceMap": true,
"noImplicitAny": false,
"resolveJsonModule": true,
"lib": ["ESNext"],
"strictNullChecks": false
},
"compileOnSave": true,
"exclude": ["functions", "build"],
"include": ["*.ts", "firebase.json", "sparksLib"]
}

198
ft_build/utils.ts Normal file
View File

@@ -0,0 +1,198 @@
import { db } from "./firebaseConfig";
import admin from "firebase-admin";
function firetableUser(user: admin.auth.UserRecord) {
return {
displayName: user?.displayName,
email: user?.email,
uid: user?.uid,
emailVerified: user?.emailVerified,
photoURL: user?.photoURL,
timestamp: new Date(),
};
}
async function insertErrorRecordToDB(errorRecord: object) {
await db.collection("_FT_ERRORS").add(errorRecord);
}
async function insertErrorToStreamer(errorRecord: object, streamLogger) {
let errorString = "";
for (const key of [
"command",
"description",
"functionConfigTs",
"sparksConfig",
"stderr",
"errorStackTrace",
]) {
const value = errorRecord[key];
if (value) {
errorString += `\n\n${key}: ${value}`;
}
}
await streamLogger.error(errorString);
}
function commandErrorHandler(
meta: {
user: admin.auth.UserRecord;
description?: string;
functionConfigTs?: string;
sparksConfig?: string;
},
streamLogger
) {
return async function (error, stdout, stderr) {
await streamLogger.info(stdout);
if (!error) {
return;
}
const errorRecord = {
errorType: "commandError",
ranBy: firetableUser(meta.user),
createdAt: admin.firestore.FieldValue.serverTimestamp(),
stdout: stdout ?? "",
stderr: stderr ?? "",
errorStackTrace: error?.stack ?? "",
command: error?.cmd ?? "",
description: meta?.description ?? "",
functionConfigTs: meta?.functionConfigTs ?? "",
sparksConfig: meta?.sparksConfig ?? "",
};
await insertErrorToStreamer(errorRecord, streamLogger);
insertErrorRecordToDB(errorRecord);
};
}
async function logErrorToDB(
data: {
errorDescription: string;
errorExtraInfo?: string;
errorTraceStack?: string;
user: admin.auth.UserRecord;
sparksConfig?: string;
},
streamLogger?
) {
console.error(data.errorDescription);
const errorRecord = {
errorType: "codeError",
ranBy: firetableUser(data.user),
description: data.errorDescription,
createdAt: admin.firestore.FieldValue.serverTimestamp(),
sparksConfig: data?.sparksConfig ?? "",
errorExtraInfo: data?.errorExtraInfo ?? "",
errorStackTrace: data?.errorTraceStack ?? "",
};
if (streamLogger) {
await insertErrorToStreamer(errorRecord, streamLogger);
}
insertErrorRecordToDB(errorRecord);
}
function parseSparksConfig(
sparks: string | undefined,
user: admin.auth.UserRecord,
streamLogger
) {
if (sparks) {
try {
// remove leading "sparks.config(" and trailing ")"
return sparks
.replace(/^(\s*)sparks.config\(/, "")
.replace(/\);?\s*$/, "");
} catch (error) {
logErrorToDB(
{
errorDescription: "Sparks is not wrapped with sparks.config",
errorTraceStack: error.stack,
user,
sparksConfig: sparks,
},
streamLogger
);
}
}
return "[]";
}
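// Illustrative example: given the string
//   sparks.config([{ type: "bigqueryIndex" }])
// parseSparksConfig returns the inner array literal string
//   [{ type: "bigqueryIndex" }]
// and falls back to "[]" when sparks is undefined or cannot be unwrapped.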
async function createStreamLogger(tableConfigPath: string) {
const startTimeStamp = Date.now();
const fullLog: {
log: string;
level: "info" | "error";
timestamp: number;
}[] = [];
const logRef = db
.doc(tableConfigPath)
.collection("ftBuildLogs")
.doc(startTimeStamp.toString());
await logRef.set({ startTimeStamp, status: "BUILDING" });
console.log(
`streamLogger created. tableConfigPath: ${tableConfigPath}, startTimeStamp: ${startTimeStamp}`
);
return {
info: async (log: string) => {
console.log(log);
fullLog.push({
log,
level: "info",
timestamp: Date.now(),
});
await logRef.update({
fullLog,
});
},
error: async (log: string) => {
console.error(log);
fullLog.push({
log,
level: "error",
timestamp: Date.now(),
});
await logRef.update({
fullLog,
});
},
end: async () => {
const logsDoc = await logRef.get();
const errorLog = logsDoc
.get("fullLog")
.filter((log) => log.level === "error");
if (errorLog.length !== 0) {
console.log("streamLogger marked as FAIL");
await logRef.update({
status: "FAIL",
failTimeStamp: Date.now(),
});
} else {
console.log("streamLogger marked as SUCCESS");
await logRef.update({
status: "SUCCESS",
successTimeStamp: Date.now(),
});
}
},
fail: async () => {
console.log("streamLogger marked as FAIL");
await logRef.update({
status: "FAIL",
failTimeStamp: Date.now(),
});
},
};
}
export {
commandErrorHandler,
logErrorToDB,
parseSparksConfig,
createStreamLogger,
};

File diff suppressed because it is too large.

View File

@@ -16,43 +16,46 @@
"@material-ui/lab": "^4.0.0-alpha.56",
"@material-ui/pickers": "^3.2.10",
"@mdi/js": "^5.8.55",
"@monaco-editor/react": "^3.5.5",
"@monaco-editor/react": "^4.1.0",
"@tinymce/tinymce-react": "^3.4.0",
"algoliasearch": "^4.8.6",
"ansi-to-react": "^6.1.5",
"chroma-js": "^2.1.0",
"csv-parse": "^4.15.3",
"date-fns": "^2.19.0",
"dompurify": "^2.2.6",
"file-saver": "^2.0.5",
"firebase": "^7.23.0",
"firebase": "8.6.8",
"hotkeys-js": "^3.7.2",
"json-format": "^1.0.1",
"json2csv": "^5.0.1",
"json2csv": "^5.0.6",
"jszip": "^3.6.0",
"lodash": "^4.17.20",
"lodash": "^4.17.21",
"moment": "^2.29.1",
"query-string": "^6.8.3",
"react": "^16.9.0",
"react-beautiful-dnd": "^13.0.0",
"react-color": "^2.17.3",
"react-data-grid": "^7.0.0-canary.27",
"react-data-grid": "7.0.0-canary.30",
"react-div-100vh": "^0.3.8",
"react-dnd": "^11.1.3",
"react-dnd-html5-backend": "^11.1.3",
"react-dom": "^16.9.0",
"react-dropzone": "^10.1.8",
"react-hook-form": "^6.15.4",
"react-hook-form": "^6.15.5",
"react-image": "^4.0.3",
"react-joyride": "^2.3.0",
"react-json-view": "^1.19.1",
"react-router-dom": "^5.0.1",
"react-scripts": "^3.4.3",
"react-scroll-sync": "^0.8.0",
"react-usestateref": "^1.0.5",
"serve": "^11.3.2",
"tinymce": "^5.2.0",
"typescript": "^3.7.2",
"use-algolia": "^1.4.1",
"use-debounce": "^3.3.0",
"use-persisted-state": "^0.3.0",
"use-persisted-state": "^0.3.3",
"yarn": "^1.22.10",
"yup": "^0.32.9"
},
@@ -67,7 +70,7 @@
"deploy": "firebase deploy"
},
"engines": {
"node": ">=10 <13"
"node": ">=10"
},
"eslintConfig": {
"extends": "react-app"
@@ -86,9 +89,9 @@
},
"devDependencies": {
"@types/chroma-js": "^2.1.3",
"@types/dompurify": "^2.0.1",
"@types/dompurify": "^2.2.1",
"@types/file-saver": "^2.0.1",
"@types/lodash": "^4.14.138",
"@types/lodash": "^4.14.168",
"@types/node": "^14.14.6",
"@types/react": "^16.9.2",
"@types/react-beautiful-dnd": "^13.0.0",

View File

@@ -65,10 +65,15 @@
Learn how to configure a non-root public URL by running `npm run build`.
-->
<link rel="stylesheet" href="https://use.typekit.net/ngg8buf.css" />
<link rel="preconnect" href="https://fonts.gstatic.com" />
<link
href="https://fonts.googleapis.com/css?family=Open+Sans:400,400i,700,700i&display=swap"
rel="stylesheet"
/>
<link
href="https://fonts.googleapis.com/css2?family=IBM+Plex+Mono&display=swap"
rel="stylesheet"
/>
<title>Firetable</title>
</head>

446
www/public/storage.d.ts vendored Normal file
View File

@@ -0,0 +1,446 @@
/* eslint-disable @typescript-eslint/ban-types */
// firetable/ft_build/functions/node_modules/@google-cloud/storage/build/src/bucket.d.ts
declare class Bucket {
/**
* The bucket's name.
* @name Bucket#name
* @type {string}
*/
name: string;
/**
* A reference to the {@link Storage} associated with this {@link Bucket}
* instance.
* @name Bucket#storage
* @type {Storage}
*/
storage: Storage;
/**
* A user project to apply to each request from this bucket.
* @name Bucket#userProject
* @type {string}
*/
userProject?: string;
/**
* Cloud Storage uses access control lists (ACLs) to manage object and
* bucket access. ACLs are the mechanism you use to share objects with other
* users and allow other users to access your buckets and objects.
*
* An ACL consists of one or more entries, where each entry grants permissions
* to an entity. Permissions define the actions that can be performed against
* an object or bucket (for example, `READ` or `WRITE`); the entity defines
* who the permission applies to (for example, a specific user or group of
* users).
*
* The `acl` object on a Bucket instance provides methods to get you a list of
* the ACLs defined on your bucket, as well as set, update, and delete them.
*
* Buckets also have
* [default
* ACLs](https://cloud.google.com/storage/docs/access-control/lists#default)
* for all created files. Default ACLs specify permissions that all new
* objects added to the bucket will inherit by default. You can add, delete,
* get, and update entities and permissions for these as well with
* {@link Bucket#acl.default}.
*
* @see [About Access Control Lists]{@link http://goo.gl/6qBBPO}
* @see [Default ACLs]{@link https://cloud.google.com/storage/docs/access-control/lists#default}
*
* @name Bucket#acl
* @mixes Acl
* @property {Acl} default Cloud Storage Buckets have
* [default
* ACLs](https://cloud.google.com/storage/docs/access-control/lists#default)
* for all created files. You can add, delete, get, and update entities and
* permissions for these as well. The method signatures and examples are all
* the same, after only prefixing the method call with `default`.
*
* @example
* const {Storage} = require('@google-cloud/storage');
* const storage = new Storage();
*
* //-
* // Make a bucket's contents publicly readable.
* //-
* const myBucket = storage.bucket('my-bucket');
*
* const options = {
* entity: 'allUsers',
* role: storage.acl.READER_ROLE
* };
*
* myBucket.acl.add(options, function(err, aclObject) {});
*
* //-
* // If the callback is omitted, we'll return a Promise.
* //-
* myBucket.acl.add(options).then(function(data) {
* const aclObject = data[0];
* const apiResponse = data[1];
* });
*
* @example <caption>include:samples/acl.js</caption>
* region_tag:storage_print_bucket_acl
* Example of printing a bucket's ACL:
*
* @example <caption>include:samples/acl.js</caption>
* region_tag:storage_print_bucket_acl_for_user
* Example of printing a bucket's ACL for a specific user:
*
* @example <caption>include:samples/acl.js</caption>
* region_tag:storage_add_bucket_owner
* Example of adding an owner to a bucket:
*
* @example <caption>include:samples/acl.js</caption>
* region_tag:storage_remove_bucket_owner
* Example of removing an owner from a bucket:
*
* @example <caption>include:samples/acl.js</caption>
* region_tag:storage_add_bucket_default_owner
* Example of adding a default owner to a bucket:
*
* @example <caption>include:samples/acl.js</caption>
* region_tag:storage_remove_bucket_default_owner
* Example of removing a default owner from a bucket:
*/
acl: Acl;
/**
* Get and set IAM policies for your bucket.
*
* @name Bucket#iam
* @mixes Iam
*
* @see [Cloud Storage IAM Management](https://cloud.google.com/storage/docs/access-control/iam#short_title_iam_management)
* @see [Granting, Changing, and Revoking Access](https://cloud.google.com/iam/docs/granting-changing-revoking-access)
* @see [IAM Roles](https://cloud.google.com/iam/docs/understanding-roles)
*
* @example
* const {Storage} = require('@google-cloud/storage');
* const storage = new Storage();
* const bucket = storage.bucket('albums');
*
* //-
* // Get the IAM policy for your bucket.
* //-
* bucket.iam.getPolicy(function(err, policy) {
* console.log(policy);
* });
*
* //-
* // If the callback is omitted, we'll return a Promise.
* //-
* bucket.iam.getPolicy().then(function(data) {
* const policy = data[0];
* const apiResponse = data[1];
* });
*
* @example <caption>include:samples/iam.js</caption>
* region_tag:storage_view_bucket_iam_members
* Example of retrieving a bucket's IAM policy:
*
* @example <caption>include:samples/iam.js</caption>
* region_tag:storage_add_bucket_iam_member
* Example of adding to a bucket's IAM policy:
*
* @example <caption>include:samples/iam.js</caption>
* region_tag:storage_remove_bucket_iam_member
* Example of removing from a bucket's IAM policy:
*/
iam: Iam;
/**
* Get {@link File} objects for the files currently in the bucket as a
* readable object stream.
*
* @method Bucket#getFilesStream
* @param {GetFilesOptions} [query] Query object for listing files.
* @returns {ReadableStream} A readable stream that emits {@link File} instances.
*
* @example
* const {Storage} = require('@google-cloud/storage');
* const storage = new Storage();
* const bucket = storage.bucket('albums');
*
* bucket.getFilesStream()
* .on('error', console.error)
* .on('data', function(file) {
* // file is a File object.
* })
* .on('end', function() {
* // All files retrieved.
* });
*
* //-
* // If you anticipate many results, you can end a stream early to prevent
* // unnecessary processing and API requests.
* //-
* bucket.getFilesStream()
* .on('data', function(file) {
* this.end();
* });
*
* //-
* // If you're filtering files with a delimiter, you should use
* // {@link Bucket#getFiles} and set `autoPaginate: false` in order to
* // preserve the `apiResponse` argument.
* //-
* const prefixes = [];
*
* function callback(err, files, nextQuery, apiResponse) {
* prefixes = prefixes.concat(apiResponse.prefixes);
*
* if (nextQuery) {
* bucket.getFiles(nextQuery, callback);
* } else {
* // prefixes = The finished array of prefixes.
* }
* }
*
* bucket.getFiles({
* autoPaginate: false,
* delimiter: '/'
* }, callback);
*/
getFilesStream: Function;
signer?: URLSigner;
constructor(storage: Storage, name: string, options?: BucketOptions);
addLifecycleRule(
rule: LifecycleRule,
options?: AddLifecycleRuleOptions
): Promise<SetBucketMetadataResponse>;
addLifecycleRule(
rule: LifecycleRule,
options: AddLifecycleRuleOptions,
callback: SetBucketMetadataCallback
): void;
addLifecycleRule(
rule: LifecycleRule,
callback: SetBucketMetadataCallback
): void;
combine(
sources: string[] | File[],
destination: string | File,
options?: CombineOptions
): Promise<CombineResponse>;
combine(
sources: string[] | File[],
destination: string | File,
options: CombineOptions,
callback: CombineCallback
): void;
combine(
sources: string[] | File[],
destination: string | File,
callback: CombineCallback
): void;
createChannel(
id: string,
config: CreateChannelConfig,
options?: CreateChannelOptions
): Promise<CreateChannelResponse>;
createChannel(
id: string,
config: CreateChannelConfig,
callback: CreateChannelCallback
): void;
createChannel(
id: string,
config: CreateChannelConfig,
options: CreateChannelOptions,
callback: CreateChannelCallback
): void;
createNotification(
topic: string,
options?: CreateNotificationOptions
): Promise<CreateNotificationResponse>;
createNotification(
topic: string,
options: CreateNotificationOptions,
callback: CreateNotificationCallback
): void;
createNotification(topic: string, callback: CreateNotificationCallback): void;
deleteFiles(query?: DeleteFilesOptions): Promise<void>;
deleteFiles(callback: DeleteFilesCallback): void;
deleteFiles(query: DeleteFilesOptions, callback: DeleteFilesCallback): void;
deleteLabels(labels?: string | string[]): Promise<DeleteLabelsResponse>;
deleteLabels(callback: DeleteLabelsCallback): void;
deleteLabels(labels: string | string[], callback: DeleteLabelsCallback): void;
disableRequesterPays(): Promise<DisableRequesterPaysResponse>;
disableRequesterPays(callback: DisableRequesterPaysCallback): void;
enableLogging(
config: EnableLoggingOptions
): Promise<SetBucketMetadataResponse>;
enableLogging(
config: EnableLoggingOptions,
callback: SetBucketMetadataCallback
): void;
enableRequesterPays(): Promise<EnableRequesterPaysResponse>;
enableRequesterPays(callback: EnableRequesterPaysCallback): void;
/**
* Create a {@link File} object. See {@link File} to see how to handle
* the different use cases you may have.
*
* @param {string} name The name of the file in this bucket.
* @param {object} [options] Configuration options.
* @param {string|number} [options.generation] Only use a specific revision of
* this file.
* @param {string} [options.encryptionKey] A custom encryption key. See
* [Customer-supplied Encryption
* Keys](https://cloud.google.com/storage/docs/encryption#customer-supplied).
* @param {string} [options.kmsKeyName] The name of the Cloud KMS key that will
* be used to encrypt the object. Must be in the format:
* `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`.
* KMS key ring must use the same location as the bucket.
* @returns {File}
*
* @example
* const {Storage} = require('@google-cloud/storage');
* const storage = new Storage();
* const bucket = storage.bucket('albums');
* const file = bucket.file('my-existing-file.png');
*/
file(name: string, options?: FileOptions): File;
getFiles(query?: GetFilesOptions): Promise<GetFilesResponse>;
getFiles(query: GetFilesOptions, callback: GetFilesCallback): void;
getFiles(callback: GetFilesCallback): void;
getLabels(options: GetLabelsOptions): Promise<GetLabelsResponse>;
getLabels(callback: GetLabelsCallback): void;
getLabels(options: GetLabelsOptions, callback: GetLabelsCallback): void;
getNotifications(
options?: GetNotificationsOptions
): Promise<GetNotificationsResponse>;
getNotifications(callback: GetNotificationsCallback): void;
getNotifications(
options: GetNotificationsOptions,
callback: GetNotificationsCallback
): void;
getSignedUrl(cfg: GetBucketSignedUrlConfig): Promise<GetSignedUrlResponse>;
getSignedUrl(
cfg: GetBucketSignedUrlConfig,
callback: GetSignedUrlCallback
): void;
lock(metageneration: number | string): Promise<BucketLockResponse>;
lock(metageneration: number | string, callback: BucketLockCallback): void;
makePrivate(
options?: MakeBucketPrivateOptions
): Promise<MakeBucketPrivateResponse>;
makePrivate(callback: MakeBucketPrivateCallback): void;
makePrivate(
options: MakeBucketPrivateOptions,
callback: MakeBucketPrivateCallback
): void;
makePublic(
options?: MakeBucketPublicOptions
): Promise<MakeBucketPublicResponse>;
makePublic(callback: MakeBucketPublicCallback): void;
makePublic(
options: MakeBucketPublicOptions,
callback: MakeBucketPublicCallback
): void;
/**
* Get a reference to a Cloud Pub/Sub Notification.
*
* @param {string} id ID of notification.
* @returns {Notification}
* @see Notification
*
* @example
* const {Storage} = require('@google-cloud/storage');
* const storage = new Storage();
* const bucket = storage.bucket('my-bucket');
* const notification = bucket.notification('1');
*/
notification(id: string): Notification;
removeRetentionPeriod(): Promise<SetBucketMetadataResponse>;
removeRetentionPeriod(callback: SetBucketMetadataCallback): void;
request(reqOpts: DecorateRequestOptions): Promise<[ResponseBody, Metadata]>;
request(
reqOpts: DecorateRequestOptions,
callback: BodyResponseCallback
): void;
setLabels(
labels: Labels,
options?: SetLabelsOptions
): Promise<SetLabelsResponse>;
setLabels(labels: Labels, callback: SetLabelsCallback): void;
setLabels(
labels: Labels,
options: SetLabelsOptions,
callback: SetLabelsCallback
): void;
setRetentionPeriod(duration: number): Promise<SetBucketMetadataResponse>;
setRetentionPeriod(
duration: number,
callback: SetBucketMetadataCallback
): void;
setCorsConfiguration(
corsConfiguration: Cors[]
): Promise<SetBucketMetadataResponse>;
setCorsConfiguration(
corsConfiguration: Cors[],
callback: SetBucketMetadataCallback
): void;
setStorageClass(
storageClass: string,
options?: SetBucketStorageClassOptions
): Promise<SetBucketMetadataResponse>;
setStorageClass(
storageClass: string,
callback: SetBucketStorageClassCallback
): void;
setStorageClass(
storageClass: string,
options: SetBucketStorageClassOptions,
callback: SetBucketStorageClassCallback
): void;
/**
* Set a user project to be billed for all requests made from this Bucket
* object and any files referenced from this Bucket object.
*
* @param {string} userProject The user project.
*
* @example
* const {Storage} = require('@google-cloud/storage');
* const storage = new Storage();
* const bucket = storage.bucket('albums');
*
* bucket.setUserProject('grape-spaceship-123');
*/
setUserProject(userProject: string): void;
upload(pathString: string, options?: UploadOptions): Promise<UploadResponse>;
upload(
pathString: string,
options: UploadOptions,
callback: UploadCallback
): void;
upload(pathString: string, callback: UploadCallback): void;
makeAllFilesPublicPrivate_(
options?: MakeAllFilesPublicPrivateOptions
): Promise<MakeAllFilesPublicPrivateResponse>;
makeAllFilesPublicPrivate_(callback: MakeAllFilesPublicPrivateCallback): void;
makeAllFilesPublicPrivate_(
options: MakeAllFilesPublicPrivateOptions,
callback: MakeAllFilesPublicPrivateCallback
): void;
getId(): string;
}
/*! firebase-admin v9.4.2 */
declare namespace firebasestorage {
/**
* The default `Storage` service if no
* app is provided or the `Storage` service associated with the provided
* app.
*/
export class Storage {
/**
* Optional app whose `Storage` service to
* return. If not provided, the default `Storage` service will be returned.
*/
app: app.App;
/**
* @returns A [Bucket](https://cloud.google.com/nodejs/docs/reference/storage/latest/Bucket)
* instance as defined in the `@google-cloud/storage` package.
*/
bucket(name?: string): Bucket;
}
}

View File

@@ -14,6 +14,7 @@ import { SnackProvider } from "contexts/SnackContext";
import ConfirmationProvider from "components/ConfirmationDialog/Provider";
import { AppProvider } from "contexts/AppContext";
import { FiretableContextProvider } from "contexts/FiretableContext";
import { SnackLogProvider } from "contexts/SnackLogContext";
import routes from "constants/routes";
import AuthView from "pages/Auth/GoogleAuth";
@@ -48,81 +49,87 @@ export default function App() {
<AppProvider>
<ConfirmationProvider>
<SnackProvider>
<CustomBrowserRouter>
<Suspense fallback={<Loading fullScreen />}>
<Switch>
<Route exact path={routes.auth} render={() => <AuthView />} />
<Route
exact
path={routes.impersonatorAuth}
render={() => <ImpersonatorAuthPage />}
/>
<Route
exact
path={routes.authSetup}
render={() => <AuthSetupGuidePage />}
/>
<Route
exact
path={routes.jwtAuth}
render={() => <JwtAuthPage />}
/>
<Route
exact
path={routes.signOut}
render={() => <SignOutView />}
/>
<Route exact path={"/test"} render={() => <TestView />} />
<PrivateRoute
exact
path={[
routes.home,
routes.tableWithId,
routes.tableGroupWithId,
routes.gridWithId,
]}
render={() => (
<FiretableContextProvider>
<Switch>
<PrivateRoute
exact
path={routes.home}
render={() => <HomePage />}
/>
<PrivateRoute
path={routes.tableWithId}
render={() => <TablePage />}
/>
<PrivateRoute
path={routes.tableGroupWithId}
render={() => <TablePage />}
/>
</Switch>
</FiretableContextProvider>
)}
/>
<SnackLogProvider>
<CustomBrowserRouter>
<Suspense fallback={<Loading fullScreen />}>
<Switch>
<Route
exact
path={routes.auth}
render={() => <AuthView />}
/>
<Route
exact
path={routes.impersonatorAuth}
render={() => <ImpersonatorAuthPage />}
/>
<Route
exact
path={routes.authSetup}
render={() => <AuthSetupGuidePage />}
/>
<Route
exact
path={routes.jwtAuth}
render={() => <JwtAuthPage />}
/>
<Route
exact
path={routes.signOut}
render={() => <SignOutView />}
/>
<Route exact path={"/test"} render={() => <TestView />} />
<PrivateRoute
exact
path={[
routes.home,
routes.tableWithId,
routes.tableGroupWithId,
routes.gridWithId,
]}
render={() => (
<FiretableContextProvider>
<Switch>
<PrivateRoute
exact
path={routes.home}
render={() => <HomePage />}
/>
<PrivateRoute
path={routes.tableWithId}
render={() => <TablePage />}
/>
<PrivateRoute
path={routes.tableGroupWithId}
render={() => <TablePage />}
/>
</Switch>
</FiretableContextProvider>
)}
/>
<PrivateRoute
render={() => (
<EmptyState
message="Page Not Found"
description={
<Button
component={Link}
to={routes.home}
variant="outlined"
style={{ marginTop: 8 }}
>
Go Home
</Button>
}
fullScreen
/>
)}
/>
</Switch>
</Suspense>
</CustomBrowserRouter>
<PrivateRoute
render={() => (
<EmptyState
message="Page Not Found"
description={
<Button
component={Link}
to={routes.home}
variant="outlined"
style={{ marginTop: 8 }}
>
Go Home
</Button>
}
fullScreen
/>
)}
/>
</Switch>
</Suspense>
</CustomBrowserRouter>
</SnackLogProvider>
</SnackProvider>
</ConfirmationProvider>
</AppProvider>

View File

@@ -18,10 +18,12 @@ export const HEADING_FONT = "Europa, sans-serif";
export const BODY_FONT = '"Open Sans", sans-serif';
export const MONO_FONT =
"SFMono-Regular, Consolas, Liberation Mono, Menlo, monospace";
export const LOG_FONT = "IBM Plex Mono, monospace";
export const ANTLER_RED = "#ED4747";
export const SECONDARY_GREY = "#282829";
export const SECONDARY_TEXT = "rgba(0, 0, 0, 0.6)";
export const LOG_TEXT = "#cccccc";
export const ERROR = "#b00020";
export const ROOT_FONT_SIZE = 16;
@@ -48,7 +50,7 @@ export const themeBase = {
palette: {
primary: { main: ANTLER_RED, light: ANTLER_RED },
secondary: { main: SECONDARY_GREY },
text: { secondary: SECONDARY_TEXT },
text: { secondary: SECONDARY_TEXT, log: LOG_TEXT },
error: { main: ERROR },
},
typography: {
@@ -144,6 +146,7 @@ export const darkThemeBase = {
text: {
// primary: "rgba(255, 255, 255, 0.87)",
secondary: "rgba(255, 255, 255, 0.7)",
log: "black",
// disabled: "rgba(255, 255, 255, 0.38)",
},
error: { main: "#CF6679" },

View File

@@ -0,0 +1,11 @@
import React from "react";
import SvgIcon, { SvgIconProps } from "@material-ui/core/SvgIcon";
import { mdiPulse } from "@mdi/js";
export default function Status(props: SvgIconProps) {
return (
<SvgIcon viewBox="0 0 24 24" {...props}>
<path d={mdiPulse} />
</SvgIcon>
);
}

View File

@@ -1,6 +1,6 @@
import React, { useRef, useMemo, useState } from "react";
import clsx from "clsx";
import Editor, { monaco } from "@monaco-editor/react";
import Editor, { useMonaco } from "@monaco-editor/react";
import { useTheme, createStyles, makeStyles } from "@material-ui/core/styles";
@@ -42,6 +42,7 @@ export default function CodeEditor({
const [initialEditorValue] = useState(value ?? "");
const { tableState } = useFiretableContext();
const classes = useStyles();
const monacoInstance = useMonaco();
const editorRef = useRef<any>();
@@ -49,39 +50,42 @@ export default function CodeEditor({
editorRef.current = editor;
}
function listenEditorChanges() {
setTimeout(() => {
editorRef.current?.onDidChangeModelContent((ev) => {
onChange(editorRef.current.getValue());
});
}, 2000);
}
const themeTransformer = (theme: string) => {
switch (theme) {
case "dark":
return "vs-dark";
default:
return theme;
}
};
useMemo(async () => {
monaco
.init()
.then((monacoInstance) => {
monacoInstance.languages.typescript.javascriptDefaults.setDiagnosticsOptions(
{
noSemanticValidation: true,
noSyntaxValidation: false,
}
);
// compiler options
monacoInstance.languages.typescript.javascriptDefaults.setCompilerOptions(
{
target: monacoInstance.languages.typescript.ScriptTarget.ES5,
allowNonTsExtensions: true,
}
);
})
.catch((error) =>
console.error(
"An error occurred during initialization of Monaco: ",
error
)
if (!monacoInstance) {
// useMonaco returns a monaco instance but initialisation is done asynchronously
// don't execute the logic until the instance is initialised
return;
}
try {
monacoInstance.languages.typescript.javascriptDefaults.setDiagnosticsOptions(
{
noSemanticValidation: true,
noSyntaxValidation: false,
}
);
listenEditorChanges();
// compiler options
monacoInstance.languages.typescript.javascriptDefaults.setCompilerOptions(
{
target: monacoInstance.languages.typescript.ScriptTarget.ES5,
allowNonTsExtensions: true,
}
);
} catch (error) {
console.error(
"An error occurred during initialization of Monaco: ",
error
);
}
}, [tableState?.columns]);
return (
@@ -90,9 +94,9 @@ export default function CodeEditor({
className={clsx(classes.editorWrapper, wrapperProps?.className)}
>
<Editor
theme={theme.palette.type}
theme={themeTransformer(theme.palette.type)}
height={height}
editorDidMount={handleEditorDidMount}
onMount={handleEditorDidMount}
language="javascript"
value={initialEditorValue}
options={{
@@ -100,6 +104,7 @@ export default function CodeEditor({
fontFamily: theme.typography.fontFamilyMono,
...editorOptions,
}}
onChange={onChange as any}
/>
</div>
);

View File

@@ -0,0 +1,73 @@
import React from "react";
import { useFiretableContext } from "contexts/FiretableContext";
import { Box, Tooltip, Button, Chip } from "@material-ui/core";
import OpenIcon from "@material-ui/icons/OpenInNew";
function AvailableValueTag({ label, details }) {
return (
<Tooltip
style={{
zIndex: 9999,
marginRight: 4,
}}
title={<>{details}</>}
>
<Chip label={label} size="small" />
</Tooltip>
);
}
/* TODO implement parameter "tags" that defines available tags and values
{
row: "You have access to the object 'row' at...",
ref: "...",
...: ...
}
*/
export default function CodeEditorHelper({ docLink }) {
const { tableState } = useFiretableContext();
const availableVariables = [
{
key: "row",
description: `row has the value of doc.data(). It has type definitions based on this table's schema, but you can access any field in the document.`,
},
{
key: "db",
description: `db provides access to the Firestore database instance of this project, giving you access to any collection or document in it.`,
},
{
key: "ref",
description: `ref is the Firestore reference to the current row's document (i.e. doc.ref).`,
},
{
key: "auth",
description: `auth provides access to the Firebase Auth instance; it can be used to manage auth users or generate tokens.`,
},
{
key: "storage",
description: `storage provides access to Firebase Storage; storage.bucket() returns the default storage bucket of the Firebase project.`,
},
{
key: "utilFns",
description: `utilFns provides a set of commonly used helper functions, such as easy access to GCP Secret Manager.`,
},
];
return (
<Box marginBottom={1} display="flex" justifyContent="space-between">
<Box>
You have access to:{" "}
{availableVariables.map((v) => (
<AvailableValueTag key={v.key} label={v.key} details={v.description} />
))}
</Box>
<Button
size="small"
endIcon={<OpenIcon />}
target="_blank"
href={docLink}
>
Examples & Docs
</Button>
</Box>
);
}
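
For illustration, a small field-level script of the kind this helper describes, using the row, db and ref variables listed above (the collection and field names are hypothetical):

const profile = await db.collection("profiles").doc(row.profileId).get();
await ref.update({ displayName: profile.get("displayName") });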

View File

@@ -2,7 +2,6 @@ import React, { lazy, Suspense } from "react";
import clsx from "clsx";
import { TextField, TextFieldProps } from "@material-ui/core";
import useStyles from "./styles";
import Loading from "components/Loading";
import ErrorBoundary from "components/ErrorBoundary";
@@ -26,7 +25,7 @@ export interface IConnectServiceSelectProps {
className?: string;
/** Override any props of the root MUI `TextField` component */
TextFieldProps?: Partial<TextFieldProps>;
docRef: firebase.firestore.DocumentReference;
docRef: firebase.default.firestore.DocumentReference;
}
export default function ConnectServiceSelect({

View File

@@ -1,146 +0,0 @@
import React, { useState, useEffect } from "react";
import useAlgolia from "use-algolia";
import _find from "lodash/find";
import { useDebounce } from "use-debounce";
import MultiSelect, { MultiSelectProps } from "@antlerengineering/multiselect";
import Loading from "components/Loading";
export type ConnectTableValue = {
snapshot: any;
docPath: string;
};
export interface IConnectTableSelectProps {
value: ConnectTableValue[];
onChange: (value: ConnectTableValue[]) => void;
row: any;
column: any;
collectionPath: string;
config: {
filters: string;
primaryKeys: string[];
secondaryKeys: string[];
multiple?: boolean;
[key: string]: any;
};
editable?: boolean;
/** Optional style overrides for root MUI `TextField` component */
className?: string;
/** Override any props of the root MUI `TextField` component */
TextFieldProps?: MultiSelectProps<ConnectTableValue[]>["TextFieldProps"];
}
export default function ConnectTableSelect({
value = [],
onChange,
row,
column,
collectionPath,
config,
editable,
className,
TextFieldProps = {},
}: IConnectTableSelectProps) {
// Store a local copy of the value so the dropdown doesn't automatically close
// when the user selects a new item and we allow for multiple selections
const [localValue, setLocalValue] = useState(
Array.isArray(value) ? value : []
);
const [algoliaState, requestDispatch, , setAlgoliaConfig] = useAlgolia(
process.env.REACT_APP_ALGOLIA_APP_ID!,
process.env.REACT_APP_ALGOLIA_SEARCH_API_KEY!,
"" // Dont choose the index until the user opens the dropdown
);
const algoliaIndex = collectionPath ?? config.index;
const options = algoliaState.hits.map((hit) => ({
label: config?.primaryKeys?.map((key: string) => hit[key]).join(" "),
value: hit.objectID,
}));
// Pass a list of objectIDs to MultiSelect
const sanitisedValue = localValue.map(
(item) => item.docPath.split("/")[item.docPath.split("/").length - 1]
);
const handleChange = (_newValue) => {
// Ensure we return an array
const newValue = Array.isArray(_newValue)
? _newValue
: _newValue !== null
? [_newValue]
: [];
// Calculate new value
const newLocalValue = newValue.map((objectID) => {
// If this objectID is already in the previous value, use that previous
// value's snapshot (in case it points to an object not in the current
// Algolia query)
const existingMatch = _find(localValue, {
docPath: `${algoliaIndex}/${objectID}`,
});
if (existingMatch) return existingMatch;
// If this is a completely new selection, grab the snapshot from the
// current Algolia query
const match = _find(algoliaState.hits, { objectID });
const { _highlightResult, ...snapshot } = match;
return {
snapshot,
docPath: `${algoliaIndex}/${snapshot.objectID}`,
};
});
// If !multiple, we MUST change the value (bypassing localValue),
// otherwise `setLocalValue` won't be called in time for the new
// `localValue` to be read by `handleSave`
if (config.multiple === false) onChange(newLocalValue);
// Otherwise, `setLocalValue` until user closes dropdown
else setLocalValue(newLocalValue);
};
// Save when user closes dropdown
const handleSave = () => {
if (config.multiple !== false) onChange(localValue);
};
// Change MultiSelect input field to search Algolia directly
const [search, setSearch] = useState("");
const [debouncedSearch] = useDebounce(search, 1000);
useEffect(() => {
requestDispatch({ query: debouncedSearch });
}, [debouncedSearch]);
return (
<MultiSelect
value={config.multiple === false ? sanitisedValue[0] : sanitisedValue}
onChange={handleChange}
onOpen={() => setAlgoliaConfig({ indexName: algoliaIndex })}
onClose={handleSave}
options={options}
TextFieldProps={{
className,
hiddenLabel: true,
...TextFieldProps,
}}
label={column?.name}
multiple={(config?.multiple ?? true) as any}
AutocompleteProps={{
loading: algoliaState.loading,
loadingText: <Loading />,
inputValue: search,
onInputChange: (_, value, reason) => {
if (reason === "input") setSearch(value);
},
filterOptions: () => options,
}}
countText={`${localValue.length} of ${
algoliaState.response?.nbHits ?? "?"
}`}
disabled={editable === false}
/>
);
}

View File

@@ -64,24 +64,6 @@ const useStyles = makeStyles((theme) =>
...theme.typography.body1,
// https://codepen.io/evank/pen/wWbRNO
background: `
linear-gradient(
var(--bg-paper) 50%,
${fade(theme.palette.background.paper, 0)}
),
linear-gradient(
${fade(theme.palette.background.paper, 0)},
var(--bg-paper) 50%
) 0 100%,
linear-gradient(
to top, ${theme.palette.divider} 1px,
${fade(theme.palette.divider, 0)}
),
linear-gradient(to top,
${theme.palette.divider} 1px,
${fade(theme.palette.divider, 0)}
) 0 calc(100% - 0.5px)`,
backgroundRepeat: "no-repeat",
backgroundColor: "var(--bg-paper)",
backgroundSize: "100% 2px, 100% 3px, 100% 1px, 100% 1px",

View File

@@ -23,7 +23,7 @@ import CheckIcon from "@material-ui/icons/Check";
import { useAppContext } from "contexts/AppContext";
import routes from "constants/routes";
import meta from "../../../package.json";
import { projectId } from "../../firebase";
const useStyles = makeStyles((theme) =>
createStyles({
spacer: {
@@ -141,7 +141,7 @@ export default function UserMenu(props: IconButtonProps) {
>
<MenuItem
component="a"
href={`https://console.firebase.google.com/project/${process.env.REACT_APP_FIREBASE_PROJECT_ID}/firestore/data~2F_FT_USERS~2F${currentUser.uid}`}
href={`https://console.firebase.google.com/project/${projectId}/firestore/data/~2F_FT_USERS~2F${currentUser.uid}`}
target="_blank"
rel="noopener"
>

View File

@@ -5,23 +5,10 @@ import { FIELDS } from "@antlerengineering/form-builder";
import HelperText from "../HelperText";
export const settings = () => [
{ type: FIELDS.heading, label: "Cloud build configuration" },
{ type: FIELDS.heading, label: "Cloud Run configuration" },
{
type: FIELDS.text,
name: "cloudBuild.branch",
label: "FT Branch",
//validation: yup.string().required("Required"),
name: "ftBuildUrl",
label: "Cloud Run trigger URL",
},
{
type: FIELDS.description,
description: (
<HelperText>Firetable branch to build cloud functions from</HelperText>
),
},
{
type: FIELDS.text,
name: "cloudBuild.triggerId",
label: "Trigger Id",
//validation: yup.string().required("Required"),
},
];
];

View File

@@ -48,12 +48,13 @@ export default function SettingsDialog({
useEffect(() => {
if (!settingsDocState.loading) {
const cloudBuild = settingsDocState?.doc?.cloudBuild;
setForm(cloudBuild ? { cloudBuild } : FORM_EMPTY_STATE);
const ftBuildUrl = settingsDocState?.doc?.ftBuildUrl;
setForm({ ftBuildUrl });
}
}, [settingsDocState.doc, open]);
const handleSubmit = (values) => {
setForm(values)
settingsDocDispatch({ action: DocActions.update, data: values });
handleClose();
};

View File

@@ -0,0 +1,119 @@
import React, { useState } from "react";
import clsx from "clsx";
import {
makeStyles,
createStyles,
Tooltip,
TooltipProps,
Button,
ButtonProps,
} from "@material-ui/core";
const useStyles = makeStyles((theme) =>
createStyles({
tooltip: {
backgroundColor: theme.palette.background.default,
boxShadow: theme.shadows[2],
...theme.typography.caption,
color: theme.palette.text.primary,
padding: 0,
},
arrow: {
"&::before": {
backgroundColor: theme.palette.background.default,
boxShadow: theme.shadows[2],
},
},
grid: {
padding: theme.spacing(2),
cursor: "default",
display: "grid",
gridTemplateColumns: "40px auto",
gap: theme.spacing(1, 2),
},
emoji: {
fontSize: `${40 / 16}rem`,
fontWeight: 400,
fontFamily:
"apple color emoji, segoe ui emoji, noto color emoji, android emoji, emojisymbols, emojione mozilla, twemoji mozilla, segoe ui symbol",
},
message: {
alignSelf: "center",
},
dismissButton: {
marginLeft: theme.spacing(-1),
gridColumn: 2,
justifySelf: "flex-start",
},
})
);
export interface IRichTooltipProps extends Partial<TooltipProps> {
render: (props: {
openTooltip: () => void;
closeTooltip: () => void;
toggleTooltip: () => void;
}) => TooltipProps["children"];
emoji?: React.ReactNode;
message: React.ReactNode;
dismissButtonText?: React.ReactNode;
dismissButtonProps?: Partial<ButtonProps>;
}
export default function RichTooltip({
render,
emoji,
message,
dismissButtonText,
dismissButtonProps,
...props
}: IRichTooltipProps) {
const classes = useStyles();
const [open, setOpen] = useState(false);
const openTooltip = () => setOpen(true);
const closeTooltip = () => setOpen(false);
const toggleTooltip = () => setOpen((state) => !state);
return (
<Tooltip
disableFocusListener
disableHoverListener
disableTouchListener
arrow
interactive
open={open}
onClose={closeTooltip}
classes={{ tooltip: classes.tooltip, arrow: classes.arrow }}
title={
<div className={classes.grid} onClick={closeTooltip}>
<span className={classes.emoji}>{emoji}</span>
<div className={classes.message}>{message}</div>
{dismissButtonText && (
<Button
{...dismissButtonProps}
onClick={closeTooltip}
className={clsx(
classes.dismissButton,
dismissButtonProps?.className
)}
>
{dismissButtonText}
</Button>
)}
</div>
}
{...props}
>
{render({ openTooltip, closeTooltip, toggleTooltip })}
</Tooltip>
);
}

View File

@@ -15,7 +15,7 @@ import { FiretableState } from "hooks/useFiretable";
export interface IAutosaveProps
extends Pick<UseFormMethods, "reset" | "formState"> {
control: Control;
docRef: firebase.firestore.DocumentReference;
docRef: firebase.default.firestore.DocumentReference;
row: any;
}

Some files were not shown because too many files have changed in this diff.