ft build: add logs to db for each table

This commit is contained in:
Bobby
2021-06-16 19:55:39 +10:00
parent 0e2746d036
commit af58f53b29
5 changed files with 169 additions and 69 deletions

View File

@@ -7,57 +7,70 @@ import admin from "firebase-admin";
export default async function generateConfig(
schemaPath: string,
user: admin.auth.UserRecord
user: admin.auth.UserRecord,
streamLogger
) {
return await generateConfigFromTableSchema(schemaPath, user).then(
async (success) => {
if (!success) {
console.log("generateConfigFromTableSchema failed to complete");
return await generateConfigFromTableSchema(
schemaPath,
user,
streamLogger
).then(async (success) => {
if (!success) {
await streamLogger(`generateConfigFromTableSchema failed to complete`);
return false;
}
await streamLogger(`generateConfigFromTableSchema done`);
const configFile = fs.readFileSync(
path.resolve(__dirname, "../functions/src/functionConfig.ts"),
"utf-8"
);
await streamLogger(`configFile: ${JSON.stringify(configFile)}`);
const requiredDependencies = configFile.match(
/(?<=(require\(("|'))).*?(?=("|')\))/g
);
if (requiredDependencies) {
const packgesAdded = await addPackages(
requiredDependencies.map((p: any) => ({ name: p })),
user,
streamLogger
);
if (!packgesAdded) {
return false;
}
}
await streamLogger(
`requiredDependencies: ${JSON.stringify(requiredDependencies)}`
);
console.log("generateConfigFromTableSchema done");
const configFile = fs.readFileSync(
path.resolve(__dirname, "../functions/src/functionConfig.ts"),
"utf-8"
);
const requiredDependencies = configFile.match(
/(?<=(require\(("|'))).*?(?=("|')\))/g
);
if (requiredDependencies) {
const packgesAdded = await addPackages(
requiredDependencies.map((p: any) => ({ name: p })),
user
);
if (!packgesAdded) {
return false;
}
}
const isFunctionConfigValid = await asyncExecute(
"cd build/functions/src; tsc functionConfig.ts",
commandErrorHandler({
const isFunctionConfigValid = await asyncExecute(
"cd build/functions/src; tsc functionConfig.ts",
commandErrorHandler(
{
user,
functionConfigTs: configFile,
description: `Invalid compiled functionConfig.ts`,
})
);
if (!isFunctionConfigValid) {
},
streamLogger
)
);
await streamLogger(
`isFunctionConfigValid: ${JSON.stringify(isFunctionConfigValid)}`
);
if (!isFunctionConfigValid) {
return false;
}
const { sparksConfig } = require("../functions/src/functionConfig.js");
const requiredSparks = sparksConfig.map((s: any) => s.type);
await streamLogger(`requiredSparks: ${JSON.stringify(requiredSparks)}`);
for (const lib of requiredSparks) {
const success = await addSparkLib(lib, user, streamLogger);
if (!success) {
return false;
}
const { sparksConfig } = require("../functions/src/functionConfig.js");
const requiredSparks = sparksConfig.map((s: any) => s.type);
console.log({ requiredSparks });
for (const lib of requiredSparks) {
const success = await addSparkLib(lib, user);
if (!success) {
return false;
}
}
return true;
}
);
return true;
});
}

View File

@@ -6,14 +6,18 @@ import { parseSparksConfig } from "../utils";
export const generateConfigFromTableSchema = async (
schemaDocPath: string,
user: admin.auth.UserRecord
user: admin.auth.UserRecord,
streamLogger
) => {
await streamLogger("getting schema...");
const schemaDoc = await db.doc(schemaDocPath).get();
const schemaData = schemaDoc.data();
if (!schemaData) throw new Error("no schema found");
await streamLogger(`schemaData: ${JSON.stringify(schemaData)}`);
const derivativeColumns = Object.values(schemaData.columns).filter(
(col: any) => col.type === "DERIVATIVE"
);
await streamLogger(`derivativeColumns: ${JSON.stringify(derivativeColumns)}`);
const derivativesConfig = `[${derivativeColumns.reduce(
(acc, currColumn: any) => {
if (
@@ -37,11 +41,14 @@ export const generateConfigFromTableSchema = async (
},
""
)}]`;
await streamLogger(`derivativesConfig: ${JSON.stringify(derivativesConfig)}`);
const initializableColumns = Object.values(
schemaData.columns
).filter((col: any) => Boolean(col.config?.defaultValue));
console.log(JSON.stringify({ initializableColumns }));
await streamLogger(
`initializableColumns: ${JSON.stringify(initializableColumns)}`
);
const initializeConfig = `[${initializableColumns.reduce(
(acc, currColumn: any) => {
if (currColumn.config.defaultValue.type === "static") {
@@ -66,6 +73,7 @@ export const generateConfigFromTableSchema = async (
},
""
)}]`;
await streamLogger(`initializeConfig: ${JSON.stringify(initializeConfig)}`);
const documentSelectColumns = Object.values(schemaData.columns).filter(
(col: any) => col.type === "DOCUMENT_SELECT" && col.config?.trackedFields
);
@@ -79,8 +87,12 @@ export const generateConfigFromTableSchema = async (
},
""
)}]`;
await streamLogger(
`documentSelectColumns: ${JSON.stringify(documentSelectColumns)}`
);
const sparksConfig = parseSparksConfig(schemaData.sparks, user);
await streamLogger(`sparksConfig: ${JSON.stringify(sparksConfig)}`);
const collectionType = schemaDocPath.includes("subTables")
? "subCollection"
@@ -132,6 +144,7 @@ export const generateConfigFromTableSchema = async (
default:
break;
}
await streamLogger(`collectionType: ${JSON.stringify(collectionType)}`);
// generate field types from table meta data
const fieldTypes = JSON.stringify(
@@ -147,6 +160,7 @@ export const generateConfigFromTableSchema = async (
};
}, {})
);
await streamLogger(`fieldTypes: ${JSON.stringify(fieldTypes)}`);
const exports: any = {
fieldTypes,
@@ -157,10 +171,12 @@ export const generateConfigFromTableSchema = async (
documentSelectConfig,
sparksConfig,
};
await streamLogger(`exports: ${JSON.stringify(exports)}`);
const fileData = Object.keys(exports).reduce((acc, currKey) => {
return `${acc}\nexport const ${currKey} = ${exports[currKey]}`;
}, ``);
await streamLogger(`fileData: ${JSON.stringify(fileData)}`);
const path = require("path");
fs.writeFileSync(

View File

@@ -21,7 +21,8 @@ export const asyncExecute = async (command: string, callback: any) =>
export const addPackages = async (
packages: { name: string; version?: string }[],
user: admin.auth.UserRecord
user: admin.auth.UserRecord,
streamLogger
) => {
const packagesString = packages.reduce((acc, currPackage) => {
return `${acc} ${currPackage.name}@${currPackage.version ?? "latest"}`;
@@ -29,10 +30,13 @@ export const addPackages = async (
if (packagesString.trim().length !== 0) {
const success = await asyncExecute(
`cd build/functions;yarn add ${packagesString}`,
commandErrorHandler({
user,
description: "Error adding packages",
})
commandErrorHandler(
{
user,
description: "Error adding packages",
},
streamLogger
)
);
return success;
}
@@ -41,7 +45,8 @@ export const addPackages = async (
export const addSparkLib = async (
name: string,
user: admin.auth.UserRecord
user: admin.auth.UserRecord,
streamLogger
) => {
try {
const { dependencies } = require(`../sparksLib/${name}`);
@@ -49,7 +54,7 @@ export const addSparkLib = async (
name: key,
version: dependencies[key],
}));
const success = await addPackages(packages, user);
const success = await addPackages(packages, user, streamLogger);
if (!success) {
return false;
}
@@ -63,10 +68,13 @@ export const addSparkLib = async (
const success = await asyncExecute(
`cp build/sparksLib/${name}.ts build/functions/src/sparks/${name}.ts`,
commandErrorHandler({
user,
description: "Error copying sparksLib",
})
commandErrorHandler(
{
user,
description: "Error copying sparksLib",
},
streamLogger
)
);
return success;
};

View File

@@ -2,15 +2,30 @@ const express = require("express");
const bodyParser = require("body-parser");
const cors = require("cors");
import { asyncExecute } from "./compiler/terminal";
import { createStreamLogger } from "./utils";
import generateConfig from "./compiler";
import { auth } from "./firebaseConfig";
import meta from "./package.json";
import { commandErrorHandler, logErrorToDB } from "./utils";
import firebase from "firebase-admin";
import http from "http";
// import { Server } from "socket.io";
const app = express();
// const httpServer = new http.Server(app);
const jsonParser = bodyParser.json();
// const io = new Server(httpServer, {
// cors: {
// origin: "*",
// methods: ["GET", "POST"],
// },
// });
// io.on("connection", () => {
// io.emit("log", "Hey!");
// });
app.use(cors());
app.get("/", async (req: any, res: any) => {
@@ -73,7 +88,10 @@ app.post("/", jsonParser, async (req: any, res: any) => {
});
}
const success = await generateConfig(configPath, user);
const streamLogger = createStreamLogger(configPath, Date.now());
streamLogger("streamLogger created");
const success = await generateConfig(configPath, user, streamLogger);
if (!success) {
console.log(`generateConfig failed to complete`);
res.send({
@@ -82,8 +100,7 @@ app.post("/", jsonParser, async (req: any, res: any) => {
});
return;
}
console.log("generateConfig done");
streamLogger("generateConfig success");
let hasEnvError = false;
@@ -106,7 +123,7 @@ app.post("/", jsonParser, async (req: any, res: any) => {
await asyncExecute(
`cd build/functions; \
yarn install`,
commandErrorHandler({ user })
commandErrorHandler({ user }, streamLogger)
);
await asyncExecute(
@@ -114,10 +131,10 @@ app.post("/", jsonParser, async (req: any, res: any) => {
yarn deployFT \
--project ${process.env._PROJECT_ID} \
--only functions`,
commandErrorHandler({ user })
commandErrorHandler({ user }, streamLogger)
);
console.log("build complete");
await streamLogger(`build complete`);
res.send({
success: true,
});

View File

@@ -16,13 +16,18 @@ async function insertErrorRecordToDB(errorRecord: object) {
await db.collection("_FT_ERRORS").add(errorRecord);
}
function commandErrorHandler(meta: {
user: admin.auth.UserRecord;
description?: string;
functionConfigTs?: string;
sparksConfig?: string;
}) {
function commandErrorHandler(
meta: {
user: admin.auth.UserRecord;
description?: string;
functionConfigTs?: string;
sparksConfig?: string;
},
streamLogger
) {
return async function (error, stdout, stderr) {
await streamLogger(stdout);
if (!error) {
return;
}
@@ -88,4 +93,45 @@ function parseSparksConfig(
return "[]";
}
export { commandErrorHandler, logErrorToDB, parseSparksConfig };
/**
 * Creates a build logger that mirrors each message to the console,
 * accumulates it in memory, and persists the running log onto the
 * table's config document in Firestore (under `ftBuild`).
 *
 * @param tableConfigPath - Firestore document path of the table config
 *   document that receives the build log.
 * @param startTimeStamp - Epoch millis marking the start of this build
 *   run; stored with the log so consumers can distinguish sessions.
 * @returns An async function that records one log line and resolves
 *   once the Firestore update completes.
 */
function createStreamLogger(
  tableConfigPath: string,
  startTimeStamp: number
  // emitFn
) {
  const fullLog: string[] = [];
  // Fixed: previously logged "socketLogger created", which was misleading —
  // the socket.io emit path below is commented out; this is the stream logger.
  console.log("streamLogger created");
  return async (log: string): Promise<void> => {
    console.log(log);
    fullLog.push(log);
    // NOTE(review): every call rewrites the ENTIRE fullLog array to
    // Firestore, so total bytes written grow O(n^2) over a build.
    // Consider FieldValue.arrayUnion(log) or capping fullLog length.
    await db.doc(tableConfigPath).update({
      ftBuild: {
        log,
        tableConfigPath,
        startTimeStamp,
        fullLog,
      },
    });
    // Previous socket.io-based emit path, kept for reference:
    // if (!emitFn) {
    //   // await logErrorToDB({
    //   //   errorDescription: `Invalid socket (${configPath})`,
    //   //   user,
    //   // });
    // } else {
    //   fullLog.push(log);
    //   emitFn("log", {
    //     log,
    //     tableConfigPath,
    //     startTimeStamp,
    //     fullLog,
    //   });
    // }
  };
}
export {
commandErrorHandler,
logErrorToDB,
parseSparksConfig,
createStreamLogger,
};