Auto-create client tables and flattened views on add-data

This commit is contained in:
root 2026-04-01 15:18:57 -07:00
parent 666a173998
commit c4bbe6046f
5 changed files with 207 additions and 15 deletions

1
.gitignore vendored Normal file
View File

@ -0,0 +1 @@
node_modules

2
db.js
View File

@ -5,7 +5,7 @@ const pool = new Pool({
port: process.env.DB_PORT || 5440,
database: "axdb",
user: 'postgres',
password: 'AXU001',
password: 'AXUdev001!',
max: 10
});

View File

@ -4,6 +4,7 @@ const cors = require("cors");
require("dotenv").config();
const port = process.env.PORT || 3052;
const ONE_MINUTE_MS = 60 * 1000;
const { checkAuthKeys, checkAdminAuthKey } = require("./modules/auth");
@ -41,10 +42,14 @@ app.use((req, res) => {
res.status(404).json({ message: "Not found. Incorrect endpoint." });
});
app.listen(port, () => {
const server = app.listen(port, () => {
console.log(`Server listening at http://localhost:${port}`);
});
server.requestTimeout = ONE_MINUTE_MS;
server.headersTimeout = ONE_MINUTE_MS + 5000;
server.keepAliveTimeout = ONE_MINUTE_MS;
// Middleware function to check the Bearer token for create, update, delete operations.
async function checkAuth(req, res, next) {
const authHeader = req.headers.authorization;

View File

@ -1,28 +1,214 @@
const query = require("../db.js");
const { toSnakeCase } = require("./utils.js");
// Raw work-order JSON keys whose values are timestamp strings.
// The flattened view casts these to `timestamp with time zone` via
// buildTimestampExpression instead of projecting them as plain text.
// Map: raw field name as it appears in the JSON -> view column alias.
const WORK_ORDER_TIMESTAMP_FIELDS = new Map([
  ["Request date", "request_date"],
  ["Request date 2", "request_date_2"],
  ["Created", "created"],
  ["Start", "start"],
  ["WO Accepted By Vendor", "wo_accepted_by_vendor"],
  ["Start Work", "start_work"],
  ["Downtime start date", "downtime_start_date"],
  ["Downtime end", "downtime_end"],
  ["Note closed time", "note_closed_time"],
  ["End Work", "end_work"],
  ["End", "end"],
  ["Closed", "closed"],
]);
// Explicit view-column aliases for raw work-order field names where the
// automatic snake_case fallback (normalizeIdentifier) would produce an
// undesirable name — e.g. "From" is reserved-word-ish ("from_requestor"),
// and "NTE Amt. $" / "NTE Amount" get distinct stable names.
// Map: raw field name as it appears in the JSON -> view column alias.
// Keys not listed here (and not in WORK_ORDER_TIMESTAMP_FIELDS) fall back
// to normalizeIdentifier(key) in getColumnAlias.
const WORK_ORDER_FIELD_ALIASES = new Map([
  ["Address", "address"],
  ["Assignee", "assignee"],
  ["Assignee profile", "assignee_profile"],
  ["Bookmark", "bookmark"],
  ["Category", "category"],
  ["Contact", "contact"],
  ["District", "district"],
  ["Division manager", "division_manager"],
  ["Downtime Cat.", "downtime_cat"],
  ["Downtime?", "downtime"],
  ["From", "from_requestor"],
  ["Labor", "labor_amount"],
  ["Qty. Labor Hours", "labor_hours"],
  ["Location", "location"],
  ["Material", "material_amount"],
  ["NTE Amt. $", "nte_amount"],
  ["NTE Amount", "nte_amount_2"],
  ["Part of", "part_of"],
  ["Priority", "priority"],
  ["mainProblem", "problem"],
  ["Problem Type", "problemtype"],
  ["Property", "property"],
  ["Reference", "reference"],
  ["Site #", "site_number"],
  ["State", "state"],
  ["Status", "status"],
  ["Subject", "subject"],
  ["Total", "total_amount"],
  ["Travel", "travel_amount"],
  ["Travel count", "travel_count"],
  ["Type", "type"],
  ["VAT", "vat"],
  ["Vendor Response Time", "vendor_response_time"],
]);
/**
 * Convert an arbitrary string into a safe snake_case SQL identifier.
 *
 * Example: "NTE Amt. $" -> "nte_amt", "mainProblem" -> "main_problem".
 * May return "" when the input contains no alphanumeric characters.
 *
 * @param {*} value - coerced to string before normalization
 * @returns {string} lowercase identifier containing only [a-z0-9_]
 */
function normalizeIdentifier(value) {
  const text = String(value);
  // Insert an underscore at camelCase boundaries ("mainProblem" -> "main_Problem").
  const decamelized = text.replace(/([a-z0-9])([A-Z])/g, "$1_$2");
  // Collapse every run of non-alphanumeric characters into a single underscore.
  const underscored = decamelized.replace(/[^a-zA-Z0-9]+/g, "_");
  // Strip leading/trailing underscores, then collapse any doubles left behind
  // by the camelCase pass sitting next to a punctuation-derived underscore.
  const trimmed = underscored.replace(/^_+|_+$/g, "");
  const collapsed = trimmed.replace(/_+/g, "_");
  return collapsed.toLowerCase();
}
// Double-quote a SQL identifier, doubling any embedded double quotes
// per the SQL standard escaping rule (" -> "").
function quoteIdentifier(value) {
  const escaped = String(value).replaceAll('"', '""');
  return '"' + escaped + '"';
}
// Single-quote a SQL string literal, doubling any embedded single quotes
// per the SQL standard escaping rule (' -> '').
function quoteLiteral(value) {
  const escaped = String(value).replaceAll("'", "''");
  return "'" + escaped + "'";
}
/**
 * Resolve the flattened-view column alias for a raw JSON key.
 *
 * Lookup order: known timestamp fields, then explicit field aliases,
 * then the generic snake_case normalization as a fallback.
 *
 * @param {string} key - raw field name from the stored JSON document
 * @returns {string} column alias (may be "" if the key normalizes to nothing)
 */
function getColumnAlias(key) {
  const timestampAlias = WORK_ORDER_TIMESTAMP_FIELDS.get(key);
  if (timestampAlias !== undefined) {
    return timestampAlias;
  }
  const explicitAlias = WORK_ORDER_FIELD_ALIASES.get(key);
  if (explicitAlias !== undefined) {
    return explicitAlias;
  }
  return normalizeIdentifier(key);
}
/**
 * Build a SQL SELECT fragment that projects a JSON key as a timestamptz
 * column: empty strings become NULL, anything else is parsed with
 * to_timestamp. The key is embedded as an escaped literal (quoteLiteral)
 * and the alias as an escaped identifier (quoteIdentifier).
 *
 * NOTE(review): the 'YYYY-MM-DD"T"HH24:MI:SS' format carries no fractional
 * seconds or timezone offset — confirm the source data never includes them
 * (e.g. "...00.000Z" would not round-trip cleanly).
 *
 * @param {string} tableName - table the fragment selects from (not quoted here)
 * @param {string} key - raw JSON field name
 * @param {string} alias - output column name
 * @returns {string} SQL fragment for a SELECT list
 */
function buildTimestampExpression(tableName, key, alias) {
  return `
    CASE
      WHEN ((${tableName}.data ->> ${quoteLiteral(key)}) = ''::text) THEN NULL::timestamp with time zone
      ELSE to_timestamp((${tableName}.data ->> ${quoteLiteral(key)}), 'YYYY-MM-DD"T"HH24:MI:SS'::text)
    END AS ${quoteIdentifier(alias)}`;
}
/**
 * Build a SQL SELECT fragment that projects a JSON key as a plain text
 * column, escaping the key as a literal and the alias as an identifier.
 *
 * @param {string} tableName - table the fragment selects from (not quoted here)
 * @param {string} key - raw JSON field name
 * @param {string} alias - output column name
 * @returns {string} SQL fragment for a SELECT list
 */
function buildTextExpression(tableName, key, alias) {
  return `(${tableName}.data ->> ${quoteLiteral(key)}) AS ${quoteIdentifier(alias)}`;
}
/**
 * Create the raw-data table for a client collection if it does not already
 * exist, and (re)attach the updated_at maintenance trigger.
 *
 * The table stores one jsonb document per row, keyed by the document id.
 * Relies on a trigger_set_timestamp() function already existing in the
 * database (defined elsewhere in the schema).
 *
 * @param {string} tableName - normalized identifier (normalizeIdentifier output)
 * @returns {Promise<void>}
 */
async function ensureTableExists(tableName) {
  // Defense-in-depth: quote the interpolated identifiers even though callers
  // pass normalizeIdentifier() output. Quoting a lowercase [a-z0-9_] name is
  // a no-op for Postgres name resolution, so existing tables still match.
  const escape = (name) => `"${String(name).replace(/"/g, '""')}"`;
  const table = escape(tableName);
  const trigger = escape(`set_update_${tableName}`);
  await query(`
    CREATE TABLE IF NOT EXISTS ${table} (
      id text primary key,
      created_at timestamp with time zone not null default now(),
      updated_at timestamp with time zone not null default now(),
      data jsonb not null
    )`);
  // Drop-and-recreate keeps the trigger definition current and makes this
  // function idempotent across repeated add-data requests.
  await query(`
    DROP TRIGGER IF EXISTS ${trigger} ON ${table}`);
  await query(`
    CREATE TRIGGER ${trigger}
    BEFORE UPDATE ON ${table}
    FOR EACH ROW
    EXECUTE PROCEDURE trigger_set_timestamp()`);
}
/**
 * Collect the distinct top-level JSON keys present across every row of a
 * client data table, excluding keys that clash with the physical column
 * names (those would collide with the flattened view's base columns).
 *
 * Returns [] for an empty table.
 *
 * @param {string} tableName - normalized identifier of the data table
 * @returns {Promise<string[]>} sorted list of distinct JSON keys
 */
async function getFlattenedKeys(tableName) {
  // Defense-in-depth: quote the identifier; a no-op for the lowercase
  // normalizeIdentifier() names callers pass in.
  const table = `"${String(tableName).replace(/"/g, '""')}"`;
  // Column names owned by the physical table.
  const reserved = new Set(["id", "created_at", "updated_at", "data"]);
  const result = await query(`
    SELECT DISTINCT key
    FROM ${table},
    LATERAL jsonb_object_keys(data) AS key
    ORDER BY key`);
  return result.rows
    .map((row) => row.key)
    .filter((key) => !reserved.has(key));
}
/**
 * (Re)build the flattened SQL view over a client data table.
 *
 * Reads the distinct JSON keys currently present in the table, maps each to
 * a column alias (timestamp fields are cast, everything else projected as
 * text), then drops and recreates the view with id/created_at/updated_at
 * plus one column per JSON key.
 *
 * @param {string} tableName - normalized identifier of the backing table
 * @param {string} viewName - normalized identifier of the view to (re)create
 * @returns {Promise<void>}
 */
async function refreshFlattenedView(tableName, viewName) {
  const keys = await getFlattenedKeys(tableName);
  // Aliases already taken by the base columns; any JSON-derived alias that
  // collides gets a numeric suffix in the loop below.
  const seenAliases = new Set(["id", "created_at", "updated_at", "data"]);
  const selectFragments = [
    `${tableName}.id`,
    `${tableName}.created_at`,
    `${tableName}.updated_at`,
  ];
  for (const key of keys) {
    let alias = getColumnAlias(key);
    if (!alias) {
      // Keys that normalize to an empty identifier cannot become columns.
      continue;
    }
    // Deduplicate: "foo", then "foo_2", "foo_3", ...
    let suffix = 2;
    while (seenAliases.has(alias)) {
      alias = `${alias}_${suffix}`;
      suffix += 1;
    }
    seenAliases.add(alias);
    const expression = WORK_ORDER_TIMESTAMP_FIELDS.has(key)
      ? buildTimestampExpression(tableName, key, alias)
      : buildTextExpression(tableName, key, alias);
    selectFragments.push(expression);
  }
  // Drop-and-recreate rather than CREATE OR REPLACE: REPLACE cannot remove
  // or retype existing view columns when the key set shrinks or changes.
  await query(`
    DROP VIEW IF EXISTS ${viewName}`);
  await query(`
    CREATE VIEW ${viewName} AS
    SELECT ${selectFragments.join(",\n ")}
    FROM ${tableName}`);
}
const addData = async (req, res) => {
try {
const client = req.params.client;
const { tableName, data } = req.body;
const dBtableName = `${client}_${toSnakeCase(tableName)}`;
const tableCheck = await query(`
SELECT EXISTS (SELECT 1 FROM pg_tables WHERE tablename = $1)
AS table_exists`, [dBtableName]);
if (!tableCheck.rows[0]["table_exists"]) {
return res.status(404).json({ error: `Data collection ${tableName} not found.` });
if (typeof tableName !== "string" || !tableName.trim()) {
return res.status(400).json({ error: "tableName is required." });
}
if (!Array.isArray(data)) {
return res.status(400).json({ error: "data must be an array." });
}
const normalizedClient = normalizeIdentifier(client);
const normalizedTableName = normalizeIdentifier(tableName);
if (!normalizedClient || !normalizedTableName) {
return res.status(400).json({ error: "Invalid client or tableName." });
}
const dBtableName = `${normalizedClient}_${normalizedTableName}`;
const flattenedViewName = `${normalizedClient}_flattened_${normalizedTableName}`;
const insertQuery = `
INSERT INTO ${dBtableName} (id, data) VALUES ($1, $2)
ON CONFLICT (id) DO UPDATE SET data = $2`;
for (item of data) {
await query(insertQuery, [item.id, item]);
for (const item of data) {
if (!item || typeof item !== "object" || item.id == null) {
return res.status(400).json({ error: "Each data item must be an object with an id." });
}
}
res.status(200).json({ message: "Data added successfully" });
await query("BEGIN");
try {
await ensureTableExists(dBtableName);
for (const item of data) {
await query(insertQuery, [String(item.id), item]);
}
await refreshFlattenedView(dBtableName, flattenedViewName);
await query("COMMIT");
} catch (err) {
await query("ROLLBACK");
throw err;
}
res.status(200).json({
message: "Data added successfully",
tableName: dBtableName,
viewName: flattenedViewName,
});
} catch (err) {
console.error("Error handling the request:", err);

View File

@ -12,7 +12,7 @@
"author": "",
"license": "ISC",
"dependencies": {
"axios": "^1.4.0",
"axios": "1.4.0",
"cors": "^2.8.5",
"dotenv": "^16.4.5",
"express": "^4.18.2",