fix: remove nested .git folders, re-add as normal directories

This commit is contained in:
2026-03-22 17:50:47 -05:00
parent f55c7e47c9
commit 6b7eec67b8
1870 changed files with 4170168 additions and 3 deletions
+41
View File
@@ -0,0 +1,41 @@
import { prisma } from "../src/constants";

/**
 * CLI utility: assign a role to a user.
 * Usage: bun utils:assign_user_role {user} {role}
 * {user} may be a user id or login; {role} may be a role id or moniker.
 */
await (async () => {
  const userIdentifier = Bun.argv[2];
  const roleIdentifier = Bun.argv[3];
  // Bail out on bad usage — previously this only logged and then fell
  // through to query the DB with undefined identifiers.
  if (Bun.argv.length !== 4) {
    return console.error(
      "2 arguments expected. \n Format: bun utils:assign_user_role {user} {role}"
    );
  }
  const user = await prisma.user.findFirst({
    where: { OR: [{ id: userIdentifier }, { login: userIdentifier }] },
  });
  if (!user)
    return console.error(
      `User with identifier '${userIdentifier}' doesn't exist`
    );
  const role = await prisma.role.findFirst({
    where: { OR: [{ id: roleIdentifier }, { moniker: roleIdentifier }] },
    include: { users: true },
  });
  if (!role)
    return console.error(
      `Role with identifier '${roleIdentifier}' doesn't exist`
    );
  if (role.users.map((v) => v.id).includes(user.id))
    return console.log("User already has role!");
  // `connect` ADDS this role to the user's existing roles. The previous
  // `set` would have replaced the entire relation, silently revoking every
  // other role the user held.
  await prisma.user.update({
    where: { id: user.id },
    data: { roles: { connect: { id: role.id } } },
  });
  return console.log(
    `User '${user.login}' has been given role '${role.title}'!`
  );
})();
+93
View File
@@ -0,0 +1,93 @@
/**
* One-time backfill script to populate category/subcategory fields
* on existing CatalogItem records from ConnectWise data.
*
* Usage: bun utils/backfillCatalogCategories.ts
*/
import { prisma, connectWiseApi } from "../src/constants";
// Minimal shape of a ConnectWise reference object (id + display name).
interface CWReference {
  id: number;
  name: string;
}

// Only the catalog-item fields this backfill requests from CW.
interface CatalogItemPartial {
  id: number;
  category: CWReference;
  subcategory: CWReference;
}

// Backfills category/subcategory (name + CW id) onto CatalogItem rows that
// are missing them, using the live ConnectWise catalog as the source.
async function main() {
  // 1. Find all DB items that are missing category data
  const dbItems = await prisma.catalogItem.findMany({
    where: { category: null },
    select: { cwCatalogId: true, id: true },
  });
  if (dbItems.length === 0) {
    console.log("All catalog items already have category data. Nothing to do.");
    return;
  }
  console.log(`Found ${dbItems.length} catalog items missing category data.`);
  // 2. Fetch all items from CW with category/subcategory fields
  const pageSize = 1000;
  const countRes = await connectWiseApi.get("/procurement/catalog/count");
  const totalCount = countRes.data.count;
  const totalPages = Math.ceil(totalCount / pageSize);
  // CW catalog id -> category/subcategory references for quick lookup.
  const cwMap = new Map<
    number,
    { category: CWReference; subcategory: CWReference }
  >();
  for (let page = 1; page <= totalPages; page++) {
    const res = await connectWiseApi.get(
      `/procurement/catalog?page=${page}&pageSize=${pageSize}&fields=id,category,subcategory`,
    );
    const items: CatalogItemPartial[] = res.data;
    for (const item of items) {
      cwMap.set(item.id, {
        category: item.category,
        subcategory: item.subcategory,
      });
    }
  }
  console.log(`Fetched ${cwMap.size} items from CW. Updating DB...`);
  // 3. Batch update
  // Updates run `batchSize` at a time to bound concurrent DB work.
  let updated = 0;
  const batchSize = 50;
  for (let i = 0; i < dbItems.length; i += batchSize) {
    const batch = dbItems.slice(i, i + batchSize);
    await Promise.all(
      batch.map(async (dbItem) => {
        const cwData = cwMap.get(dbItem.cwCatalogId);
        // Skip DB rows whose CW counterpart was not returned.
        if (!cwData) return;
        await prisma.catalogItem.update({
          where: { id: dbItem.id },
          data: {
            category: cwData.category?.name ?? null,
            categoryCwId: cwData.category?.id ?? null,
            subcategory: cwData.subcategory?.name ?? null,
            subcategoryCwId: cwData.subcategory?.id ?? null,
          },
        });
        updated++;
      }),
    );
    console.log(
      ` Updated ${Math.min(i + batchSize, dbItems.length)}/${dbItems.length}...`,
    );
  }
  console.log(`Done. Updated ${updated} catalog items with category data.`);
}

main()
  .catch(console.error)
  .finally(() => prisma.$disconnect());
+65
View File
@@ -0,0 +1,65 @@
import { execSync } from "child_process";

/**
 * One-off migration: pull the optima key secret from the cluster, convert
 * PKCS#1 private keys to PKCS#8 (and the RSA public key to SPKI) via the
 * openssl CLI, then delete and recreate the Kubernetes secret.
 */
const kubeconfig = "/Users/jroberts/projects/K8S-QuickDeploy/k8s.yaml";

/**
 * Single-quote a value for safe interpolation into a shell command line.
 * PEM blobs contain newlines and spaces, so unquoted interpolation would
 * split into multiple arguments (or be interpreted by the shell).
 */
const shellQuote = (value: string): string =>
  `'${value.replace(/'/g, `'\\''`)}'`;

/** Fetch and base64-decode one key from the optima-keys-secret. */
function getKey(name: string): string {
  const b64 = execSync(
    `KUBECONFIG=${kubeconfig} kubectl get secret optima-keys-secret -n optima -o jsonpath="{.data.${name}}"`,
  )
    .toString()
    .trim();
  return Buffer.from(b64, "base64").toString("utf-8");
}

const privKeys = [
  "ACCESS_TOKEN_PRIVATE_KEY",
  "REFRESH_TOKEN_PRIVATE_KEY",
  "PERMISSIONS_PRIVATE_KEY",
  "SECURE_VALUES_PRIVATE_KEY",
];
const converted: Record<string, string> = {};

// Use openssl CLI to convert PKCS#1 to PKCS#8 (Bun's crypto has issues with some keys)
for (const k of privKeys) {
  const pem = getKey(k);
  const pkcs8 = execSync("openssl pkey -in /dev/stdin", {
    input: pem,
  }).toString();
  converted[k] = pkcs8;
  console.log(`${k}: converted to PKCS#8 ✅`);
}

const pubPem = getKey("SECURE_VALUES_PUBLIC_KEY");
const spki = execSync("openssl rsa -RSAPublicKey_in -pubout -in /dev/stdin", {
  input: pubPem,
}).toString();
converted["SECURE_VALUES_PUBLIC_KEY"] = spki;
console.log("SECURE_VALUES_PUBLIC_KEY: converted to SPKI ✅");

// Generate kubectl command to recreate the secret with PKCS#8 keys
const args = Object.entries(converted)
  .map(([k, v]) => `--from-literal=${k}=${shellQuote(v)}`)
  .join(" \\\n ");
console.log("\n--- Delete and recreate secret with PKCS#8 keys ---\n");
console.log(
  `KUBECONFIG=${kubeconfig} kubectl delete secret optima-keys-secret -n optima`,
);
console.log(
  `KUBECONFIG=${kubeconfig} kubectl create secret generic optima-keys-secret -n optima \\\n ${args}`,
);

// Actually do it
console.log("\nApplying...");
execSync(
  `KUBECONFIG=${kubeconfig} kubectl delete secret optima-keys-secret -n optima`,
);
// BUG FIX: the executed command previously interpolated the raw multi-line
// PEM values unquoted, so the shell split them into garbage arguments. The
// values must be shell-quoted exactly like the printed command above.
const literals = Object.entries(converted).map(
  ([k, v]) => `--from-literal=${k}=${shellQuote(v)}`,
);
const cmd = `KUBECONFIG=${kubeconfig} kubectl create secret generic optima-keys-secret -n optima ${literals.join(" ")}`;
execSync(cmd);
console.log("Secret recreated with PKCS#8 keys ✅");
+27
View File
@@ -0,0 +1,27 @@
import cuid from "cuid";
import { prisma } from "../src/constants";
import { signPermissions } from "../src/modules/permission-utils/signPermissions";

/**
 * One-off seed script: create the "administrator" role (wildcard
 * permissions) unless it already exists.
 */
const id = cuid();
const admin = await prisma.role.findFirst({
  where: { moniker: "administrator" },
});
if (admin) {
  console.log("Admin already exists", admin);
} else {
  const newRole = await prisma.role.create({
    data: {
      id,
      moniker: "administrator",
      title: "Admin",
      // Signed permission grant scoped to this role id; "*" = all permissions.
      permissions: signPermissions({
        issuer: "roles",
        subject: id,
        permissions: ["*"],
      }),
    },
  });
  // Only report success when a role was actually created — previously this
  // log ran unconditionally and printed `undefined` when the role existed.
  console.log("Admin Role Created!", newRole);
}
+123
View File
@@ -0,0 +1,123 @@
/**
* Quick utility to fetch all distinct categories, subcategories, and manufacturers
* from the ConnectWise catalog and print them for reference.
*/
import { connectWiseApi } from "../src/constants";
// Minimal shape of a ConnectWise reference object (id + display name).
interface CWReference {
  id: number;
  name: string;
}

// Fields requested from /procurement/catalog for this report.
interface CatalogItem {
  id: number;
  identifier: string;
  description: string;
  category: CWReference;
  subcategory: CWReference;
  manufacturer: CWReference;
  inactiveFlag: boolean;
}

// Walks the full CW catalog and prints distinct categories, subcategories,
// manufacturers, and category/subcategory pair counts for reference.
async function main() {
  const pageSize = 1000;
  // Get total count
  const countRes = await connectWiseApi.get("/procurement/catalog/count");
  const totalCount = countRes.data.count;
  const totalPages = Math.ceil(totalCount / pageSize);
  console.log(`Total catalog items: ${totalCount}`);
  // Distinct values keyed by CW id so duplicates across pages collapse.
  const categories = new Map<number, string>();
  const subcategories = new Map<
    number,
    { name: string; categoryId: number; categoryName: string }
  >();
  const manufacturers = new Map<number, string>();
  // "category::subcategory" -> pair with occurrence count.
  const catSubcatPairs = new Map<
    string,
    { category: string; subcategory: string; count: number }
  >();
  for (let page = 1; page <= totalPages; page++) {
    const res = await connectWiseApi.get(
      `/procurement/catalog?page=${page}&pageSize=${pageSize}&fields=id,identifier,description,category,subcategory,manufacturer,inactiveFlag`,
    );
    const items: CatalogItem[] = res.data;
    for (const item of items) {
      if (item.category) {
        categories.set(item.category.id, item.category.name);
      }
      if (item.subcategory) {
        // NOTE(review): categoryId/categoryName may be undefined here even
        // though the map's value type declares them required — confirm CW
        // always returns a category when a subcategory is present.
        subcategories.set(item.subcategory.id, {
          name: item.subcategory.name,
          categoryId: item.category?.id,
          categoryName: item.category?.name,
        });
      }
      if (item.manufacturer) {
        manufacturers.set(item.manufacturer.id, item.manufacturer.name);
      }
      const key = `${item.category?.name ?? "None"}::${item.subcategory?.name ?? "None"}`;
      const existing = catSubcatPairs.get(key);
      if (existing) {
        existing.count++;
      } else {
        catSubcatPairs.set(key, {
          category: item.category?.name ?? "None",
          subcategory: item.subcategory?.name ?? "None",
          count: 1,
        });
      }
    }
  }
  console.log("\n=== CATEGORIES ===");
  const sortedCats = [...categories.entries()].sort((a, b) =>
    a[1].localeCompare(b[1]),
  );
  for (const [id, name] of sortedCats) {
    console.log(` [${id}] ${name}`);
  }
  console.log("\n=== SUBCATEGORIES (grouped by category) ===");
  // Regroup subcategories under their parent category name for display.
  const groupedSubs = new Map<string, { id: number; name: string }[]>();
  for (const [id, sub] of subcategories) {
    const catName = sub.categoryName ?? "None";
    if (!groupedSubs.has(catName)) groupedSubs.set(catName, []);
    groupedSubs.get(catName)!.push({ id, name: sub.name });
  }
  for (const [catName, subs] of [...groupedSubs.entries()].sort((a, b) =>
    a[0].localeCompare(b[0]),
  )) {
    console.log(`\n ${catName}:`);
    for (const sub of subs.sort((a, b) => a.name.localeCompare(b.name))) {
      console.log(` [${sub.id}] ${sub.name}`);
    }
  }
  console.log("\n=== MANUFACTURERS ===");
  const sortedMfgs = [...manufacturers.entries()].sort((a, b) =>
    a[1].localeCompare(b[1]),
  );
  for (const [id, name] of sortedMfgs) {
    console.log(` [${id}] ${name}`);
  }
  console.log("\n=== CATEGORY → SUBCATEGORY PAIRS (with item counts) ===");
  const sortedPairs = [...catSubcatPairs.values()].sort(
    (a, b) =>
      a.category.localeCompare(b.category) ||
      a.subcategory.localeCompare(b.subcategory),
  );
  for (const pair of sortedPairs) {
    // NOTE(review): there is no separator between category and subcategory,
    // so this prints e.g. "HardwareServers" — likely a lost "→" (compare
    // the section header above); confirm intended output.
    console.log(
      ` ${pair.category}${pair.subcategory} (${pair.count} items)`,
    );
  }
}

main().catch(console.error);
+57
View File
@@ -0,0 +1,57 @@
import crypto from "crypto";

// Banner — 4096-bit key generation below can take a while.
console.log(`
Generating Private Keys
-----------------
This script will go through and generate all the keys necessary for running the Credential Manager API locally.
This process might take several minutes.
-----------------`);

// Private key files (repo root) and the directory for their public halves.
const keyFiles = [
  ".accessToken.key",
  ".refreshToken.key",
  ".permissions.key",
  ".secureValues.key",
];
const publicDir = "public-keys";

/** Report whether `path` already exists on disk, logging when it does. */
const checkExisting = async (path: string): Promise<boolean> => {
  const exists = await Bun.file(path).exists();
  if (exists) console.log(`'${path}' already exists`);
  return exists;
};

// Process every key pair concurrently; each pair is independent.
await Promise.all(
  keyFiles.map(async (privatePath) => {
    const publicPath = `${publicDir}/${privatePath.replace(/\.key$/, ".pub")}`;
    const hasPrivate = await checkExisting(privatePath);
    const hasPublic = await checkExisting(publicPath);
    // Nothing to do when both halves are present.
    if (hasPrivate && hasPublic) return;
    // Regenerate BOTH halves whenever either is missing so the pair matches.
    console.log(`Generating '${privatePath}' and '${publicPath}'...`);
    const { privateKey, publicKey } = crypto.generateKeyPairSync("rsa", {
      modulusLength: 4096,
      privateKeyEncoding: { type: "pkcs8", format: "pem" },
      publicKeyEncoding: { type: "spki", format: "pem" },
    });
    await Bun.write(privatePath, privateKey);
    await Bun.write(publicPath, publicKey);
  }),
);

console.log("\nGenerated All Keys Successfully!");
+67
View File
@@ -0,0 +1,67 @@
import crypto from "crypto";
import { mkdirSync } from "fs";

/**
 * Generates all RSA key pairs needed for the production deployment and
 * writes a ready-to-apply Kubernetes Secret manifest containing them.
 */
const outputDir = "production-keys";
console.log(`
Generating Production Keys
-----------------
This script will generate all RSA key pairs needed for the production deployment.
Output directory: ${outputDir}/
-----------------`);
// Ensure output directory exists
mkdirSync(outputDir, { recursive: true });

const keyFiles = ["accessToken", "refreshToken", "permissions", "secureValues"];
const generatedKeys: Record<string, { private: string; public: string }> = {};
for (const name of keyFiles) {
  console.log(`Generating '${name}' key pair (4096-bit RSA)...`);
  const { privateKey, publicKey } = crypto.generateKeyPairSync("rsa", {
    modulusLength: 4096,
    privateKeyEncoding: { type: "pkcs8", format: "pem" },
    publicKeyEncoding: { type: "spki", format: "pem" },
  });
  generatedKeys[name] = { private: privateKey, public: publicKey };
  const privPath = `${outputDir}/${name}.key`;
  const pubPath = `${outputDir}/${name}.pub`;
  await Bun.write(privPath, privateKey);
  await Bun.write(pubPath, publicKey);
  console.log(`${privPath}`);
  console.log(`${pubPath}`);
}

// Generate Kubernetes Secret YAML
const toBase64 = (str: string) => Buffer.from(str).toString("base64");
// FIX: metadata children and data entries must be indented — flush-left
// nested keys produce an invalid Secret manifest that kubectl rejects.
const secretYaml = `apiVersion: v1
kind: Secret
metadata:
  name: optima-keys-secret
  namespace: optima
type: Opaque
data:
  ACCESS_TOKEN_PRIVATE_KEY: ${toBase64(generatedKeys["accessToken"].private)}
  REFRESH_TOKEN_PRIVATE_KEY: ${toBase64(generatedKeys["refreshToken"].private)}
  PERMISSIONS_PRIVATE_KEY: ${toBase64(generatedKeys["permissions"].private)}
  SECURE_VALUES_PRIVATE_KEY: ${toBase64(generatedKeys["secureValues"].private)}
  SECURE_VALUES_PUBLIC_KEY: ${toBase64(generatedKeys["secureValues"].public)}
`;
const secretPath = `${outputDir}/optima-keys-secret.yaml`;
await Bun.write(secretPath, secretYaml);
console.log(`\n ✔ ${secretPath}`);
console.log(`
-----------------
All production keys and K8s Secret manifest generated in '${outputDir}/'.
⚠️ Delete the '${outputDir}/' directory after applying to your cluster.
Do NOT commit these keys to version control.
-----------------
`);
+552
View File
@@ -0,0 +1,552 @@
import axios from "axios";
// Shared JSON helper types for the CW adjustments watcher.
type JsonObject = Record<string, unknown>;
// One adjustment as captured during a poll, plus precomputed signatures
// used for change detection.
type SnapshotItem = {
  key: string;
  serialized: string;
  data: JsonObject;
  trackedRows: TrackedRow[];
  trackedSignature: string;
};
// The inventory quantities tracked per product line.
type TrackedRow = {
  key: string;
  product: string;
  onHand: string;
  inventory: string;
};

// Poll interval between fetches.
const POLL_MS = 60_000;
const CW_BASE_URL =
  process.env.CW_BASE_URL ??
  "https://ttscw.totaltech.net/v4_6_release/apis/3.0";
const ENDPOINT = "/procurement/adjustments?pageSize=1000";
const CW_BASIC_TOKEN = process.env.CW_BASIC_TOKEN;
const CW_CLIENT_ID = process.env.CW_CLIENT_ID;
// Fail fast when credentials are absent.
if (!CW_BASIC_TOKEN || !CW_CLIENT_ID) {
  console.error(
    "Missing required env vars: CW_BASIC_TOKEN and/or CW_CLIENT_ID",
  );
  process.exit(1);
}
// Preconfigured axios client for the CW REST API.
const cw = axios.create({
  baseURL: CW_BASE_URL,
  headers: {
    Authorization: `Basic ${CW_BASIC_TOKEN}`,
    clientId: CW_CLIENT_ID,
    "Content-Type": "application/json",
  },
  timeout: 30_000,
});
// Type guard: true only for plain non-null, non-array objects.
const isObject = (value: unknown): value is JsonObject => {
  if (value === null || Array.isArray(value)) return false;
  return typeof value === "object";
};
// Deterministic serialization: object keys are sorted and array elements are
// serialized then sorted, so deeply-equal values always yield the same
// string. Used for snapshot signatures and leaf-level diff comparisons.
// NOTE(review): sorting array elements makes array ORDER changes invisible
// to the diff — confirm that is intended.
const stableStringify = (value: unknown): string => {
  if (Array.isArray(value)) {
    const entries = value.map((entry) => stableStringify(entry)).sort();
    return `[${entries.join(",")}]`;
  }
  if (isObject(value)) {
    const keys = Object.keys(value).sort();
    const pairs = keys.map(
      (key) => `${JSON.stringify(key)}:${stableStringify(value[key])}`,
    );
    return `{${pairs.join(",")}}`;
  }
  return JSON.stringify(value);
};

// Coerce any value to an object; non-objects become {}.
const toObject = (value: unknown): JsonObject => {
  if (!isObject(value)) return {};
  return value;
};

// Read a dotted path (e.g. "_info.id") out of a nested object; null when
// any intermediate segment is missing or not an object.
const readPathValue = (obj: JsonObject, path: string): unknown => {
  const parts = path.split(".");
  let current: unknown = obj;
  for (const part of parts) {
    if (!isObject(current)) return null;
    current = current[part];
  }
  return current;
};

// Convert a value to a usable identity key: non-empty strings and numbers
// only; everything else yields null.
const asKey = (value: unknown): string | null => {
  if (typeof value === "string" && value.length > 0) return value;
  if (typeof value === "number") return value.toString();
  return null;
};

// First non-empty value found among the candidate dotted paths, else null.
const firstValue = (obj: JsonObject, paths: string[]): unknown => {
  for (const path of paths) {
    const value = readPathValue(obj, path);
    if (value === null || value === undefined || value === "") continue;
    return value;
  }
  return null;
};
// Stable identity for an adjustment: the first usable id-like field, falling
// back to a content-derived key so anonymous records can still be diffed.
const itemKey = (adjustment: JsonObject): string => {
  const keyPaths = [
    "id",
    "adjustmentId",
    "procurementAdjustmentId",
    "recordId",
    "recId",
    "_info.id",
    "_info.href",
  ];
  for (const keyPath of keyPaths) {
    const keyValue = asKey(readPathValue(adjustment, keyPath));
    if (keyValue) return keyValue;
  }
  // No id field at all — key on the full serialized content.
  return `anon:${stableStringify(adjustment)}`;
};
// Human-readable summary of an adjustment. Each field probes several likely
// CW payload locations since the exact response shape is not guaranteed.
const summarize = (adjustment: JsonObject) => {
  // First non-empty value among candidate paths, or "-" as placeholder.
  const pick = (...paths: string[]) => {
    for (const path of paths) {
      const value = readPathValue(adjustment, path);
      if (value !== null && value !== undefined && value !== "") return value;
    }
    return "-";
  };
  return {
    id: pick("id", "adjustmentId", "procurementAdjustmentId", "recordId"),
    type: pick(
      "type.name",
      "type.identifier",
      "type.id",
      "type",
      "adjustmentType.name",
      "adjustmentType",
      "transactionType.name",
      "transactionType",
    ),
    amount: pick("amount", "value", "total", "quantity"),
    status: pick("status.name", "status", "state"),
    description: pick("description", "summary", "notes"),
    updatedBy: pick("_info.updatedBy", "updatedBy", "lastUpdatedBy"),
    lastUpdated: pick("_info.lastUpdated", "lastUpdated", "dateUpdated"),
  };
};
// Render a value for display; null/undefined/empty become "-".
const normalizeValue = (value: unknown): string => {
  if (value === null || value === undefined || value === "") return "-";
  return toDisplayValue(value);
};

// "-" is the placeholder for an absent quantity.
const isMeaningfulQuantity = (value: string) => value !== "-";

// Extract a TrackedRow (product + on-hand/inventory quantities) from one
// detail object; null when it carries no usable quantity at all.
const toTrackedRow = (detail: JsonObject): TrackedRow | null => {
  const productValue = firstValue(detail, [
    "product.name",
    "product.identifier",
    "product.id",
    "item.name",
    "item.identifier",
    "item.id",
    "catalogItem.name",
    "catalogItem.identifier",
    "catalogItem.id",
    "productIdentifier",
    "productName",
    "sku",
    "identifier",
    "id",
  ]);
  const onHandValue = firstValue(detail, [
    "onHand",
    "onHandQty",
    "onHandQuantity",
    "qtyOnHand",
    "quantityOnHand",
    "quantity.onHand",
  ]);
  const inventoryValue = firstValue(detail, [
    "inventory",
    "inventoryQty",
    "inventoryLevel",
    "quantity",
    "qty",
  ]);
  const onHand = normalizeValue(onHandValue);
  const inventory = normalizeValue(inventoryValue);
  // Rows with neither quantity are dropped entirely.
  const hasMeaningfulQuantity =
    isMeaningfulQuantity(onHand) || isMeaningfulQuantity(inventory);
  if (!hasMeaningfulQuantity) return null;
  const product = normalizeValue(productValue);
  // Composite key: any quantity change produces a different row key.
  const rowKey = `${product}|${onHand}|${inventory}`;
  return {
    key: rowKey,
    product,
    onHand,
    inventory,
  };
};
// Collect tracked rows from whichever detail collection the payload uses
// (first candidate array that yields rows wins); falls back to treating the
// adjustment itself as a single row.
const getTrackedRows = (adjustment: JsonObject): TrackedRow[] => {
  const detailCandidates = [
    readPathValue(adjustment, "adjustmentDetails"),
    readPathValue(adjustment, "details"),
    readPathValue(adjustment, "lineItems"),
  ];
  for (const candidate of detailCandidates) {
    if (!Array.isArray(candidate)) continue;
    const rows = candidate
      .map((entry) => toTrackedRow(toObject(entry)))
      .filter((entry): entry is TrackedRow => entry !== null)
      .sort((a, b) => a.key.localeCompare(b.key));
    if (rows.length > 0) return rows;
  }
  const rootRow = toTrackedRow(adjustment);
  if (!rootRow) return [];
  return [rootRow];
};
// Best-effort scalar rendering: primitives pass through, arrays render
// element-wise, objects try common label fields (name/identifier/...)
// before falling back to a stable JSON dump.
const toDisplayValue = (value: unknown): string => {
  if (value === null || value === undefined || value === "") return "-";
  if (
    typeof value === "string" ||
    typeof value === "number" ||
    typeof value === "boolean"
  ) {
    return String(value);
  }
  if (Array.isArray(value)) {
    return `[${value.map((entry) => toDisplayValue(entry)).join(",")}]`;
  }
  if (!isObject(value)) return String(value);
  const commonObjectPaths = ["name", "identifier", "id", "value", "code"];
  for (const path of commonObjectPaths) {
    const objectValue = readPathValue(value, path);
    if (objectValue === null || objectValue === undefined || objectValue === "")
      continue;
    if (typeof objectValue === "object") continue;
    return String(objectValue);
  }
  return stableStringify(value);
};

// NOTE(review): identical to normalizeValue above — candidates for merging.
const clean = (value: unknown): string => {
  if (value === null || value === undefined || value === "") return "-";
  return toDisplayValue(value);
};
// Recursively collect dotted paths where `before` and `after` differ,
// capped at `maxPaths`. The `paths` accumulator is shared (mutated) across
// the recursion and also returned for convenience.
const diffPaths = (
  before: unknown,
  after: unknown,
  currentPath = "",
  paths: string[] = [],
  maxPaths = 6,
): string[] => {
  if (paths.length >= maxPaths) return paths;
  const beforeIsObject = isObject(before);
  const afterIsObject = isObject(after);
  if (!beforeIsObject || !afterIsObject) {
    // Leaf comparison via stable serialization (covers arrays/primitives).
    if (stableStringify(before) === stableStringify(after)) return paths;
    const label = currentPath || "(root)";
    paths.push(label);
    return paths;
  }
  // Both objects: recurse over the union of their keys in sorted order.
  const keys = [
    ...new Set([...Object.keys(before), ...Object.keys(after)]),
  ].sort();
  for (const key of keys) {
    if (paths.length >= maxPaths) return paths;
    const nextPath = currentPath ? `${currentPath}.${key}` : key;
    diffPaths(before[key], after[key], nextPath, paths, maxPaths);
  }
  return paths;
};
// One-line report for an added (+), updated (~), or removed (-) adjustment.
const toLine = (kind: "+" | "~" | "-", adjustment: JsonObject): string => {
  const s = summarize(adjustment);
  return `${kind} id=${clean(s.id)} type=${clean(s.type)} status=${clean(s.status)} amount=${clean(
    s.amount,
  )} by=${clean(s.updatedBy)} desc=${clean(s.description)}`;
};
// "~" report line for an adjustment whose content changed between polls.
// Prefers a per-field "label:old→new" delta; falls back to the raw list of
// structurally differing paths.
const updatedToLine = (before: JsonObject, after: JsonObject): string => {
  const prev = summarize(before);
  const next = summarize(after);
  const changed: string[] = [];
  // FIX: old and new values were concatenated with no separator (e.g.
  // "status:OpenClosed"); render them as "old→new". Also collapses the
  // five copy-pasted compare blocks into one helper.
  const pushDelta = (label: string, a: unknown, b: unknown) => {
    if (clean(a) !== clean(b)) {
      changed.push(`${label}:${clean(a)}→${clean(b)}`);
    }
  };
  pushDelta("status", prev.status, next.status);
  pushDelta("amount", prev.amount, next.amount);
  pushDelta("by", prev.updatedBy, next.updatedBy);
  pushDelta("desc", prev.description, next.description);
  pushDelta("updated", prev.lastUpdated, next.lastUpdated);
  // Raw structural fallback, ignoring fields that churn on every poll.
  const noisyFields = new Set(["_info.lastUpdated"]);
  const rawDiffs = diffPaths(before, after).filter(
    (path) => !noisyFields.has(path),
  );
  const rawDelta =
    rawDiffs.length > 0 ? `fields:${rawDiffs.join(",")}` : "content changed";
  const delta = changed.length > 0 ? changed.join(" | ") : rawDelta;
  return `~ id=${clean(next.id)} type=${clean(next.type)} ${delta}`;
};
// Render one tracked row for log output.
const formatTracked = (row: TrackedRow) =>
  `product=${row.product} onHand=${row.onHand} inventory=${row.inventory}`;

// "+" line for a newly seen adjustment; shows at most three tracked rows.
const trackedAddedLine = (item: SnapshotItem) => {
  const base = summarize(item.data);
  const rows = item.trackedRows.slice(0, 3).map(formatTracked).join(" ; ");
  const more =
    item.trackedRows.length > 3
      ? ` ; +${item.trackedRows.length - 3} more`
      : "";
  return `+ id=${clean(base.id)} type=${clean(base.type)} ${rows}${more}`;
};

// "-" line for an adjustment that disappeared; same truncation rule.
const trackedRemovedLine = (item: SnapshotItem) => {
  const base = summarize(item.data);
  const rows = item.trackedRows.slice(0, 3).map(formatTracked).join(" ; ");
  const more =
    item.trackedRows.length > 3
      ? ` ; +${item.trackedRows.length - 3} more`
      : "";
  return `- id=${clean(base.id)} type=${clean(base.type)} ${rows}${more}`;
};
// "~" line describing per-product quantity changes between two snapshots of
// the same adjustment. Shows at most three product deltas.
const trackedUpdatedLine = (
  beforeItem: SnapshotItem,
  afterItem: SnapshotItem,
) => {
  const base = summarize(afterItem.data);
  // Index tracked rows by product so changes can be paired up per product.
  const beforeMap = new Map(
    beforeItem.trackedRows.map((row) => [row.product, row]),
  );
  const afterMap = new Map(
    afterItem.trackedRows.map((row) => [row.product, row]),
  );
  const productKeys = [
    ...new Set([...beforeMap.keys(), ...afterMap.keys()]),
  ].sort();
  const deltas: string[] = [];
  for (const product of productKeys) {
    const prev = beforeMap.get(product);
    const next = afterMap.get(product);
    if (!prev && next) {
      deltas.push(
        `${product} added(onHand=${next.onHand},inventory=${next.inventory})`,
      );
      continue;
    }
    if (prev && !next) {
      deltas.push(`${product} removed`);
      continue;
    }
    if (!prev || !next) continue;
    const onHandChanged = prev.onHand !== next.onHand;
    const inventoryChanged = prev.inventory !== next.inventory;
    if (!onHandChanged && !inventoryChanged) continue;
    const parts: string[] = [];
    // FIX: plain `if` statements replace ternaries abused for side effects,
    // and old/new values get a "→" separator (previously concatenated,
    // producing unreadable output like "onHand:1215").
    if (onHandChanged) {
      parts.push(`onHand:${prev.onHand}→${next.onHand}`);
    }
    if (inventoryChanged) {
      parts.push(`inventory:${prev.inventory}→${next.inventory}`);
    }
    deltas.push(`${product} ${parts.join(",")}`);
  }
  const shown = deltas.slice(0, 3).join(" ; ");
  const more = deltas.length > 3 ? ` ; +${deltas.length - 3} more` : "";
  const changes = shown || "inventory/on-hand changed";
  return `~ id=${clean(base.id)} type=${clean(base.type)} ${changes}${more}`;
};
// Build snapshot items (identity key, stable serialization, tracked rows +
// signature) for every row returned by a poll.
const toSnapshot = (rows: unknown[]): SnapshotItem[] =>
  rows.map((row) => {
    const data = toObject(row);
    const trackedRows = getTrackedRows(data);
    const trackedSignature = stableStringify(trackedRows);
    return {
      key: itemKey(data),
      serialized: stableStringify(data),
      data,
      trackedRows,
      trackedSignature,
    };
  });

// Fetch the adjustments list; tolerates both a bare-array response and a
// { data: [...] } envelope, returning [] for anything else.
const fetchAdjustments = async (): Promise<unknown[]> => {
  const response = await cw.get(ENDPOINT);
  const payload = response.data;
  if (Array.isArray(payload)) return payload;
  if (isObject(payload) && Array.isArray(payload.data)) return payload.data;
  return [];
};
// ISO timestamp for log prefixes.
const now = () => new Date().toISOString();

// Snapshot from the previous poll, keyed by adjustment identity.
let previous = new Map<string, SnapshotItem>();

// Main poll loop: fetch, snapshot, diff against the previous poll, report,
// sleep, repeat forever.
const run = async () => {
  console.log(
    `[${now()}] Watching ${CW_BASE_URL}${ENDPOINT} every ${POLL_MS / 1000}s`,
  );
  while (true) {
    try {
      const rows = await fetchAdjustments();
      const snapshotItems = toSnapshot(rows);
      const current = new Map(snapshotItems.map((item) => [item.key, item]));
      // The first successful poll only establishes the baseline.
      if (previous.size === 0) {
        previous = current;
        console.log(
          `[${now()}] Baseline captured (${current.size} adjustments)`,
        );
        await Bun.sleep(POLL_MS);
        continue;
      }
      const added: SnapshotItem[] = [];
      const removed: SnapshotItem[] = [];
      const updated: Array<{ before: SnapshotItem; after: SnapshotItem }> = [];
      for (const [key, item] of current) {
        const previousItem = previous.get(key);
        if (!previousItem) {
          // Only report additions that carry tracked inventory rows.
          if (item.trackedRows.length === 0) continue;
          added.push(item);
          continue;
        }
        const hasTrackedRows =
          item.trackedRows.length > 0 || previousItem.trackedRows.length > 0;
        if (!hasTrackedRows) continue;
        // Compare tracked-row signatures rather than whole payloads so
        // untracked fields don't trigger update noise.
        if (previousItem.trackedSignature !== item.trackedSignature) {
          updated.push({ before: previousItem, after: item });
        }
      }
      // Anything present before but absent now (with tracked rows) was removed.
      for (const [key, item] of previous) {
        if (!current.has(key) && item.trackedRows.length > 0)
          removed.push(item);
      }
      if (added.length > 0 || updated.length > 0 || removed.length > 0) {
        console.log(`\n[${now()}] Changes detected:`);
        console.log(`- added: ${added.length}`);
        console.log(`- updated: ${updated.length}`);
        console.log(`- removed: ${removed.length}`);
        if (added.length > 0) {
          console.log("\nAdded:");
          for (const item of added) {
            console.log(trackedAddedLine(item));
          }
        }
        if (updated.length > 0) {
          console.log("\nUpdated:");
          for (const item of updated) {
            console.log(trackedUpdatedLine(item.before, item.after));
          }
        }
        if (removed.length > 0) {
          console.log("\nRemoved:");
          for (const item of removed) {
            console.log(trackedRemovedLine(item));
          }
        }
      }
      if (added.length === 0 && updated.length === 0 && removed.length === 0) {
        console.log(`[${now()}] No changes (${current.size} adjustments)`);
      }
      previous = current;
    } catch (error) {
      // Keep polling through transient failures; surface the HTTP status
      // when axios knows it.
      if (axios.isAxiosError(error)) {
        console.error(
          `[${now()}] Poll failed: ${error.response?.status ?? "ERR"}`,
          error.message,
        );
      } else {
        console.error(`[${now()}] Poll failed:`, error);
      }
    }
    await Bun.sleep(POLL_MS);
  }
};

run().catch((error) => {
  console.error("Watcher crashed:", error);
  process.exit(1);
});
+217
View File
@@ -0,0 +1,217 @@
import { appendFile, mkdir } from "node:fs/promises";
// Local port the capture server listens on.
const port = 3001;
const logDir = "cw-api-logs";
// One JSONL log file per process start, timestamped (":" and "." replaced —
// not filename-safe everywhere).
const logFilePath = `${logDir}/test-webserver-${new Date().toISOString().replace(/[:.]/g, "-")}.jsonl`;
// Methods whose request body we attempt to read and parse as JSON.
const jsonBodyMethods = ["POST", "PUT", "PATCH", "DELETE"];
type ParsedJson = Record<string, unknown> | unknown[];
type EventSummary = ReturnType<typeof buildSummary>;
// ANSI escape codes for colorized terminal output.
const color = {
  reset: "\x1b[0m",
  bold: "\x1b[1m",
  dim: "\x1b[2m",
  cyan: "\x1b[36m",
  blue: "\x1b[34m",
  yellow: "\x1b[33m",
  magenta: "\x1b[35m",
  green: "\x1b[32m",
  gray: "\x1b[90m",
};
// Wrap `value` in the given ANSI tone, resetting afterwards.
const paint = (value: string, tone: keyof typeof color) =>
  `${color[tone]}${value}${color.reset}`;
// Parse a JSON string, returning the value only when it is an object or
// array; primitives, invalid JSON, and null all yield null.
const safeParseJson = (value: string): ParsedJson | null => {
  try {
    const result: unknown = JSON.parse(value);
    if (result !== null && typeof result === "object") {
      return result as ParsedJson;
    }
    return null;
  } catch {
    return null;
  }
};
// Parse an Entity value that may arrive either as a JSON-encoded string or
// as an already-parsed object.
const parseEntity = (value: unknown): ParsedJson | null => {
  if (typeof value === "string") return safeParseJson(value);
  if (typeof value !== "object" || value === null) return null;
  return value as ParsedJson;
};

// Narrow a ParsedJson to a plain object (arrays excluded).
const asObject = (value: ParsedJson | null): Record<string, unknown> | null => {
  if (!value) return null;
  if (Array.isArray(value)) return null;
  return value;
};

// Expand top-level string fields that themselves look like JSON ("{..." or
// "[...") into parsed values; anything that fails to parse stays a string.
const parseJsonStringFields = (
  value: Record<string, unknown> | null,
): Record<string, unknown> | null => {
  if (!value) return null;
  return Object.entries(value).reduce<Record<string, unknown>>(
    (acc, [key, current]) => {
      if (typeof current !== "string") {
        acc[key] = current;
        return acc;
      }
      const looksLikeJson = current.startsWith("{") || current.startsWith("[");
      if (!looksLikeJson) {
        acc[key] = current;
        return acc;
      }
      // Keep the raw string when it only looked like JSON.
      const parsed = safeParseJson(current);
      acc[key] = parsed ?? current;
      return acc;
    },
    {},
  );
};
// Extract query parameters plus an "inferred id": when the query string's
// first segment has no "=" (e.g. "?12345"), treat it as a bare identifier.
const parseQuery = (url: URL) => {
  const params: Record<string, string> = {};
  for (const [key, value] of url.searchParams) {
    params[key] = value;
  }
  const rawQuery = url.search.startsWith("?")
    ? url.search.slice(1)
    : url.search;
  const [firstSegment = ""] = rawQuery.split("&");
  const inferredId =
    firstSegment && !firstSegment.includes("=") ? firstSegment : null;
  return { params, inferredId };
};
// Build a compact summary from the parsed callback body; null when there was
// no parseable body. Field names are PascalCase as they arrive in the
// request payload (presumably ConnectWise callback conventions — confirm
// against the actual sender).
const buildSummary = (
  parsedBody: Record<string, unknown> | null,
  parsedEntity: Record<string, unknown> | null,
) => {
  if (!parsedBody) return null;
  return {
    messageId: parsedBody.MessageId ?? null,
    action: parsedBody.Action ?? null,
    type: parsedBody.Type ?? null,
    id: parsedBody.ID ?? null,
    memberId: parsedBody.MemberId ?? null,
    // Entity fields probe several likely names, first match wins.
    entityStatus:
      parsedEntity?.StatusName ??
      parsedEntity?.TicketStatus ??
      parsedEntity?.Status ??
      null,
    entitySummary: parsedEntity?.Summary ?? parsedEntity?.CompanyName ?? null,
    entityUpdatedBy: parsedEntity?.UpdatedBy ?? null,
    entityLastUpdated:
      parsedEntity?.LastUpdatedUTC ?? parsedEntity?.LastUpdated ?? null,
  };
};
// Pretty-print one received event to the terminal. Each column falls back
// through summary fields, then query params, then "-".
const displayTerminalEvent = (
  method: string,
  routePath: string,
  query: { params: Record<string, string>; inferredId: string | null },
  summary: EventSummary,
  timestamp: string,
) => {
  const id = String(summary?.id ?? query.inferredId ?? "-");
  const action = String(summary?.action ?? query.params.action ?? "-");
  // Fallback type: first path segment of the route.
  const eventType = String(summary?.type ?? routePath.split("/")[1] ?? "-");
  const actor = String(
    summary?.entityUpdatedBy ??
      query.params.memberId ??
      summary?.memberId ??
      "-",
  );
  const status = String(summary?.entityStatus ?? "-");
  const title = String(summary?.entitySummary ?? "-");
  const methodTone = method === "GET" ? "green" : "yellow";
  console.log();
  console.log(
    `${paint("●", "cyan")} ${paint(method, methodTone)} ${paint(routePath, "blue")} ${paint(timestamp, "gray")}`,
  );
  console.log(
    `${paint("type", "magenta")}: ${paint(eventType, "bold")} ${paint("action", "magenta")}: ${action} ${paint("id", "magenta")}: ${id}`,
  );
  console.log(
    `${paint("actor", "magenta")}: ${paint(actor, "cyan")} ${paint("status", "magenta")}: ${status}`,
  );
  console.log(`${paint("title", "magenta")}: ${title}`);
};
// Append one JSON record (as a single line) to the session log file.
const writeLogRecord = async (record: Record<string, unknown>) => {
  await appendFile(logFilePath, `${JSON.stringify(record)}\n`, "utf8");
};

await mkdir(logDir, { recursive: true });

// Catch-all HTTP server: parses, displays, and logs every request, then
// echoes a success payload back to the caller.
Bun.serve({
  port,
  async fetch(request) {
    const url = new URL(request.url);
    const routePath = `${url.pathname}${url.search}`;
    const method = request.method;
    const query = parseQuery(url);
    const startedAt = new Date().toISOString();
    // Only read a body for methods expected to carry one.
    const rawBody = jsonBodyMethods.includes(method)
      ? await request.text()
      : "";
    const parsedJson = safeParseJson(rawBody);
    const parsedBody = asObject(parsedJson);
    const parsedBodyExpanded = parseJsonStringFields(parsedBody);
    // The Entity field may arrive as a JSON string nested inside the body.
    const parsedEntity = asObject(parseEntity(parsedBodyExpanded?.Entity));
    const summary = buildSummary(parsedBodyExpanded, parsedEntity);
    const responseBody = {
      success: true,
      method,
      path: routePath,
      timestamp: startedAt,
      summary,
    };
    const responseStatus = 200;
    displayTerminalEvent(method, routePath, query, summary, startedAt);
    await writeLogRecord({
      timestamp: startedAt,
      request: {
        method,
        path: routePath,
        query,
        headers: Object.fromEntries(request.headers.entries()),
        bodyRaw: rawBody || null,
        bodyParsed: parsedBodyExpanded,
        entityParsed: parsedEntity,
        summary,
      },
      response: {
        status: responseStatus,
        body: responseBody,
      },
    });
    return Response.json(responseBody, { status: responseStatus });
  },
});

console.log(`Test webserver listening on http://localhost:${port}`);
console.log(`Response/request log file: ${logFilePath}`);
+247
View File
@@ -0,0 +1,247 @@
/**
* CW Endpoint Validator
*
* Validates that all ConnectWise API endpoints used by the application
* are reachable and respond correctly. Uses the same axios instance
* and credentials as the running app.
*
* Usage: bun ./utils/validateCwEndpoints.ts
*/
import { connectWiseApi } from "../src/constants";
import { prisma } from "../src/constants";
/** One probe against a single ConnectWise REST endpoint. */
interface EndpointTest {
  /** Human-readable label printed in the per-test result line. */
  name: string;
  /** HTTP verb passed to the axios request. */
  method: "get" | "post" | "patch" | "delete";
  /** Path (plus query string) relative to the axios instance's baseURL. */
  url: string;
  /** If true, a 404 is treated as "endpoint exists but resource doesn't" — still a pass. */
  allow404?: boolean;
}
// ---------------------------------------------------------------------------
// Build test list — some require real IDs from the database
// ---------------------------------------------------------------------------
/**
 * Assembles the list of endpoint probes.
 *
 * Single-resource checks are parameterized with real IDs pulled from the
 * local database so they hit plausible records; when the database has no
 * matching rows both IDs fall back to 1 (those probes tolerate a 404).
 */
async function buildTestList(): Promise<EndpointTest[]> {
  const latestOpenOpp = await prisma.opportunity.findFirst({
    where: { closedFlag: false },
    select: { cwOpportunityId: true, companyId: true },
    orderBy: { cwLastUpdated: "desc" },
  });
  const anyCompany = await prisma.company.findFirst({
    select: { cw_CompanyId: true },
  });
  const oppId = latestOpenOpp?.cwOpportunityId ?? 1;
  const companyId = anyCompany?.cw_CompanyId ?? 1;

  // Small factories keep the probe table below compact.
  const get = (name: string, url: string): EndpointTest => ({
    name,
    method: "get",
    url,
  });
  const getTolerant = (name: string, url: string): EndpointTest => ({
    name,
    method: "get",
    url,
    allow404: true,
  });
  const oppCondition = encodeURIComponent(`opportunity/id=${oppId}`);
  const companyCondition = encodeURIComponent(`company/id=${companyId}`);

  return [
    // ── Core counts (lightweight, always work) ──────────────────────────
    get("Opportunities count", "/sales/opportunities/count"),
    get("Activities count", "/sales/activities/count"),
    get("Companies count", "/company/companies/count"),
    get("Members count", "/system/members/count"),
    get("Catalog count", "/procurement/catalog/count"),
    // ── Paginated list endpoints ────────────────────────────────────────
    get(
      "Opportunities list (page 1, size 1)",
      "/sales/opportunities?page=1&pageSize=1",
    ),
    get(
      "Activities list (page 1, size 1)",
      "/sales/activities?page=1&pageSize=1",
    ),
    get(
      "Companies list (page 1, size 1)",
      "/company/companies?page=1&pageSize=1",
    ),
    get("Members list (page 1, size 1)", "/system/members?page=1&pageSize=1"),
    get(
      "Catalog list (page 1, size 1)",
      "/procurement/catalog?page=1&pageSize=1",
    ),
    get("User-defined fields (page 1)", "/system/userDefinedFields?pageSize=1"),
    // ── Single-resource fetches (need real IDs) ─────────────────────────
    getTolerant(`Opportunity #${oppId}`, `/sales/opportunities/${oppId}`),
    getTolerant(
      `Opportunity #${oppId} forecast`,
      `/sales/opportunities/${oppId}/forecast`,
    ),
    getTolerant(
      `Opportunity #${oppId} notes`,
      `/sales/opportunities/${oppId}/notes`,
    ),
    getTolerant(
      `Opportunity #${oppId} contacts`,
      `/sales/opportunities/${oppId}/contacts`,
    ),
    getTolerant(
      `Activities for opp #${oppId}`,
      `/sales/activities/count?conditions=${oppCondition}`,
    ),
    getTolerant(
      `Procurement products for opp #${oppId}`,
      `/procurement/products?conditions=${oppCondition}&fields=id,forecastDetailId,cancelledFlag`,
    ),
    getTolerant(`Company #${companyId}`, `/company/companies/${companyId}`),
    getTolerant(
      `Company #${companyId} sites`,
      `/company/companies/${companyId}/sites?pageSize=1`,
    ),
    getTolerant(
      `Company #${companyId} configurations`,
      `/company/configurations?conditions=${companyCondition}&pageSize=1`,
    ),
  ];
}
// ---------------------------------------------------------------------------
// Runner
// ---------------------------------------------------------------------------
/**
 * Runs every endpoint probe sequentially against the live ConnectWise API,
 * printing one result line per test plus a final summary.
 *
 * Exits with code 0 when nothing failed (warnings allowed), 1 otherwise.
 * Fix: the success and generic-failure log lines concatenated the test name
 * directly onto the status ("Opportunities count200 [12ms]"); restored the
 * separator and the status icon (the ⚠️ lines show icons were intended).
 */
async function main() {
  console.log(
    "╔══════════════════════════════════════════════════════════════╗",
  );
  console.log(
    "║          ConnectWise API Endpoint Validator                  ║",
  );
  console.log(
    "╚══════════════════════════════════════════════════════════════╝",
  );
  console.log();
  console.log(`Base URL: ${connectWiseApi.defaults.baseURL}`);
  console.log(`Timeout: ${connectWiseApi.defaults.timeout ?? "none"}ms`);
  console.log();
  const tests = await buildTestList();
  let passed = 0;
  let failed = 0;
  let warned = 0;
  for (const test of tests) {
    const start = performance.now();
    try {
      const response = await connectWiseApi.request({
        method: test.method,
        url: test.url,
        timeout: 30_000, // Use a generous timeout for validation
      });
      const elapsed = Math.round(performance.now() - start);
      const statusTag =
        elapsed > 5000 ? `⚠️ SLOW (${elapsed}ms)` : `${elapsed}ms`;
      // FIX: name and HTTP status were concatenated with no separator.
      console.log(`✅ ${test.name} — ${response.status} [${statusTag}]`);
      if (elapsed > 5000) warned++; // a slow pass counts as passed AND warned
      passed++;
    } catch (err: any) {
      const elapsed = Math.round(performance.now() - start);
      if (err?.isAxiosError) {
        const status = err.response?.status;
        const code = err.code;
        if (status === 404 && test.allow404) {
          // Endpoint is routable; only the sample resource is missing.
          console.log(
            ` ⚠️ ${test.name} — 404 (resource not found, endpoint OK) [${elapsed}ms]`,
          );
          warned++;
          continue;
        }
        if (code === "ECONNABORTED") {
          console.log(`❌ ${test.name} — TIMEOUT after ${elapsed}ms`);
        } else if (code === "ECONNREFUSED") {
          console.log(
            `❌ ${test.name} — CONNECTION REFUSED (CW server down?)`,
          );
        } else if (status) {
          console.log(
            `❌ ${test.name} — HTTP ${status} [${elapsed}ms]: ${err.response?.data?.message ?? err.message}`,
          );
        } else {
          // FIX: missing separator between name and error code/message.
          console.log(
            `❌ ${test.name} — ${code ?? err.message} [${elapsed}ms]`,
          );
        }
      } else {
        // FIX: missing separator between name and error message.
        console.log(`❌ ${test.name} — ${err.message} [${elapsed}ms]`);
      }
      failed++;
    }
  }
  console.log();
  console.log("─".repeat(64));
  console.log(
    ` Results: ${passed} passed, ${warned} warnings, ${failed} failed (${tests.length} total)`,
  );
  console.log("─".repeat(64));
  await prisma.$disconnect();
  process.exit(failed > 0 ? 1 : 0);
}
// Surface any unhandled failure and exit non-zero so the caller notices.
const onFatal = (error: { message: string }) => {
  console.error("Fatal error:", error.message);
  process.exit(1);
};
main().catch(onFatal);