feat: add CW callback route and optimize cache refresh workflows

This commit is contained in:
2026-03-03 19:46:48 -06:00
parent 6d935e7180
commit a048e1e824
28 changed files with 3268 additions and 294 deletions
+8 -6
View File
@@ -16,8 +16,8 @@
* | # | Condition | TTL (ms) | TTL (human) | Rationale |
* |---|------------------------------------------------------------------|----------|-------------|--------------------------------------------------------------------|
* | 1 | `closedFlag` is `true` | `null` | Do not cache| Closed records are rarely accessed; caching wastes memory. |
* | 2 | `expectedCloseDate` OR `lastUpdated` is within the last **5 days**| 30 000 | 30 seconds | High-activity window — data changes frequently and must stay fresh.|
* | 3 | `expectedCloseDate` OR `lastUpdated` is within the last **14 days**| 60 000 | 60 seconds | Moderate activity — still relevant, but changes less often. |
* | 2 | `expectedCloseDate` OR `lastUpdated` is within the last **5 days**| 60 000 | 60 seconds | High-activity window — data changes frequently and must stay fresh.|
* | 3 | `expectedCloseDate` OR `lastUpdated` is within the last **14 days**| 90 000 | 90 seconds | Moderate activity — still relevant, but changes less often. |
* | 4 | Everything else (older than 14 days) | 900 000 | 15 minutes | Low activity — safe to serve from cache for longer. |
*
* ## Evaluation order
@@ -62,11 +62,13 @@
// Constants
// ---------------------------------------------------------------------------
/**
 * 60 seconds TTL for high-activity records (within 5 days).
 * Must exceed the 30-second background refresh interval so the cache
 * stays warm between cycles.
 */
export const TTL_HIGH_ACTIVITY = 60_000;
/** 90 seconds TTL for moderate-activity records (within 14 days). */
export const TTL_MODERATE_ACTIVITY = 90_000;
/** 15 minutes TTL for low-activity / stale records. */
export const TTL_LOW_ACTIVITY = 900_000;
@@ -18,7 +18,7 @@
* | 1 | Status is **Won**, **Lost**, **Pending Won**, or **Pending Lost** | `null` | No cache | Products on terminal / near-terminal opps are static; no need to keep them warm. |
* | 2 | Opportunity is **not cacheable** (main cache TTL is `null`) | `null` | No cache | If the opp itself is evicted, sub-resources follow suit. |
* | 3 | `lastUpdated` is within the last **3 days** | 15 000 | 15 seconds | Actively-worked deals — products are being edited and need near-real-time freshness. |
* | 4 | Everything else | 1 800 000 | 30 minutes | Lazy on-demand cache: fetched when requested, expires after 30 min without refresh. |
* | 4 | Everything else | 1 200 000 | 20 minutes | Lazy on-demand cache: fetched when requested, expires after 20 min without refresh. |
*
* ## Evaluation order
*
@@ -44,11 +44,13 @@ import { QUOTE_STATUSES } from "../../types/QuoteStatuses";
// Constants
// ---------------------------------------------------------------------------
/**
 * 45 seconds — TTL for hot products (opportunity updated within 3 days).
 * Must exceed the 30-second background refresh interval so the cache
 * stays warm between cycles.
 */
export const PRODUCTS_TTL_HOT = 45_000;
/** 20 minutes — TTL for on-demand product cache (lazy fallback). */
export const PRODUCTS_TTL_LAZY = 1_200_000;
/** 3 days in milliseconds. */
const THREE_DAYS_MS = 3 * 24 * 60 * 60 * 1000;
+25 -16
View File
@@ -310,25 +310,34 @@ export async function fetchAndCacheCompanyCwData(
ttlMs: number,
): Promise<{ company: any; defaultContact: any; allContacts: any[] } | null> {
try {
const cwCompany = await fetchCwCompanyById(cwCompanyId);
// Fetch company and all-contacts in parallel — the allContacts URL
// can be constructed directly without the company response.
const [cwCompany, allContactsData] = await Promise.all([
fetchCwCompanyById(cwCompanyId),
withCwRetry(
() =>
connectWiseApi.get(
`/company/companies/${cwCompanyId}/contacts?pageSize=1000`,
),
{ label: `company#${cwCompanyId}/allContacts` },
),
]);
if (!cwCompany) return null;
const contactHref = cwCompany.defaultContact?._info?.contact_href;
const defaultContactData = contactHref
? await withCwRetry(() => connectWiseApi.get(contactHref), {
label: `company#${cwCompanyId}/defaultContact`,
})
: undefined;
const allContactsData = await withCwRetry(
() =>
connectWiseApi.get(`${cwCompany._info.contacts_href}&pageSize=1000`),
{ label: `company#${cwCompanyId}/allContacts` },
);
// Default contact: derive from allContacts instead of making an
// extra serial CW call. The company object carries the default
// contact's ID, so we can pull it from the list we already fetched.
const defaultContactId = cwCompany.defaultContact?.id;
const defaultContactData = defaultContactId
? ((allContactsData.data as any[]).find(
(c: any) => c.id === defaultContactId,
) ?? null)
: null;
const blob = {
company: cwCompany,
defaultContact: defaultContactData?.data ?? null,
defaultContact: defaultContactData,
allContacts: allContactsData.data,
};
@@ -491,11 +500,11 @@ export async function invalidateProductsCache(
}
/**
 * Site TTL — 20 minutes. Site/address data rarely changes so we cache
 * aggressively. The background refresh does NOT proactively warm site keys;
 * they are populated lazily on the first detail-view request.
 */
const SITE_TTL_MS = 1_200_000;
/**
* Fetch a CW company site from ConnectWise and cache the result.
@@ -0,0 +1,79 @@
/**
* CW API Concurrency Limiter
*
* Limits the number of simultaneous in-flight requests to the ConnectWise
* API. CW responds significantly slower under high concurrency (observed
 * ~3× slower at 56 concurrent than at 9 concurrent), so bounding the
* parallelism actually reduces total wall-clock time.
*
* Implemented as an Axios request interceptor that gates on a simple
* counting semaphore. When the limit is reached, new requests queue and
* resolve in FIFO order as earlier requests complete.
*/
import type { AxiosInstance, InternalAxiosRequestConfig } from "axios";
// ---------------------------------------------------------------------------
// Semaphore
// ---------------------------------------------------------------------------
/**
 * Minimal counting semaphore backing the CW concurrency limiter.
 * Callers beyond the limit are parked in a FIFO queue and resumed
 * one-for-one as slots are released.
 */
class Semaphore {
  /** Number of slots currently held. */
  private inUse = 0;
  /** FIFO queue of resolvers for callers waiting on a slot. */
  private waiters: (() => void)[] = [];

  constructor(private limit: number) {}

  /** Acquire a slot — resolves immediately if under the limit, else waits. */
  acquire(): Promise<void> {
    if (this.inUse >= this.limit) {
      return new Promise<void>((wake) => {
        this.waiters.push(wake);
      });
    }
    this.inUse += 1;
    return Promise.resolve();
  }

  /**
   * Release a slot. If anyone is waiting, the slot transfers directly to
   * the oldest waiter (the in-use count stays unchanged); otherwise the
   * count simply drops.
   */
  release(): void {
    const wake = this.waiters.shift();
    if (wake) {
      wake();
    } else {
      this.inUse -= 1;
    }
  }
}
// ---------------------------------------------------------------------------
// Interceptor attachment
// ---------------------------------------------------------------------------
/**
 * Attach a concurrency-limiting interceptor to an Axios instance.
 *
 * Every outgoing request first acquires a semaphore slot (queueing when
 * `max` requests are already in flight); the slot is returned when the
 * response settles, whether it succeeded or failed.
 *
 * @param api - The Axios instance to limit.
 * @param max - Maximum concurrent in-flight requests (default: 6).
 */
export function attachCwConcurrencyLimiter(api: AxiosInstance, max = 6): void {
  const gate = new Semaphore(max);

  // Gate each request on a slot before it is dispatched.
  api.interceptors.request.use(async (config: InternalAxiosRequestConfig) => {
    await gate.acquire();
    return config;
  });

  // Return the slot once the response settles — success and failure paths
  // both release, so errors cannot leak slots.
  api.interceptors.response.use(
    (response) => {
      gate.release();
      return response;
    },
    (error) => {
      gate.release();
      return Promise.reject(error);
    },
  );
}
@@ -102,3 +102,40 @@ export const resolveMember = async (
cwMemberId: cwMember?.id ?? null,
};
};
/**
 * Resolve Multiple CW Identifiers in a Single Batch
 *
 * Batched variant of `resolveMember`: N identifiers cost one `findMany`
 * instead of N `findFirst` calls. Duplicate identifiers are de-duplicated
 * before querying.
 *
 * @param identifiers - Array of CW member identifiers
 * @returns Map of identifier → ResolvedMember
 */
export const resolveMembers = async (
  identifiers: string[],
): Promise<Map<string, ResolvedMember>> => {
  const uniqueIds = Array.from(new Set(identifiers));

  // One round-trip to the DB for every identifier at once.
  const matchedUsers = await prisma.user.findMany({
    where: { cwIdentifier: { in: uniqueIds } },
    select: { id: true, cwIdentifier: true },
  });
  const idByIdentifier = new Map(
    matchedUsers.map((user) => [user.cwIdentifier, user.id]),
  );

  const resolved = new Map<string, ResolvedMember>();
  for (const identifier of uniqueIds) {
    const cwMember = memberCache.get(identifier);
    // Prefer the cached CW display name; fall back to the raw identifier
    // when the cache misses or the assembled name is blank.
    const displayName = cwMember
      ? `${cwMember.firstName} ${cwMember.lastName}`.trim() || identifier
      : identifier;
    resolved.set(identifier, {
      id: idByIdentifier.get(identifier) ?? null,
      identifier,
      name: displayName,
      cwMemberId: cwMember?.id ?? null,
    });
  }
  return resolved;
};
+22 -4
View File
@@ -66,10 +66,28 @@ export const catalogCw = {
return allItems;
},
fetchByCatalogId: async (cwCatalogId: number): Promise<CatalogItem> => {
  // Try the direct catalog endpoint first; on any rejection fall back to
  // the items endpoint. NOTE(review): the reason the primary call can
  // fail is not documented here — presumably differing CW API shapes;
  // confirm before tightening the bare catch.
  const primaryUrl = `/procurement/catalog/${cwCatalogId}`;
  const fallbackUrl = `/procurement/catalog/items/${cwCatalogId}`;
  try {
    const primary = await connectWiseApi.get(primaryUrl);
    return primary.data;
  } catch {
    const fallback = await connectWiseApi.get(fallbackUrl);
    return fallback.data;
  }
},
fetch: async (id: string): Promise<CatalogItem> => {
  // Numeric IDs route through fetchByCatalogId (which carries an endpoint
  // fallback); non-numeric IDs hit the items endpoint directly.
  // Fix: the previous text unconditionally returned from the items
  // endpoint first, leaving the numeric-ID branch unreachable dead code.
  const numericId = Number(id);
  if (!Number.isFinite(numericId)) {
    const response = await connectWiseApi.get(
      `/procurement/catalog/items/${id}`,
    );
    return response.data;
  }
  return catalogCw.fetchByCatalogId(numericId);
},
};
@@ -0,0 +1,469 @@
import { prisma, redis, connectWiseApi } from "../../../constants";
import { withCwRetry } from "../withCwRetry";
import { catalogCw } from "./catalog";
import { CatalogItem } from "./catalog.types";
/** Generic JSON-ish object shape used for loosely-typed CW payloads. */
type JsonObject = Record<string, unknown>;
/** One tracked quantity row extracted from an adjustment detail. */
type TrackedProduct = {
  cwCatalogId: number;
  product: string;
  onHand: string;
  inventory: string;
  // Composite identity "cwCatalogId|product|onHand|inventory" — cheap
  // equality token for cross-cycle comparison.
  key: string;
};
/** Per-adjustment digest: tracked rows plus their stable signature. */
type AdjustmentSnapshot = {
  key: string;
  trackedRows: TrackedProduct[];
  signature: string;
};
// Single-page fetch — assumes <=1000 adjustments exist at once; no
// pagination loop is implemented here. TODO confirm that bound holds.
const ADJUSTMENTS_ENDPOINT = "/procurement/adjustments?pageSize=1000";
/** Redis key prefix for per-item sync blobs. */
const CATALOG_ITEM_CACHE_PREFIX = "catalog:item:cw:";
/** 20 minutes. */
const CATALOG_ITEM_CACHE_TTL_SECONDS = 20 * 60;
// Guard rails for a single polling cycle; both overridable via env vars.
const MAX_SYNC_PER_CYCLE = Number(
  process.env.CW_ADJUSTMENT_SYNC_MAX_PER_CYCLE ?? "50",
);
const SYNC_COOLDOWN_MS = Number(
  process.env.CW_ADJUSTMENT_SYNC_COOLDOWN_MS ?? `${10 * 60 * 1000}`,
);
// Module-level state carried between polling cycles.
let previous = new Map<string, AdjustmentSnapshot>();
let previousProductState = new Map<number, string>();
const lastSyncedAt = new Map<number, number>();
let inFlight = false;
/** Narrowing guard: true only for plain (non-array, non-null) objects. */
const isObject = (value: unknown): value is JsonObject => {
  if (value === null) return false;
  if (Array.isArray(value)) return false;
  return typeof value === "object";
};

/** Coerce any value to a JsonObject, substituting {} for non-objects. */
const toObject = (value: unknown): JsonObject =>
  isObject(value) ? value : {};

/**
 * Deterministic serialization used for change-detection signatures.
 * Object keys are emitted in sorted order. NOTE: array ENTRIES are also
 * sorted before joining, so two arrays with the same members in a
 * different order produce identical signatures — reorderings alone are
 * invisible to the diff.
 */
const stableStringify = (value: unknown): string => {
  if (Array.isArray(value)) {
    return `[${value.map((entry) => stableStringify(entry)).sort().join(",")}]`;
  }
  if (!isObject(value)) return JSON.stringify(value);
  const pairs = Object.keys(value)
    .sort()
    .map((key) => `${JSON.stringify(key)}:${stableStringify(value[key])}`);
  return `{${pairs.join(",")}}`;
};
/**
 * Walk a dotted path ("a.b.c") through nested plain objects.
 * Yields null as soon as a segment lands on a non-object; yields
 * undefined when the final key is simply absent.
 */
const readPathValue = (obj: JsonObject, path: string): unknown =>
  path.split(".").reduce<unknown>(
    (node, segment) => (isObject(node) ? node[segment] : null),
    obj,
  );

/** First non-empty ("" / null / undefined excluded) value among the paths. */
const firstValue = (obj: JsonObject, paths: string[]): unknown => {
  for (const candidate of paths) {
    const found = readPathValue(obj, candidate);
    const empty = found === null || found === undefined || found === "";
    if (!empty) return found;
  }
  return null;
};
/** Best-effort numeric coercion: finite numbers and numeric strings only. */
const asNumber = (value: unknown): number | null => {
  if (typeof value === "number") {
    return Number.isFinite(value) ? value : null;
  }
  if (typeof value !== "string" || value.length === 0) return null;
  const parsed = Number(value);
  return Number.isFinite(parsed) ? parsed : null;
};
/**
 * Render an arbitrary value as a short display string; "-" marks empty.
 * Objects prefer a human-meaningful field (name/identifier/id/code/value,
 * in that order) and fall back to the stable serialized form.
 */
const asText = (value: unknown): string => {
  if (value === null || value === undefined || value === "") return "-";
  const primitive =
    typeof value === "string" ||
    typeof value === "number" ||
    typeof value === "boolean";
  if (primitive) return String(value);
  if (Array.isArray(value)) {
    const rendered = value.map((entry) => asText(entry));
    return `[${rendered.join(",")}]`;
  }
  if (!isObject(value)) return String(value);
  for (const field of ["name", "identifier", "id", "code", "value"]) {
    const candidate = readPathValue(value, field);
    if (candidate === null || candidate === undefined || candidate === "") {
      continue;
    }
    if (typeof candidate === "object") continue;
    return String(candidate);
  }
  return stableStringify(value);
};
/**
 * Stable identity for an adjustment record. Tries the usual ID fields in
 * priority order; records with no usable ID key off their full serialized
 * content ("anon:…"), which still dedupes identical payloads.
 */
const adjustmentKey = (adjustment: JsonObject): string => {
  const candidatePaths = [
    "id",
    "adjustmentId",
    "procurementAdjustmentId",
    "recordId",
    "recId",
    "_info.id",
    "_info.href",
  ];
  for (const path of candidatePaths) {
    const rendered = asText(firstValue(adjustment, [path]));
    if (rendered !== "-") return rendered;
  }
  return `anon:${stableStringify(adjustment)}`;
};
/**
 * Map one adjustment detail record to a TrackedProduct row.
 *
 * Each field is resolved through a prioritized list of candidate paths
 * because CW payload shapes vary; the FIRST non-empty candidate wins, so
 * the ordering of each list is load-bearing.
 *
 * Returns null when no catalog ID can be found, or when neither an
 * on-hand nor an inventory quantity is present (nothing to track).
 */
const trackedRow = (detail: JsonObject): TrackedProduct | null => {
  const cwCatalogId = asNumber(
    firstValue(detail, [
      "catalogItem.id",
      "catalogItemId",
      "catalog.id",
      "catalogId",
      "item.id",
      "itemId",
      "product.id",
      "productId",
      "id",
    ]),
  );
  // NOTE(review): falsy check — a literal catalog ID of 0 would be
  // treated as "missing". Confirm CW never issues ID 0.
  if (!cwCatalogId) return null;
  const onHand = asText(
    firstValue(detail, [
      "onHand",
      "onHandQty",
      "onHandQuantity",
      "qtyOnHand",
      "quantityOnHand",
      "quantity.onHand",
    ]),
  );
  const inventory = asText(
    firstValue(detail, [
      "inventory",
      "inventoryQty",
      "inventoryLevel",
      "quantity",
      "qty",
    ]),
  );
  // Both quantities empty ("-") → no tracked change on this row.
  if (onHand === "-" && inventory === "-") return null;
  const product = asText(
    firstValue(detail, [
      "product.name",
      "product.identifier",
      "item.name",
      "item.identifier",
      "catalogItem.name",
      "catalogItem.identifier",
      "productName",
      "productIdentifier",
      "sku",
      "identifier",
    ]),
  );
  return {
    cwCatalogId,
    product,
    onHand,
    inventory,
    // Composite identity used for cross-cycle equality comparison.
    key: `${cwCatalogId}|${product}|${onHand}|${inventory}`,
  };
};
/**
 * Extract tracked product rows from an adjustment. The known detail-array
 * fields are checked in order and the FIRST one yielding any rows wins;
 * otherwise the adjustment itself is treated as a single candidate row.
 * Rows are returned sorted by their composite key.
 */
const trackedRows = (adjustment: JsonObject): TrackedProduct[] => {
  for (const field of ["adjustmentDetails", "details", "lineItems"]) {
    const candidate = readPathValue(adjustment, field);
    if (!Array.isArray(candidate)) continue;
    const rows: TrackedProduct[] = [];
    for (const entry of candidate) {
      const row = trackedRow(toObject(entry));
      if (row !== null) rows.push(row);
    }
    if (rows.length > 0) {
      return rows.sort((a, b) => a.key.localeCompare(b.key));
    }
  }
  const fallback = trackedRow(adjustment);
  return fallback ? [fallback] : [];
};
/** Build the key → snapshot map for one polled page of adjustments. */
const snapshot = (rows: unknown[]): Map<string, AdjustmentSnapshot> => {
  const byKey = new Map<string, AdjustmentSnapshot>();
  for (const raw of rows) {
    const adjustment = toObject(raw);
    const key = adjustmentKey(adjustment);
    const tracked = trackedRows(adjustment);
    byKey.set(key, {
      key,
      trackedRows: tracked,
      signature: stableStringify(tracked),
    });
  }
  return byKey;
};
/**
 * IDs whose signature is new or different in `after`. IDs that only
 * DISAPPEAR (present in `before`, absent in `after`) are NOT reported —
 * deletions never trigger a sync.
 */
const changedCatalogIds = (
  before: Map<number, string>,
  after: Map<number, string>,
): Set<number> => {
  const changed = new Set<number>();
  for (const [cwCatalogId, nextSig] of after) {
    const prevSig = before.get(cwCatalogId);
    if (!prevSig || prevSig !== nextSig) changed.add(cwCatalogId);
  }
  return changed;
};
/**
 * Collapse per-adjustment snapshots into one signature per catalog item:
 * the serialized, sorted, de-duplicated set of row keys currently
 * referencing that item across ALL adjustments.
 */
const productState = (
  adjustments: Map<string, AdjustmentSnapshot>,
): Map<number, string> => {
  const rowKeysByProduct = new Map<number, Set<string>>();
  for (const snap of adjustments.values()) {
    for (const row of snap.trackedRows) {
      let keys = rowKeysByProduct.get(row.cwCatalogId);
      if (!keys) {
        keys = new Set<string>();
        rowKeysByProduct.set(row.cwCatalogId, keys);
      }
      keys.add(row.key);
    }
  }
  const state = new Map<number, string>();
  for (const [cwCatalogId, keys] of rowKeysByProduct) {
    state.set(cwCatalogId, stableStringify([...keys].sort()));
  }
  return state;
};
const applySyncGuards = (ids: number[]): number[] => {
const now = Date.now();
const cooledIds = ids.filter((cwCatalogId) => {
const last = lastSyncedAt.get(cwCatalogId);
if (!last) return true;
return now - last >= SYNC_COOLDOWN_MS;
});
if (cooledIds.length <= MAX_SYNC_PER_CYCLE) return cooledIds;
return cooledIds.slice(0, MAX_SYNC_PER_CYCLE);
};
/**
 * Pull the current adjustments page from CW (with retry) and normalize
 * the payload to a plain array: CW may return either a bare array or an
 * envelope object with a `data` array; anything else yields [].
 */
const fetchAdjustments = async (): Promise<unknown[]> => {
  const response = await withCwRetry(
    () => connectWiseApi.get(ADJUSTMENTS_ENDPOINT),
    { label: "inventory-adjustments", maxAttempts: 3 },
  );
  const body = response.data;
  if (Array.isArray(body)) return body;
  if (isObject(body) && Array.isArray(body.data)) return body.data;
  return [];
};
/** Redis key for one catalog item's cached sync blob. */
const cacheKey = (cwCatalogId: number) =>
  `${CATALOG_ITEM_CACHE_PREFIX}${cwCatalogId}`;

/**
 * CW-reported last-update timestamp for an item; falls back to "now"
 * when the field is missing or unparseable.
 */
const cwLastUpdated = (item: CatalogItem): Date => {
  const raw = item._info?.lastUpdated;
  if (raw) {
    const parsed = new Date(raw);
    if (!Number.isNaN(parsed.getTime())) return parsed;
  }
  return new Date();
};
/**
 * Fetch one catalog item (+ its on-hand quantity) from CW, upsert it into
 * the local DB, and refresh its Redis cache entry.
 *
 * @returns true on success, false when any step failed (the error is
 *          logged; the caller decides whether to retry on a later cycle).
 */
const syncCatalogItem = async (cwCatalogId: number): Promise<boolean> => {
  try {
    // The item payload and the on-hand quantity are independent CW calls,
    // so fetch them in parallel (the CW concurrency limiter still bounds
    // total in-flight requests).
    const [item, onHand] = await Promise.all([
      withCwRetry(() => catalogCw.fetchByCatalogId(cwCatalogId), {
        label: `catalog-item:${cwCatalogId}`,
        maxAttempts: 3,
      }),
      withCwRetry(() => catalogCw.fetchInventoryOnHand(cwCatalogId), {
        label: `catalog-onhand:${cwCatalogId}`,
        maxAttempts: 3,
      }),
    ]);
    // Single field map shared by `create` and `update` so the two payloads
    // can never drift apart (previously 20 fields were duplicated verbatim).
    const fields = {
      identifier: item.identifier,
      // NOTE(review): `name` is sourced from `description` — confirm intended.
      name: item.description,
      description: item.description,
      customerDescription: item.customerDescription,
      internalNotes: item.notes,
      category: item.category?.name,
      categoryCwId: item.category?.id,
      subcategory: item.subcategory?.name,
      subcategoryCwId: item.subcategory?.id,
      manufacturer: item.manufacturer?.name,
      manufactureCwId: item.manufacturer?.id,
      partNumber: item.manufacturerPartNumber,
      vendorName: item.vendor?.name,
      vendorSku: item.vendorSku,
      vendorCwId: item.vendor?.id,
      price: item.price,
      cost: item.cost,
      inactive: item.inactiveFlag,
      salesTaxable: item.taxableFlag,
      onHand,
      cwLastUpdated: cwLastUpdated(item),
    };
    const persisted = await prisma.catalogItem.upsert({
      where: { cwCatalogId },
      create: { cwCatalogId, ...fields },
      update: fields,
    });
    await redis.set(
      cacheKey(cwCatalogId),
      JSON.stringify({
        cwCatalogId,
        onHand,
        cwItem: item,
        dbItem: persisted,
        syncedAt: new Date().toISOString(),
      }),
      "EX",
      CATALOG_ITEM_CACHE_TTL_SECONDS,
    );
    return true;
  } catch (err) {
    console.error(
      `[inventory-adjustments] failed to sync catalog item ${cwCatalogId}`,
      err,
    );
    return false;
  }
};
/**
 * One polling cycle of the CW inventory-adjustment listener.
 *
 * Diffs the current adjustments snapshot against the previous cycle,
 * derives the catalog items whose tracked quantities changed, and syncs
 * those items (DB + Redis) subject to the cooldown/cap guards. The first
 * successful cycle only captures a baseline and syncs nothing.
 * Re-entrancy is prevented with the module-level `inFlight` flag.
 */
export const listenInventoryAdjustments = async (): Promise<void> => {
  if (inFlight) return;
  inFlight = true;
  try {
    const rows = await fetchAdjustments();
    const current = snapshot(rows);
    const currentProductState = productState(current);
    // First run: record baseline state; there is nothing to diff against.
    if (previous.size === 0) {
      previous = current;
      previousProductState = currentProductState;
      console.log(
        `[inventory-adjustments] baseline captured (${current.size} adjustments, ${currentProductState.size} products)`,
      );
      return;
    }
    const changedIds = [
      ...changedCatalogIds(previousProductState, currentProductState),
    ].sort((a, b) => a - b);
    const guardedIds = applySyncGuards(changedIds);
    // NOTE(review): state is advanced BEFORE the per-item syncs run, so an
    // item whose sync fails below will not be re-detected next cycle unless
    // its signature changes again — confirm this is acceptable.
    previous = current;
    previousProductState = currentProductState;
    if (guardedIds.length === 0) return;
    let successCount = 0;
    // Sequential on purpose: keeps CW load minimal on top of the limiter.
    for (const cwCatalogId of guardedIds) {
      const ok = await syncCatalogItem(cwCatalogId);
      if (!ok) continue;
      // Cooldown clock starts only on successful syncs.
      lastSyncedAt.set(cwCatalogId, Date.now());
      successCount += 1;
    }
    const skippedByCooldown = changedIds.length - guardedIds.length;
    console.log(
      `[inventory-adjustments] inventory changed for ${changedIds.length} products, queued ${guardedIds.length}, synced ${successCount}, cooldown/cap skipped ${skippedByCooldown}`,
    );
  } catch (err) {
    console.error("[inventory-adjustments] listener failed", err);
  } finally {
    inFlight = false;
  }
};
@@ -2,6 +2,31 @@ import { prisma } from "../../../constants";
import { events } from "../../globalEvents";
import { catalogCw } from "./catalog";
/** Max simultaneous CW calls per batch. */
const CONCURRENCY = 6;
/** Pause inserted between batches (skipped after the final batch). */
const BATCH_DELAY_MS = 250;
/** Promise-based delay helper. */
const sleep = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms));

/**
 * Run tasks in fixed-size batches of CONCURRENCY, sleeping
 * BATCH_DELAY_MS between batches (but not after the last one).
 * Rejections never abort the run.
 *
 * @returns the number of tasks that rejected.
 */
const runSlowParallel = async (
  tasks: Array<() => Promise<void>>,
): Promise<number> => {
  let failures = 0;
  for (let start = 0; start < tasks.length; start += CONCURRENCY) {
    const settled = await Promise.allSettled(
      tasks.slice(start, start + CONCURRENCY).map((run) => run()),
    );
    failures += settled.filter((r) => r.status === "rejected").length;
    const isLastBatch = start + CONCURRENCY >= tasks.length;
    if (!isLastBatch) await sleep(BATCH_DELAY_MS);
  }
  return failures;
};
export const refreshCatalog = async () => {
events.emit("cw:catalog:refresh:check");
@@ -46,101 +71,104 @@ export const refreshCatalog = async () => {
staleCount: staleIds.length,
});
// 4. Fetch full catalog data, then filter to only stale items
const staleIdSet = new Set(staleIds);
const allCwItems = await catalogCw.fetchAllItemsFromCw();
const allStaleItems = new Map<number, any>();
// 4. Fetch full CW item data for stale IDs using slow, bounded concurrency
const cwItemMap = new Map<number, any>();
const itemFetchTasks: Array<() => Promise<void>> = staleIds.map(
(cwId) => async () => {
const item = await catalogCw.fetchByCatalogId(cwId);
cwItemMap.set(cwId, item);
},
);
const itemFetchFailures = await runSlowParallel(itemFetchTasks);
for (const [id, item] of allCwItems) {
if (staleIdSet.has(id)) {
allStaleItems.set(id, item);
}
}
// 5. Batch fetch inventory onHand for stale items (50 concurrent)
// 5. Fetch inventory onHand for stale IDs using the same slow parallel strategy
const onHandMap = new Map<number, number>();
const batchSize = 50;
const inventoryTasks: Array<() => Promise<void>> = staleIds.map(
(cwId) => async () => {
try {
const onHand = await catalogCw.fetchInventoryOnHand(cwId);
onHandMap.set(cwId, onHand);
} catch {
onHandMap.set(cwId, 0);
}
},
);
const inventoryFailures = await runSlowParallel(inventoryTasks);
for (let i = 0; i < staleIds.length; i += batchSize) {
const batch = staleIds.slice(i, i + batchSize);
await Promise.all(
batch.map(async (cwId) => {
try {
const onHand = await catalogCw.fetchInventoryOnHand(cwId);
onHandMap.set(cwId, onHand);
} catch {
onHandMap.set(cwId, 0);
}
}),
// 6. Upsert stale/new items with bounded slow parallel execution
let updatedCount = 0;
const upsertTasks: Array<() => Promise<void>> = staleIds.map(
(cwId) => async () => {
const item = cwItemMap.get(cwId);
if (!item) return;
const cwLastUpdated = item._info?.lastUpdated
? new Date(item._info.lastUpdated)
: new Date();
const onHand = onHandMap.get(cwId) ?? 0;
await prisma.catalogItem.upsert({
where: { cwCatalogId: cwId },
create: {
cwCatalogId: cwId,
identifier: item.identifier,
name: item.description,
description: item.description,
customerDescription: item.customerDescription,
internalNotes: item.notes,
category: item.category?.name,
categoryCwId: item.category?.id,
subcategory: item.subcategory?.name,
subcategoryCwId: item.subcategory?.id,
manufacturer: item.manufacturer?.name,
manufactureCwId: item.manufacturer?.id,
partNumber: item.manufacturerPartNumber,
vendorName: item.vendor?.name,
vendorSku: item.vendorSku,
vendorCwId: item.vendor?.id,
price: item.price,
cost: item.cost,
inactive: item.inactiveFlag,
salesTaxable: item.taxableFlag,
onHand,
cwLastUpdated,
},
update: {
name: item.description,
identifier: item.identifier,
description: item.description,
customerDescription: item.customerDescription,
internalNotes: item.notes,
category: item.category?.name,
categoryCwId: item.category?.id,
subcategory: item.subcategory?.name,
subcategoryCwId: item.subcategory?.id,
manufacturer: item.manufacturer?.name,
manufactureCwId: item.manufacturer?.id,
partNumber: item.manufacturerPartNumber,
vendorName: item.vendor?.name,
vendorSku: item.vendorSku,
vendorCwId: item.vendor?.id,
price: item.price,
cost: item.cost,
inactive: item.inactiveFlag,
salesTaxable: item.taxableFlag,
onHand,
cwLastUpdated,
},
});
updatedCount += 1;
},
);
const upsertFailures = await runSlowParallel(upsertTasks);
const failedTasks = itemFetchFailures + inventoryFailures + upsertFailures;
if (failedTasks > 0) {
console.warn(
`[catalog-refresh] ${failedTasks} slow-parallel task(s) failed; remaining items will retry next cycle`,
);
}
// 6. Upsert only the stale/new items
const updatedCount = (
await Promise.all(
staleIds.map(async (cwId) => {
const item = allStaleItems.get(cwId);
if (!item) return null;
const cwLastUpdated = item._info?.lastUpdated
? new Date(item._info.lastUpdated)
: new Date();
const onHand = onHandMap.get(cwId) ?? 0;
return await prisma.catalogItem.upsert({
where: { cwCatalogId: cwId },
create: {
cwCatalogId: cwId,
identifier: item.identifier,
name: item.description,
description: item.description,
customerDescription: item.customerDescription,
internalNotes: item.notes,
category: item.category?.name,
categoryCwId: item.category?.id,
subcategory: item.subcategory?.name,
subcategoryCwId: item.subcategory?.id,
manufacturer: item.manufacturer?.name,
manufactureCwId: item.manufacturer?.id,
partNumber: item.manufacturerPartNumber,
vendorName: item.vendor?.name,
vendorSku: item.vendorSku,
vendorCwId: item.vendor?.id,
price: item.price,
cost: item.cost,
inactive: item.inactiveFlag,
salesTaxable: item.taxableFlag,
onHand,
cwLastUpdated,
},
update: {
name: item.description,
identifier: item.identifier,
description: item.description,
customerDescription: item.customerDescription,
internalNotes: item.notes,
category: item.category?.name,
categoryCwId: item.category?.id,
subcategory: item.subcategory?.name,
subcategoryCwId: item.subcategory?.id,
manufacturer: item.manufacturer?.name,
manufactureCwId: item.manufacturer?.id,
partNumber: item.manufacturerPartNumber,
vendorName: item.vendor?.name,
vendorSku: item.vendorSku,
vendorCwId: item.vendor?.id,
price: item.price,
cost: item.cost,
inactive: item.inactiveFlag,
salesTaxable: item.taxableFlag,
onHand,
cwLastUpdated,
},
});
}),
)
).filter(Boolean).length;
events.emit("cw:catalog:refresh:completed", {
totalCw: cwSummaries.size,
totalDb: dbItems.length,
@@ -2,6 +2,11 @@ import { prisma } from "../../../constants";
import { events } from "../../globalEvents";
import { catalogCw } from "./catalog";
/** Max simultaneous on-hand fetches per batch. */
const CONCURRENCY = 6;
/** Delay inserted between batches (skipped after the last batch). */
const BATCH_DELAY_MS = 250;
/** Promise-based delay helper. */
const sleep = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms));
export const refreshInventory = async () => {
events.emit("cw:inventory:refresh:check");
@@ -23,13 +28,13 @@ export const refreshInventory = async () => {
totalItems: dbItems.length,
});
// 2. Batch fetch inventory onHand for all items (50 concurrent)
// 2. Slow-parallel fetch inventory onHand for all items
const onHandMap = new Map<number, number>();
const batchSize = 150;
let failedCount = 0;
for (let i = 0; i < dbItems.length; i += batchSize) {
const batch = dbItems.slice(i, i + batchSize);
await Promise.all(
for (let i = 0; i < dbItems.length; i += CONCURRENCY) {
const batch = dbItems.slice(i, i + CONCURRENCY);
const results = await Promise.allSettled(
batch.map(async (item) => {
try {
const onHand = await catalogCw.fetchInventoryOnHand(item.cwCatalogId);
@@ -39,6 +44,13 @@ export const refreshInventory = async () => {
}
}),
);
for (const result of results) {
if (result.status === "rejected") failedCount += 1;
}
if (i + CONCURRENCY >= dbItems.length) continue;
await sleep(BATCH_DELAY_MS);
}
// 3. Only update items where onHand has changed
@@ -71,4 +83,10 @@ export const refreshInventory = async () => {
totalItems: dbItems.length,
updatedCount,
});
if (failedCount > 0) {
console.warn(
`[inventory-refresh] ${failedCount} task(s) failed; fallback values were used and will retry next sweep`,
);
}
};