all the haul

This commit is contained in:
2026-04-07 23:56:31 +00:00
parent 87cce83030
commit 24f303355b
244 changed files with 33743 additions and 11249 deletions
@@ -1,168 +0,0 @@
/**
* @module computeCacheTTL
*
* Adaptive Cache TTL Algorithm
* ============================
*
* Determines how long a cached record should live before it must be
* re-fetched from the upstream source (e.g. ConnectWise API).
*
* The algorithm prioritises freshness for records that are actively
* being worked on, while avoiding unnecessary API calls for stale or
* inactive data.
*
* ## Spec
*
* | # | Condition | TTL (ms) | TTL (human) | Rationale |
* |---|------------------------------------------------------------------|----------|-------------|--------------------------------------------------------------------|
 * | 1a| `closedFlag` is `true` AND closed more than **30 days** ago      | `null`   | Do not cache| Long-closed records are rarely accessed; caching wastes memory.    |
 * | 1b| `closedFlag` is `true` AND `closedDate` within the last **30 days**| 900 000| 15 minutes  | Recently-closed records may still be viewed occasionally.          |
* | 2 | `expectedCloseDate` OR `lastUpdated` is within the last **5 days**| 60 000 | 60 seconds | High-activity window — data changes frequently and must stay fresh.|
* | 3 | `expectedCloseDate` OR `lastUpdated` is within the last **14 days**| 90 000 | 90 seconds | Moderate activity — still relevant, but changes less often. |
* | 4 | Everything else (older than 14 days) | 900 000 | 15 minutes | Low activity — safe to serve from cache for longer. |
*
* ## Evaluation order
*
* Rules are evaluated **top-to-bottom**; the first matching rule wins.
* Rule 2 (5-day window) is a subset of Rule 3 (14-day window), so it
* must be checked first.
*
* ## Inputs
*
* | Field | Type | Description |
* |--------------------|------------------|--------------------------------------------------------------------|
 * | `closedFlag`       | `boolean`        | Whether the record is closed / inactive.                           |
 * | `closedDate`       | `Date \| null`   | When the record was closed (drives the recently-closed 30-day rule).|
* | `expectedCloseDate`| `Date \| null` | The projected close date (future-looking relevance signal). |
* | `lastUpdated` | `Date \| null` | The last time the upstream record was modified (backward-looking). |
* | `now` | `Date` (optional)| Override for the current timestamp; defaults to `new Date()`. |
*
* ## Output
*
* Returns `number | null`:
* - A positive integer representing the TTL in **milliseconds**, or
* - `null` when the record should **not** be cached at all.
*
* ## Usage
*
* ```ts
* import { computeCacheTTL } from "../modules/algorithms/computeCacheTTL";
*
* const ttl = computeCacheTTL({
* closedFlag: opportunity.closedFlag,
* expectedCloseDate: opportunity.expectedCloseDate,
* lastUpdated: opportunity.cwLastUpdated,
* });
*
* if (ttl !== null) {
* await redis.set(key, serialised, "PX", ttl);
* }
* ```
*/
// ---------------------------------------------------------------------------
// Constants
// ---------------------------------------------------------------------------
/** 60 seconds TTL for high-activity records (within 5 days).
 * Must exceed the 30-second background refresh interval so the cache
 * stays warm between cycles. */
export const TTL_HIGH_ACTIVITY = 60 * 1000;
/** 90 seconds TTL for moderate-activity records (within 14 days). */
export const TTL_MODERATE_ACTIVITY = 90 * 1000;
/** 15 minutes TTL for low-activity / stale records. */
export const TTL_LOW_ACTIVITY = 15 * 60 * 1000;
/** 30 days in milliseconds. */
const THIRTY_DAYS_MS = 30 * 24 * 60 * 60 * 1000;
/** 5 days in milliseconds. */
const FIVE_DAYS_MS = 5 * 24 * 60 * 60 * 1000;
/** 14 days in milliseconds. */
const FOURTEEN_DAYS_MS = 14 * 24 * 60 * 60 * 1000;
// ---------------------------------------------------------------------------
// Input type
// ---------------------------------------------------------------------------
export interface CacheTTLInput {
  /** Whether the record is closed / inactive. */
  closedFlag: boolean;
  /** When the record was closed — drives the recently-closed rule (30-day window). */
  closedDate: Date | null;
  /** The projected close date — forward-looking relevance signal. */
  expectedCloseDate: Date | null;
  /** When the upstream record was last modified — backward-looking signal. */
  lastUpdated: Date | null;
  /**
   * Override for the current timestamp.
   * Useful for deterministic testing. Defaults to `new Date()`.
   */
  now?: Date;
}
// ---------------------------------------------------------------------------
// Algorithm
// ---------------------------------------------------------------------------
/**
 * Compute the cache TTL for a record based on its activity signals.
 *
 * Rules (first match wins):
 *  1a. Closed more than 30 days ago       → `null` (do not cache)
 *  1b. Closed within the last 30 days     → {@link TTL_LOW_ACTIVITY}
 *  2.  Close date / last update ±5 days   → {@link TTL_HIGH_ACTIVITY}
 *  3.  Close date / last update ±14 days  → {@link TTL_MODERATE_ACTIVITY}
 *  4.  Everything else                    → {@link TTL_LOW_ACTIVITY}
 *
 * @param input - The record's activity signals. See {@link CacheTTLInput}.
 * @returns The TTL in milliseconds, or `null` if the record should not be cached.
 *
 * @see Module-level JSDoc for the full spec table and evaluation rules.
 */
export function computeCacheTTL(input: CacheTTLInput): number | null {
  const referenceMs = (input.now ?? new Date()).getTime();

  // A date "matches" a window when it lies within ±windowMs of now, so both
  // recent updates and near-future scheduled dates qualify.
  const matchesWindow = (date: Date | null, windowMs: number): boolean =>
    date != null && Math.abs(referenceMs - date.getTime()) <= windowMs;

  // Rule 1 — closed records.
  if (input.closedFlag) {
    return matchesWindow(input.closedDate, THIRTY_DAYS_MS)
      ? TTL_LOW_ACTIVITY // Rule 1b — recently closed (within 30 days)
      : null; // Rule 1a — closed longer than 30 days: do not cache
  }

  const signals = [input.expectedCloseDate, input.lastUpdated];

  // Rule 2 — high activity (5-day window).
  if (signals.some((d) => matchesWindow(d, FIVE_DAYS_MS))) {
    return TTL_HIGH_ACTIVITY;
  }

  // Rule 3 — moderate activity (14-day window).
  if (signals.some((d) => matchesWindow(d, FOURTEEN_DAYS_MS))) {
    return TTL_MODERATE_ACTIVITY;
  }

  // Rule 4 — low activity / stale.
  return TTL_LOW_ACTIVITY;
}
@@ -1,116 +0,0 @@
/**
* @module computeProductsCacheTTL
*
* Adaptive Cache TTL for Opportunity Products
* ============================================
*
* Determines how long products (forecast items) should be cached in
* Redis before being re-fetched from ConnectWise.
*
* Products have unique caching rules compared to notes or contacts
* because they are typically finalised before a deal closes and do not
* change once the opportunity reaches a terminal status.
*
* ## Spec
*
* | # | Condition | TTL (ms) | TTL (human) | Rationale |
* |---|------------------------------------------------------------------------------|------------|-------------|---------------------------------------------------------------------------------------|
* | 1 | Status is **Won**, **Lost**, **Pending Won**, or **Pending Lost** | `null` | No cache | Products on terminal / near-terminal opps are static; no need to keep them warm. |
* | 2 | Opportunity is **not cacheable** (main cache TTL is `null`) | `null` | No cache | If the opp itself is evicted, sub-resources follow suit. |
 * | 3 | `lastUpdated` is within the last **3 days**                                  | 45 000     | 45 seconds  | Actively-worked deals — products are being edited and need near-real-time freshness. |
* | 4 | Everything else | 1 200 000 | 20 minutes | Lazy on-demand cache: fetched when requested, expires after 20 min without refresh. |
*
* ## Evaluation order
*
* Rules are evaluated top-to-bottom; the first matching rule wins.
*
* ## Inputs
*
* Extends {@link CacheTTLInput} from `computeCacheTTL` with an
* additional `statusCwId` field used to identify terminal statuses.
*
* ## Output
*
* Returns `number | null`:
* - Positive integer = TTL in **milliseconds**.
* - `null` = do **not** cache.
*/
import type { CacheTTLInput } from "./computeCacheTTL";
import { computeCacheTTL } from "./computeCacheTTL";
import { QUOTE_STATUSES } from "../../types/QuoteStatuses";
// ---------------------------------------------------------------------------
// Constants
// ---------------------------------------------------------------------------
/** 45 seconds — TTL for hot products (opportunity updated within 3 days).
 * Must exceed the 30-second background refresh interval so the cache
 * stays warm between cycles. */
export const PRODUCTS_TTL_HOT = 45 * 1000;
/** 20 minutes — TTL for on-demand product cache (lazy fallback). */
export const PRODUCTS_TTL_LAZY = 20 * 60 * 1000;
/** 3 days in milliseconds. */
const THREE_DAYS_MS = 3 * 24 * 60 * 60 * 1000;
/**
 * Set of all CW status IDs that map to a Won or Lost canonical status.
 *
 * Built once at module load from {@link QUOTE_STATUSES} so it stays in
 * sync with any future status additions.
 */
export const WON_LOST_STATUS_IDS: ReadonlySet<number> = new Set(
  QUOTE_STATUSES.flatMap((status) =>
    status.wonFlag || status.lostFlag
      ? [status.id, ...status.optimaEquivalency]
      : [],
  ),
);
// ---------------------------------------------------------------------------
// Input type
// ---------------------------------------------------------------------------
export interface ProductsCacheTTLInput extends CacheTTLInput {
  /** The CW status ID of the opportunity; `null` when no status is set. */
  statusCwId: number | null;
}
// ---------------------------------------------------------------------------
// Algorithm
// ---------------------------------------------------------------------------
/**
 * Compute the cache TTL for an opportunity's products.
 *
 * Rules (first match wins):
 *  1. Terminal status (Won / Lost family)  → `null` (no cache)
 *  2. Opportunity itself not cacheable     → `null`
 *  3. Updated within the last 3 days       → {@link PRODUCTS_TTL_HOT}
 *  4. Everything else                      → {@link PRODUCTS_TTL_LAZY}
 *
 * @param input - The opportunity's activity signals plus status ID.
 * @returns TTL in milliseconds, or `null` if products should not be cached.
 */
export function computeProductsCacheTTL(
  input: ProductsCacheTTLInput,
): number | null {
  const currentMs = (input.now ?? new Date()).getTime();

  // Rule 1 — terminal statuses: Won / Lost / Pending Won / Pending Lost.
  const { statusCwId } = input;
  if (statusCwId !== null && WON_LOST_STATUS_IDS.has(statusCwId)) {
    return null;
  }

  // Rule 2 — if the opportunity itself is not cacheable, skip products too.
  if (computeCacheTTL(input) === null) {
    return null;
  }

  // Rule 3 — hot: updated within the last 3 days (past or future-dated).
  const { lastUpdated } = input;
  if (
    lastUpdated &&
    Math.abs(currentMs - lastUpdated.getTime()) <= THREE_DAYS_MS
  ) {
    return PRODUCTS_TTL_HOT;
  }

  // Rule 4 — lazy fallback.
  return PRODUCTS_TTL_LAZY;
}
@@ -1,118 +0,0 @@
/**
* @module computeSubResourceCacheTTL
*
* Adaptive Cache TTL for Opportunity Sub-Resources
* =================================================
*
* Determines how long cached sub-resource data (notes, contacts) should
* live before being re-fetched from ConnectWise.
*
* Sub-resources change less frequently than the opportunity record itself
* or its activity feed, so TTLs are longer than the primary cache. The
* same activity-signal heuristics are used (expected close date, last
* updated, closed status) but with relaxed durations.
*
* ## Spec
*
* | # | Condition | TTL (ms) | TTL (human) | Rationale |
* |---|-------------------------------------------------------------------|----------|-------------|--------------------------------------------------------------------|
* | 1 | `closedFlag` is `true` AND closed > 30 days ago | `null` | Do not cache| Old closed records are rarely accessed. |
* | 1b| `closedFlag` is `true` AND closed within 30 days | 300 000 | 5 minutes | Recently-closed records may still be viewed occasionally. |
* | 2 | `expectedCloseDate` OR `lastUpdated` within **5 days** | 60 000 | 60 seconds | Active deals — contacts/notes may still change. |
* | 3 | `expectedCloseDate` OR `lastUpdated` within **14 days** | 120 000 | 2 minutes | Moderate activity — less likely to change. |
* | 4 | Everything else (older than 14 days) | 300 000 | 5 minutes | Low activity — safe to cache longer. |
*
* ## Evaluation order
*
* Rules are evaluated top-to-bottom; the first matching rule wins.
*
* ## Inputs
*
* Uses the same {@link CacheTTLInput} interface as `computeCacheTTL`.
*
* ## Output
*
* Returns `number | null`:
* - Positive integer = TTL in **milliseconds**.
* - `null` = do **not** cache.
*/
import type { CacheTTLInput } from "./computeCacheTTL";
// ---------------------------------------------------------------------------
// Constants
// ---------------------------------------------------------------------------
/** 60 seconds — TTL for high-activity sub-resources (within 5 days). */
export const SUB_TTL_HIGH_ACTIVITY = 60 * 1000;
/** 2 minutes — TTL for moderate-activity sub-resources (within 14 days). */
export const SUB_TTL_MODERATE_ACTIVITY = 2 * 60 * 1000;
/** 5 minutes — TTL for low-activity / stale sub-resources. */
export const SUB_TTL_LOW_ACTIVITY = 5 * 60 * 1000;
/** 30 days in milliseconds. */
const THIRTY_DAYS_MS = 30 * 24 * 60 * 60 * 1000;
/** 5 days in milliseconds. */
const FIVE_DAYS_MS = 5 * 24 * 60 * 60 * 1000;
/** 14 days in milliseconds. */
const FOURTEEN_DAYS_MS = 14 * 24 * 60 * 60 * 1000;
// ---------------------------------------------------------------------------
// Algorithm
// ---------------------------------------------------------------------------
/**
 * Compute the cache TTL for an opportunity sub-resource (notes, contacts).
 *
 * Mirrors `computeCacheTTL` but with relaxed durations, since sub-resources
 * change less often than the opportunity record itself.
 *
 * @param input - The opportunity's activity signals. See {@link CacheTTLInput}.
 * @returns The TTL in milliseconds, or `null` if the data should not be cached.
 */
export function computeSubResourceCacheTTL(
  input: CacheTTLInput,
): number | null {
  const referenceMs = (input.now ?? new Date()).getTime();

  // True when `date` lies within ±windowMs of now (past or future).
  const matchesWindow = (date: Date | null, windowMs: number): boolean =>
    date != null && Math.abs(referenceMs - date.getTime()) <= windowMs;

  // Rule 1 — closed records: cache recently-closed ones briefly, skip the rest.
  if (input.closedFlag) {
    return matchesWindow(input.closedDate, THIRTY_DAYS_MS)
      ? SUB_TTL_LOW_ACTIVITY
      : null;
  }

  const signals = [input.expectedCloseDate, input.lastUpdated];

  // Rule 2 — high activity (5-day window).
  if (signals.some((d) => matchesWindow(d, FIVE_DAYS_MS))) {
    return SUB_TTL_HIGH_ACTIVITY;
  }

  // Rule 3 — moderate activity (14-day window).
  if (signals.some((d) => matchesWindow(d, FOURTEEN_DAYS_MS))) {
    return SUB_TTL_MODERATE_ACTIVITY;
  }

  // Rule 4 — low activity / stale.
  return SUB_TTL_LOW_ACTIVITY;
}
-659
View File
@@ -1,659 +0,0 @@
/**
* @module opportunityCache
*
* Redis-backed cache for expensive ConnectWise API data associated
* with opportunities.
*
* ## What is cached
*
* Each non-closed opportunity may have cached payloads keyed by its `cwOpportunityId`:
*
* - **Activities** (`opp:activities:{cwOpportunityId}`) — the raw `CWActivity[]` array
* - **Company CW data** (`opp:company-cw:{cw_CompanyId}`) — hydrated company / contacts blob
* - **Notes** (`opp:notes:{cwOpportunityId}`) — raw CW notes array
* - **Contacts** (`opp:contacts:{cwOpportunityId}`) — raw CW contacts array
* - **Products** (`opp:products:{cwOpportunityId}`) — raw CW forecast + procurement products blob
*
* TTLs are computed dynamically via {@link computeCacheTTL}.
*
* ## Background refresh (Worker-based)
*
* **⚠️ This module is now READ-ONLY.** Cache refresh logic has been moved to workers:
*
* - {@link refreshActiveOpportunitiesWorker} — Scheduled to run every 20 minutes
* to run a unified cache pass across all opportunities. Active/recent records
* use adaptive TTLs and archived records use {@link TTL_ARCHIVED_MS}.
*
* See `src/modules/workers/cache/` for worker implementations.
*
* ## This module now provides
*
* - `getCached*()` functions for reading cached data
* - `fetchAndCache*()` functions used internally by workers
* - `invalidate*()` functions for cache invalidation after mutations
* - Cache key helpers for Redis operations
*/
import { prisma, redis } from "../../constants";
import { activityCw } from "../cw-utils/activities/activities";
import { computeCacheTTL } from "../algorithms/computeCacheTTL";
import { computeSubResourceCacheTTL } from "../algorithms/computeSubResourceCacheTTL";
import {
computeProductsCacheTTL,
PRODUCTS_TTL_HOT,
} from "../algorithms/computeProductsCacheTTL";
import { connectWiseApi } from "../../constants";
import { fetchCwCompanyById } from "../cw-utils/fetchCompany";
import { fetchCompanySite } from "../cw-utils/sites/companySites";
import { opportunityCw } from "../cw-utils/opportunities/opportunities";
import { withCwRetry } from "../cw-utils/withCwRetry";
import { events } from "../globalEvents";
// ---------------------------------------------------------------------------
// Key helpers
// ---------------------------------------------------------------------------
const ACTIVITY_PREFIX = "opp:activities:";
const COMPANY_CW_PREFIX = "opp:company-cw:";
const NOTES_PREFIX = "opp:notes:";
const CONTACTS_PREFIX = "opp:contacts:";
const PRODUCTS_PREFIX = "opp:products:";
const SITE_PREFIX = "opp:site:";
const OPP_CW_PREFIX = "opp:cw-data:";
/** Redis key for cached activities by CW opportunity ID. */
export const activityCacheKey = (cwOppId: number): string =>
  ACTIVITY_PREFIX + cwOppId;
/** Redis key for cached company CW hydration data by CW company ID. */
export const companyCwCacheKey = (cwCompanyId: number): string =>
  COMPANY_CW_PREFIX + cwCompanyId;
/** Redis key for cached opportunity notes by CW opportunity ID. */
export const notesCacheKey = (cwOppId: number): string =>
  NOTES_PREFIX + cwOppId;
/** Redis key for cached opportunity contacts by CW opportunity ID. */
export const contactsCacheKey = (cwOppId: number): string =>
  CONTACTS_PREFIX + cwOppId;
/** Redis key for cached opportunity products by CW opportunity ID. */
export const productsCacheKey = (cwOppId: number): string =>
  PRODUCTS_PREFIX + cwOppId;
/** Redis key for cached company site by CW company ID + site ID. */
export const siteCacheKey = (cwCompanyId: number, cwSiteId: number): string =>
  SITE_PREFIX + cwCompanyId + ":" + cwSiteId;
/** Redis key for cached CW opportunity response by CW opportunity ID. */
export const oppCwDataCacheKey = (cwOppId: number): string =>
  OPP_CW_PREFIX + cwOppId;
// ---------------------------------------------------------------------------
// Read helpers
// ---------------------------------------------------------------------------
/**
 * Retrieve cached CW activities for an opportunity.
 *
 * @returns The parsed `CWActivity[]` or `null` on cache miss.
 */
export async function getCachedActivities(
  cwOpportunityId: number,
): Promise<any[] | null> {
  const key = activityCacheKey(cwOpportunityId);
  const serialized = await redis.get(key);
  if (!serialized) {
    return null;
  }
  try {
    return JSON.parse(serialized);
  } catch {
    // Corrupt payload — treat as a cache miss.
    return null;
  }
}
/**
 * Retrieve cached company CW hydration data.
 *
 * @returns `{ company, defaultContact, allContacts }` or `null` on cache miss.
 */
export async function getCachedCompanyCwData(
  cwCompanyId: number,
): Promise<{ company: any; defaultContact: any; allContacts: any[] } | null> {
  const key = companyCwCacheKey(cwCompanyId);
  const serialized = await redis.get(key);
  if (!serialized) {
    return null;
  }
  try {
    return JSON.parse(serialized);
  } catch {
    // Corrupt payload — treat as a cache miss.
    return null;
  }
}
/**
 * Retrieve cached opportunity notes (raw CW data).
 *
 * @returns The parsed raw CW notes array or `null` on cache miss.
 */
export async function getCachedNotes(
  cwOpportunityId: number,
): Promise<any[] | null> {
  const key = notesCacheKey(cwOpportunityId);
  const serialized = await redis.get(key);
  if (!serialized) {
    return null;
  }
  try {
    return JSON.parse(serialized);
  } catch {
    // Corrupt payload — treat as a cache miss.
    return null;
  }
}
/**
 * Retrieve cached opportunity contacts (raw CW data).
 *
 * @returns The parsed raw CW contacts array or `null` on cache miss.
 */
export async function getCachedContacts(
  cwOpportunityId: number,
): Promise<any[] | null> {
  const key = contactsCacheKey(cwOpportunityId);
  const serialized = await redis.get(key);
  if (!serialized) {
    return null;
  }
  try {
    return JSON.parse(serialized);
  } catch {
    // Corrupt payload — treat as a cache miss.
    return null;
  }
}
/**
 * Retrieve cached opportunity products (raw CW forecast + procurement blob).
 *
 * @returns `{ forecast, procProducts }` or `null` on cache miss.
 */
export async function getCachedProducts(
  cwOpportunityId: number,
): Promise<{ forecast: any; procProducts: any[] } | null> {
  const key = productsCacheKey(cwOpportunityId);
  const serialized = await redis.get(key);
  if (!serialized) {
    return null;
  }
  try {
    return JSON.parse(serialized);
  } catch {
    // Corrupt payload — treat as a cache miss.
    return null;
  }
}
/**
 * Retrieve cached CW site data for a company/site pair.
 *
 * @returns Parsed site data or `null` on cache miss.
 */
export async function getCachedSite(
  cwCompanyId: number,
  cwSiteId: number,
): Promise<any | null> {
  const key = siteCacheKey(cwCompanyId, cwSiteId);
  const serialized = await redis.get(key);
  if (!serialized) {
    return null;
  }
  try {
    return JSON.parse(serialized);
  } catch {
    // Corrupt payload — treat as a cache miss.
    return null;
  }
}
/**
 * Retrieve cached CW opportunity response data.
 *
 * @returns Parsed CW opportunity object or `null` on cache miss.
 */
export async function getCachedOppCwData(
  cwOpportunityId: number,
): Promise<any | null> {
  const key = oppCwDataCacheKey(cwOpportunityId);
  const serialized = await redis.get(key);
  if (!serialized) {
    return null;
  }
  try {
    return JSON.parse(serialized);
  } catch {
    // Corrupt payload — treat as a cache miss.
    return null;
  }
}
// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------
/** Check whether an error is an Axios 404 (resource not found in CW). */
function isNotFoundError(err: unknown): boolean {
  if (err === null || typeof err !== "object") return false;
  const candidate = err as Record<string, any>;
  if (candidate.isAxiosError !== true) return false;
  return candidate.response?.status === 404;
}
/**
 * Check whether an error is a transient network / timeout error.
 *
 * These are safe to swallow in background refresh tasks — CW will be
 * retried on the next refresh cycle. Callers log a concise one-line
 * warning instead of dumping the full Axios error object.
 */
function isTransientError(err: unknown): boolean {
  if (err === null || typeof err !== "object") return false;
  const candidate = err as Record<string, any>;
  if (!candidate.isAxiosError) return false;
  const transientCodes = new Set<string>([
    "ECONNABORTED",
    "ECONNREFUSED",
    "ECONNRESET",
    "ETIMEDOUT",
    "ERR_NETWORK",
    "ENETUNREACH",
    "ERR_BAD_RESPONSE",
  ]);
  // `code` may be undefined; Set.has(undefined) is simply false.
  return transientCodes.has(candidate.code as string);
}
/**
 * Build a concise error description for logging (avoids dumping entire
 * Axios error objects).
 *
 * For Axios errors, produces `"METHOD url CODE (message)"`, falling back
 * to `"HTTP <status>"` when no error code is present. Non-Axios objects
 * yield their `message`; non-objects are stringified.
 */
function describeError(err: unknown): string {
  if (typeof err !== "object" || err === null) return String(err);
  const e = err as Record<string, any>;
  if (e.isAxiosError) {
    const method = (e.config?.method ?? "?").toUpperCase();
    const url = e.config?.url ?? "unknown";
    const code = e.code ?? "";
    const status = e.response?.status ?? "";
    // Bug fix: the URL and the code/status were concatenated with no
    // separator (e.g. "GET /pathECONNRESET"); insert a space between them.
    return `${method} ${url} ${code || `HTTP ${status}`} (${e.message})`;
  }
  return e.message ?? String(err);
}
/**
 * When true, transient-error warnings inside fetchAndCache* are suppressed.
 * Used during background refresh to avoid flooding the terminal — the
 * refresh function prints a single summary line instead.
 *
 * NOTE(review): nothing in the visible portion of this module reads or
 * writes this flag any more (the refresh logic moved to workers) — confirm
 * whether it is still referenced before removing.
 */
let _suppressTransientWarnings = false;
// ---------------------------------------------------------------------------
// Write helpers
// ---------------------------------------------------------------------------
/**
 * Fetch activities from CW and cache them with the appropriate TTL.
 *
 * Returns an empty array if CW responds with 404 (opportunity doesn't
 * exist or was deleted upstream) or on a transient network error.
 *
 * @returns The raw `CWActivity[]` collection (as plain array).
 */
export async function fetchAndCacheActivities(
  cwOpportunityId: number,
  ttlMs: number,
): Promise<any[]> {
  try {
    // Single-call variant avoids the extra count request.
    const activities =
      await activityCw.fetchByOpportunityDirect(cwOpportunityId);
    const key = activityCacheKey(cwOpportunityId);
    await redis.set(key, JSON.stringify(activities), "PX", ttlMs);
    return activities;
  } catch (err) {
    if (isNotFoundError(err)) return [];
    if (isTransientError(err)) {
      console.warn(
        `[cache] activities opp#${cwOpportunityId}: ${describeError(err)}`,
      );
      return [];
    }
    throw err;
  }
}
/**
 * Fetch company CW data (company, contacts) and cache with the given TTL.
 *
 * @returns The hydration blob or `null` if the company doesn't exist in CW.
 */
export async function fetchAndCacheCompanyCwData(
  cwCompanyId: number,
  ttlMs: number,
): Promise<{ company: any; defaultContact: any; allContacts: any[] } | null> {
  try {
    // The contacts URL needs only the company ID, so both requests can
    // run in parallel.
    const contactsRequest = withCwRetry(
      () =>
        connectWiseApi.get(
          `/company/companies/${cwCompanyId}/contacts?pageSize=1000`,
        ),
      { label: `company#${cwCompanyId}/allContacts` },
    );
    const [cwCompany, allContactsData] = await Promise.all([
      fetchCwCompanyById(cwCompanyId),
      contactsRequest,
    ]);
    if (!cwCompany) return null;
    // The company object carries the default contact's ID, so it can be
    // resolved from the already-fetched contact list — no extra CW call.
    const allContacts = allContactsData.data as any[];
    const defaultContactId = cwCompany.defaultContact?.id;
    let defaultContactData: any = null;
    if (defaultContactId) {
      defaultContactData =
        allContacts.find((c: any) => c.id === defaultContactId) ?? null;
    }
    const blob = {
      company: cwCompany,
      defaultContact: defaultContactData,
      allContacts,
    };
    await redis.set(
      companyCwCacheKey(cwCompanyId),
      JSON.stringify(blob),
      "PX",
      ttlMs,
    );
    return blob;
  } catch (err) {
    if (isNotFoundError(err)) return null;
    if (isTransientError(err)) {
      console.warn(`[cache] company#${cwCompanyId}: ${describeError(err)}`);
      return null;
    }
    throw err;
  }
}
/**
 * Fetch opportunity notes from CW and cache the raw response.
 *
 * Returns an empty array if CW responds with 404 or on a transient error.
 *
 * @returns The raw CW notes array.
 */
export async function fetchAndCacheNotes(
  cwOpportunityId: number,
  ttlMs: number,
): Promise<any[]> {
  try {
    const notes = await opportunityCw.fetchNotes(cwOpportunityId);
    const key = notesCacheKey(cwOpportunityId);
    await redis.set(key, JSON.stringify(notes), "PX", ttlMs);
    return notes;
  } catch (err) {
    if (isNotFoundError(err)) return [];
    if (isTransientError(err)) {
      console.warn(
        `[cache] notes opp#${cwOpportunityId}: ${describeError(err)}`,
      );
      return [];
    }
    throw err;
  }
}
/**
 * Fetch opportunity contacts from CW and cache the raw response.
 *
 * Returns an empty array if CW responds with 404 or on a transient error.
 *
 * @returns The raw CW contacts array.
 */
export async function fetchAndCacheContacts(
  cwOpportunityId: number,
  ttlMs: number,
): Promise<any[]> {
  try {
    const contacts = await opportunityCw.fetchContacts(cwOpportunityId);
    const key = contactsCacheKey(cwOpportunityId);
    await redis.set(key, JSON.stringify(contacts), "PX", ttlMs);
    return contacts;
  } catch (err) {
    if (isNotFoundError(err)) return [];
    if (isTransientError(err)) {
      console.warn(
        `[cache] contacts opp#${cwOpportunityId}: ${describeError(err)}`,
      );
      return [];
    }
    throw err;
  }
}
/**
 * Invalidate cached notes for an opportunity.
 *
 * Call this after any note mutation (create, update, delete) so the
 * next read refreshes from ConnectWise.
 */
export async function invalidateNotesCache(
  cwOpportunityId: number,
): Promise<void> {
  const key = notesCacheKey(cwOpportunityId);
  await redis.del(key);
}
/**
 * Invalidate cached contacts for an opportunity.
 *
 * Call this after any contact mutation so the next read refreshes
 * from ConnectWise.
 */
export async function invalidateContactsCache(
  cwOpportunityId: number,
): Promise<void> {
  const key = contactsCacheKey(cwOpportunityId);
  await redis.del(key);
}
/**
 * Fetch opportunity products (forecast + procurement) from CW and cache.
 *
 * Stores both the forecast response and procurement products together
 * so that `fetchProducts()` can reconstruct ForecastProductControllers
 * from a single cache hit.
 *
 * @returns `{ forecast, procProducts }` blob.
 */
export async function fetchAndCacheProducts(
  cwOpportunityId: number,
  ttlMs: number,
): Promise<{ forecast: any; procProducts: any[] }> {
  // Fallback blob returned when the opportunity is gone or CW is unreachable.
  const emptyBlob = () => ({
    forecast: { forecastItems: [] },
    procProducts: [] as any[],
  });
  try {
    const [forecast, procProducts] = await Promise.all([
      opportunityCw.fetchProducts(cwOpportunityId),
      opportunityCw.fetchProcurementProducts(cwOpportunityId),
    ]);
    const blob = { forecast, procProducts };
    await redis.set(
      productsCacheKey(cwOpportunityId),
      JSON.stringify(blob),
      "PX",
      ttlMs,
    );
    return blob;
  } catch (err) {
    if (isNotFoundError(err)) return emptyBlob();
    if (isTransientError(err)) {
      console.warn(
        `[cache] products opp#${cwOpportunityId}: ${describeError(err)}`,
      );
      return emptyBlob();
    }
    throw err;
  }
}
/**
 * Invalidate cached products for an opportunity.
 *
 * Call this after any product mutation (add, update, resequence) so the
 * next read refreshes from ConnectWise.
 */
export async function invalidateProductsCache(
  cwOpportunityId: number,
): Promise<void> {
  const key = productsCacheKey(cwOpportunityId);
  await redis.del(key);
}
/**
 * Invalidate all cached data for an opportunity.
 *
 * Removes activities, notes, contacts, products, and CW data cache keys.
 * Call this when an opportunity is deleted.
 */
export async function invalidateAllOpportunityCaches(
  cwOpportunityId: number,
): Promise<void> {
  const keys = [
    activityCacheKey(cwOpportunityId),
    notesCacheKey(cwOpportunityId),
    contactsCacheKey(cwOpportunityId),
    productsCacheKey(cwOpportunityId),
    oppCwDataCacheKey(cwOpportunityId),
  ];
  await redis.del(...keys);
}
/**
 * Site TTL — 20 minutes. Site/address data rarely changes so we cache
 * aggressively. The background refresh does NOT proactively warm site keys;
 * they are populated lazily on the first detail-view request.
 */
const SITE_TTL_MS = 20 * 60 * 1000;
/**
 * Fetch a CW company site from ConnectWise and cache the result.
 *
 * Returns `null` on 404 or transient network errors.
 *
 * @returns The raw CW site object.
 */
export async function fetchAndCacheSite(
  cwCompanyId: number,
  cwSiteId: number,
): Promise<any> {
  try {
    const site = await fetchCompanySite(cwCompanyId, cwSiteId);
    const key = siteCacheKey(cwCompanyId, cwSiteId);
    await redis.set(key, JSON.stringify(site), "PX", SITE_TTL_MS);
    return site;
  } catch (err) {
    if (isNotFoundError(err)) return null;
    if (isTransientError(err)) {
      console.warn(
        `[cache] site company#${cwCompanyId}/site#${cwSiteId}: ${describeError(err)}`,
      );
      return null;
    }
    throw err;
  }
}
/**
 * Fetch the raw CW opportunity response from ConnectWise and cache it.
 *
 * Used by `fetchItem()` in the manager to avoid a CW roundtrip when
 * the detail view is reloaded within the cache TTL window.
 *
 * @param cwOpportunityId - The CW opportunity ID
 * @param ttlMs - Cache TTL in milliseconds
 * @returns The raw CW opportunity response object, or `null` on 404 /
 *   transient network errors.
 */
export async function fetchAndCacheOppCwData(
  cwOpportunityId: number,
  ttlMs: number,
): Promise<any> {
  try {
    const cwData = await opportunityCw.fetch(cwOpportunityId);
    const key = oppCwDataCacheKey(cwOpportunityId);
    await redis.set(key, JSON.stringify(cwData), "PX", ttlMs);
    return cwData;
  } catch (err) {
    if (isNotFoundError(err)) return null;
    if (isTransientError(err)) {
      console.warn(`[cache] opp#${cwOpportunityId}: ${describeError(err)}`);
      return null;
    }
    throw err;
  }
}
// ---------------------------------------------------------------------------
// Background refresh
// ---------------------------------------------------------------------------
/**
 * Fixed 24-hour TTL used for archived (closed > 30 days) opportunity cache entries.
 * These opportunities are outside the adaptive-TTL window and are rebuilt once per
 * day at midnight via {@link refreshArchivedOpportunityCache}.
 */
export const TTL_ARCHIVED_MS = 86_400_000; // 24 hours
/**
 * @deprecated Moved to the worker at
 * `src/modules/workers/cache/refreshArchivedOpportunities.ts`.
 *
 * The worker should run daily at midnight with `force=true` so archived
 * opportunities (closed > 30 days) keep fresh cache entries; on startup
 * `force=false` populates only missing keys to avoid a CW burst.
 *
 * @param force - Retained for signature compatibility; unused by this stub.
 * @throws Always — this stub only points callers at the worker.
 */
export async function refreshArchivedOpportunityCache(
  force = false,
): Promise<void> {
  throw new Error(
    "refreshArchivedOpportunityCache has been moved to a worker. " +
      "Use refreshArchivedOpportunitiesWorker from src/modules/workers/cache/refreshArchivedOpportunities.ts",
  );
}
/**
 * @deprecated Moved to a worker — use `refreshActiveOpportunitiesWorker`
 * from `src/modules/workers/cache/refreshActiveOpportunities.ts`.
 *
 * The worker should run every 30 seconds to refresh cache entries for
 * active and recently-closed (within 30 days) opportunities.
 *
 * This stub always rejects so any remaining call site fails loudly instead
 * of silently doing nothing.
 */
export async function refreshOpportunityCache(): Promise<void> {
  const message =
    "refreshOpportunityCache has been moved to a worker. " +
    "Use refreshActiveOpportunitiesWorker from src/modules/workers/cache/refreshActiveOpportunities.ts";
  throw new Error(message);
}
+36 -138
View File
@@ -1,6 +1,4 @@
import { prisma, redis } from "../../constants";
import { getCachedOppCwData, getCachedProducts } from "./opportunityCache";
import { OpportunityStatus } from "../../workflows/wf.opportunity";
import { events } from "../globalEvents";
import { opportunities } from "../../managers/opportunities";
import { normalizeProbabilityRatio } from "../sales-utils/normalizeProbability";
@@ -101,13 +99,16 @@ interface CachedOpportunityRevenue {
}
interface OpportunityRow {
id: string;
cwOpportunityId: number;
id: number;
uid: string;
name: string;
primarySalesRepIdentifier: string | null;
secondarySalesRepIdentifier: string | null;
statusCwId: number | null;
statusName: string | null;
primarySalesRepId: string | null;
secondarySalesRepId: string | null;
status: {
wonFlag: boolean;
lostFlag: boolean;
closeFlag: boolean;
} | null;
closedFlag: boolean;
dateBecameLead: Date | null;
closedDate: Date | null;
@@ -137,107 +138,23 @@ const toFinite = (value: unknown): number => {
return n;
};
const isWon = (opp: {
statusCwId: number | null;
statusName: string | null;
closedFlag: boolean;
}) => {
if (opp.statusCwId === OpportunityStatus.Won) return true;
if (opp.statusName?.toLowerCase().includes("won")) return true;
if (opp.closedFlag && opp.statusName?.toLowerCase().includes("won"))
return true;
return false;
};
const isWon = (opp: { status: { wonFlag: boolean } | null }) =>
Boolean(opp.status?.wonFlag);
const isLost = (opp: {
statusCwId: number | null;
statusName: string | null;
closedFlag: boolean;
}) => {
if (opp.statusCwId === OpportunityStatus.Lost) return true;
if (opp.statusName?.toLowerCase().includes("lost")) return true;
if (opp.closedFlag && opp.statusName?.toLowerCase().includes("lost"))
return true;
return false;
};
const isLost = (opp: { status: { lostFlag: boolean } | null }) =>
Boolean(opp.status?.lostFlag);
const isClosedOpportunity = (opp: {
statusCwId: number | null;
statusName: string | null;
status: { wonFlag: boolean; lostFlag: boolean; closeFlag: boolean } | null;
closedFlag: boolean;
}) => {
if (opp.closedFlag) return true;
if (opp.status?.closeFlag) return true;
if (isWon(opp)) return true;
if (isLost(opp)) return true;
return false;
};
const buildCancellationMap = (procProducts: any[]) => {
const map = new Map<number, any>();
for (const pp of procProducts) {
const rawForecastDetailId = pp?.forecastDetailId;
const forecastDetailId =
typeof rawForecastDetailId === "number"
? rawForecastDetailId
: Number(rawForecastDetailId);
if (Number.isFinite(forecastDetailId) && forecastDetailId > 0) {
map.set(forecastDetailId, pp);
}
}
return map;
};
const computeRevenueFromProductsBlob = (
blob: any,
): Omit<OpportunityRevenue, "cacheHit"> => {
const forecastItems = Array.isArray(blob?.forecast?.forecastItems)
? blob.forecast.forecastItems
: [];
const procProducts = Array.isArray(blob?.procProducts)
? blob.procProducts
: [];
const cancellationMap = buildCancellationMap(procProducts);
let totalRevenue = 0;
let taxableRevenue = 0;
for (const item of forecastItems) {
if (!cancellationMap.has(item?.id)) continue;
if (!item?.includeFlag) continue;
const quantity = Math.max(0, toFinite(item?.quantity));
const revenue = toFinite(item?.revenue);
const cancellation = cancellationMap.get(item.id);
const cancelledFlag = Boolean(cancellation?.cancelledFlag);
const quantityCancelled = Math.max(
0,
toFinite(cancellation?.quantityCancelled),
);
if (cancelledFlag && quantity > 0 && quantityCancelled >= quantity)
continue;
const ratio =
quantity > 0 ? Math.max(0, (quantity - quantityCancelled) / quantity) : 1;
const effectiveRevenue = revenue * ratio;
totalRevenue += effectiveRevenue;
if (item?.taxableFlag) taxableRevenue += effectiveRevenue;
}
const nonTaxableRevenue = totalRevenue - taxableRevenue;
return {
totalRevenue: roundCurrency(totalRevenue),
taxableRevenue: roundCurrency(taxableRevenue),
nonTaxableRevenue: roundCurrency(nonTaxableRevenue),
};
};
const computeRevenueFromControllers = (
products: Array<{
@@ -298,20 +215,8 @@ const writeCachedOpportunityRevenue = async (
);
};
const resolveProbabilityRatio = async (opp: {
cwOpportunityId: number;
probability: number;
}): Promise<number> => {
const fromDb = normalizeProbabilityRatio(opp.probability);
if (fromDb > 0) return fromDb;
const cachedCwOpp = await getCachedOppCwData(opp.cwOpportunityId);
if (!cachedCwOpp) return 0;
const rawProbability =
cachedCwOpp?.probability?.name ?? cachedCwOpp?.probability ?? 0;
return normalizeProbabilityRatio(rawProbability);
};
const resolveProbabilityRatio = (opp: { probability: number }): number =>
normalizeProbabilityRatio(opp.probability);
const getOpportunityRevenueCacheFirst = async (
cwOpportunityId: number,
@@ -327,18 +232,6 @@ const getOpportunityRevenueCacheFirst = async (
}
}
if (!opts?.forceColdLoad) {
const cachedProducts = await getCachedProducts(cwOpportunityId);
if (cachedProducts) {
const computed = computeRevenueFromProductsBlob(cachedProducts);
await writeCachedOpportunityRevenue(cwOpportunityId, computed);
return {
...computed,
cacheHit: true,
};
}
}
try {
const opportunity = await opportunities.fetchRecord(cwOpportunityId);
const products = await opportunity.fetchProducts({
@@ -489,8 +382,8 @@ export async function refreshSalesOpportunityMetricsCache(
AND: [
{
OR: [
{ primarySalesRepIdentifier: { in: memberIdentifiers } },
{ secondarySalesRepIdentifier: { in: memberIdentifiers } },
{ primarySalesRepId: { in: memberIdentifiers } },
{ secondarySalesRepId: { in: memberIdentifiers } },
],
},
{ dateBecameLead: { gte: yearStart } },
@@ -501,12 +394,17 @@ export async function refreshSalesOpportunityMetricsCache(
},
select: {
id: true,
cwOpportunityId: true,
uid: true,
name: true,
primarySalesRepIdentifier: true,
secondarySalesRepIdentifier: true,
statusCwId: true,
statusName: true,
primarySalesRepId: true,
secondarySalesRepId: true,
status: {
select: {
wonFlag: true,
lostFlag: true,
closeFlag: true,
},
},
closedFlag: true,
dateBecameLead: true,
closedDate: true,
@@ -565,7 +463,7 @@ export async function refreshSalesOpportunityMetricsCache(
async (opp) => {
const [revenue, probabilityRatio] = await Promise.all([
withTimeout(
getOpportunityRevenueCacheFirst(opp.cwOpportunityId, {
getOpportunityRevenueCacheFirst(opp.id, {
forceColdLoad,
}),
PRODUCT_LOOKUP_TIMEOUT_MS,
@@ -619,10 +517,10 @@ export async function refreshSalesOpportunityMetricsCache(
for (const opp of opportunityRows) {
const assigned = new Set<string>();
if (opp.primarySalesRepIdentifier)
assigned.add(opp.primarySalesRepIdentifier);
if (opp.secondarySalesRepIdentifier)
assigned.add(opp.secondarySalesRepIdentifier);
if (opp.primarySalesRepId)
assigned.add(opp.primarySalesRepId);
if (opp.secondarySalesRepId)
assigned.add(opp.secondarySalesRepId);
for (const identifier of assigned) {
const bucket = opportunitiesByMember.get(identifier);
@@ -665,8 +563,8 @@ export async function refreshSalesOpportunityMetricsCache(
);
const breakdownEntry: OpportunityBreakdownEntry = {
id: opp.id,
cwId: opp.cwOpportunityId,
id: opp.uid,
cwId: opp.id,
name: opp.name,
revenue: revenue.totalRevenue,
taxableRevenue: revenue.taxableRevenue,
@@ -1,103 +0,0 @@
import { collectorSocket } from "../../constants";
export type CollectorQueryOptions = {
select?: string[];
include?: string[];
[key: string]: unknown;
};
type CollectorSuccessResponse<T> = {
success: true;
data: T;
};
type CollectorErrorResponse = {
success: false;
error: string;
};
type CollectorResponse<T> =
| CollectorSuccessResponse<T>
| CollectorErrorResponse;
// Max time (ms) to wait for a collector acknowledgement; overridable via env.
const DEFAULT_ACK_TIMEOUT_MS = Number(
  Bun.env.COLLECTOR_ACK_TIMEOUT_MS ?? "15000",
);
// Max time (ms) to wait for the collector socket to connect; overridable via env.
const DEFAULT_CONNECT_TIMEOUT_MS = Number(
  Bun.env.COLLECTOR_CONNECT_TIMEOUT_MS ?? "5000",
);
/**
 * Ensure the shared collector socket is connected before use.
 *
 * Resolves immediately when already connected; otherwise initiates a
 * connection and waits for either `connect` or `connect_error`, bounded by
 * a hard timeout so callers never hang on an unreachable collector.
 *
 * @param timeoutMs - Maximum time to wait for the connection (defaults to
 *   DEFAULT_CONNECT_TIMEOUT_MS).
 * @throws Error on connection timeout, or the underlying connect error.
 */
const ensureCollectorConnected = async (
  timeoutMs = DEFAULT_CONNECT_TIMEOUT_MS,
): Promise<void> => {
  if (collectorSocket.connected) {
    return;
  }
  collectorSocket.connect();
  await new Promise<void>((resolve, reject) => {
    // Reject if neither connect nor connect_error fires within timeoutMs.
    const timeout = setTimeout(() => {
      cleanup();
      reject(new Error("Collector socket connection timeout"));
    }, timeoutMs);
    const onConnect = () => {
      cleanup();
      resolve();
    };
    const onConnectError = (err: Error) => {
      cleanup();
      reject(err);
    };
    // Remove both listeners and the timer so a late event cannot settle the
    // promise twice or leak handlers on the shared socket.
    const cleanup = () => {
      clearTimeout(timeout);
      collectorSocket.off("connect", onConnect);
      collectorSocket.off("connect_error", onConnectError);
    };
    collectorSocket.on("connect", onConnect);
    collectorSocket.on("connect_error", onConnectError);
  });
};
/**
 * Run a named collector over the shared socket and return its data payload.
 *
 * Ensures the socket is connected first, emits the collector name with the
 * optional query options, and awaits the acknowledgement callback (bounded
 * by DEFAULT_ACK_TIMEOUT_MS).
 *
 * @param collector - Collector/event name understood by the collector service
 * @param opts - Optional select/include/query options forwarded verbatim
 * @returns The `data` field of a successful collector response
 * @throws Error on ack timeout, empty payload, or a `success: false` response
 */
export const runCollector = async <T = unknown>(
  collector: string,
  opts?: CollectorQueryOptions,
): Promise<T> => {
  await ensureCollectorConnected();
  const response = await new Promise<CollectorResponse<T>>(
    (resolve, reject) => {
      collectorSocket
        .timeout(DEFAULT_ACK_TIMEOUT_MS)
        .emit(
          collector,
          opts,
          (err: Error | null, payload?: CollectorResponse<T>) => {
            // err is non-null when the acknowledgement timed out.
            if (err) {
              reject(err);
              return;
            }
            if (!payload) {
              reject(
                new Error(`Collector '${collector}' returned an empty payload`),
              );
              return;
            }
            resolve(payload);
          },
        );
    },
  );
  if (!response.success) {
    throw new Error(`Collector '${collector}' failed: ${response.error}`);
  }
  return response.data;
};
@@ -1,27 +0,0 @@
import GenericError from "../../../Errors/GenericError";
import { activityCw } from "./activities";
import { CWActivity } from "./activity.types";
/**
 * Retrieve one activity from ConnectWise by its ID.
 *
 * @param cwActivityId - The ConnectWise activity ID to look up
 * @returns The full CW activity object
 * @throws GenericError (status 502) when the upstream fetch fails
 */
export const fetchActivity = async (
  cwActivityId: number,
): Promise<CWActivity> => {
  try {
    return await activityCw.fetch(cwActivityId);
  } catch (error) {
    const details = (error as any).response?.data || error;
    console.error(`Error fetching activity with ID ${cwActivityId}:`, details);
    const cause =
      typeof details === "string" ? details : JSON.stringify(details);
    throw new GenericError({
      name: "FetchActivityError",
      message: `Failed to fetch activity ${cwActivityId}`,
      cause,
      status: 502,
    });
  }
};
@@ -1,28 +0,0 @@
import { Collection } from "@discordjs/collection";
import GenericError from "../../../Errors/GenericError";
import { activityCw } from "./activities";
import { CWActivity } from "./activity.types";
/**
 * Retrieve every activity from ConnectWise, optionally filtered.
 *
 * @param conditions - Optional CW conditions string for filtering
 * @returns Collection of CW activities keyed by their ID
 * @throws GenericError (status 502) when the upstream fetch fails
 */
export const fetchAllActivities = async (
  conditions?: string,
): Promise<Collection<number, CWActivity>> => {
  try {
    return await activityCw.fetchAll(conditions);
  } catch (error) {
    const details = (error as any).response?.data || error;
    console.error("Error fetching all activities:", details);
    const cause =
      typeof details === "string" ? details : JSON.stringify(details);
    throw new GenericError({
      name: "FetchAllActivitiesError",
      message: "Failed to fetch activities from ConnectWise",
      cause,
      status: 502,
    });
  }
};
+2 -37
View File
@@ -1,41 +1,10 @@
import { Company } from "../../types/ConnectWiseTypes";
import {
CollectorCompanyRecord,
CompanySourceRecord,
NormalizedCompanyRecord,
} from "../../types/CompanySourceTypes";
export const isCollectorCompanyRecord = (
value: unknown,
): value is CollectorCompanyRecord => {
if (!value || typeof value !== "object") {
return false;
}
const candidate = value as Partial<CollectorCompanyRecord>;
return (
typeof candidate.companyRecId === "number" &&
"companyId" in candidate &&
"companyName" in candidate
);
};
const normalizeFromCollector = (
company: CollectorCompanyRecord,
): NormalizedCompanyRecord | null => {
if (!company.companyId || !company.companyName) {
return null;
}
return {
id: company.companyRecId,
identifier: company.companyId,
name: company.companyName,
};
};
const normalizeFromCwApi = (
const normalizeCompany = (
company: Company,
): NormalizedCompanyRecord | null => {
if (!company.identifier || !company.name) {
@@ -52,11 +21,7 @@ const normalizeFromCwApi = (
export const normalizeCompanyRecord = (
source: CompanySourceRecord,
): NormalizedCompanyRecord | null => {
if (isCollectorCompanyRecord(source)) {
return normalizeFromCollector(source);
}
return normalizeFromCwApi(source);
return normalizeCompany(source);
};
export const normalizeCompanyRecords = (
@@ -1,30 +0,0 @@
import { connectWiseApi } from "../../../constants";
import { ConfigurationResponse } from "../../../types/ConnectWiseTypes";
import {
processConfigurationResponse,
ProcessedConfiguration,
} from "./processConfigurationResponse";
import GenericError from "../../../Errors/GenericError";
/**
 * Fetch and process all configurations belonging to a CW company.
 *
 * @param cwCompanyId - The ConnectWise company ID
 * @returns The processed configuration summary
 * @throws GenericError (status 502) when the upstream fetch fails
 */
export const fetchCompanyConfigurations = async (
  cwCompanyId: number,
): Promise<ProcessedConfiguration> => {
  const url = `/company/configurations?conditions=company/id=${cwCompanyId}`;
  try {
    const response = await connectWiseApi.get(url);
    return processConfigurationResponse(response.data);
  } catch (error) {
    const details = (error as any).response?.data || error;
    console.error(
      `Error fetching configurations for company ID ${cwCompanyId}:`,
      details,
    );
    throw new GenericError({
      name: "FetchCompanyConfigurationsError",
      message: `Failed to fetch configurations for company ${cwCompanyId}`,
      cause: typeof details === "string" ? details : JSON.stringify(details),
      status: 502,
    });
  }
};
@@ -1,64 +0,0 @@
import { Collection } from "@discordjs/collection";
import { connectWiseApi } from "../../constants";
import { runCollector } from "../collector-client/runCollector";
import {
CollectorCompanyRecord,
NormalizedCompanyRecord,
} from "../../types/CompanySourceTypes";
import { normalizeCompanyRecords } from "./companyTranslation";
/** Build a Collection keyed by company id from normalized company records. */
const toCompanyCollection = (
  companies: NormalizedCompanyRecord[],
): Collection<number, NormalizedCompanyRecord> => {
  const byId = new Collection<number, NormalizedCompanyRecord>();
  companies.forEach((company) => byId.set(company.id, company));
  return byId;
};
/**
 * Fetch every ConnectWise company.
 *
 * Prefers the collector service (single round trip); on any collector
 * failure, falls back to paging through the CW REST API.
 *
 * @returns Collection of normalized companies keyed by company id
 */
export const fetchAllCwCompanies = async (): Promise<
  Collection<number, NormalizedCompanyRecord>
> => {
  // Fast path: the collector returns the full dataset at once.
  try {
    console.log("[fetchAllCwCompanies] Attempting to fetch via collector...");
    const collectorCompanies =
      await runCollector<CollectorCompanyRecord[]>("fetchCompanies");
    if (!Array.isArray(collectorCompanies)) {
      throw new Error("Collector payload was not an array");
    }
    console.log(
      `[fetchAllCwCompanies] ✓ Successfully used collector data (${collectorCompanies.length} companies)`,
    );
    return toCompanyCollection(normalizeCompanyRecords(collectorCompanies));
  } catch (err) {
    console.error(
      `[fetchAllCwCompanies] ✗ Collector fetchCompanies failed, falling back to CW API:`,
      err instanceof Error ? { message: err.message, stack: err.stack } : err,
    );
  }
  // Fallback: count first, then page through the CW REST API.
  const allCompanies = new Collection<number, NormalizedCompanyRecord>();
  const pageCount = 1000;
  const count = (await connectWiseApi.get("/company/companies/count")).data
    .count;
  const totalPages = Math.ceil(count / pageCount);
  for (let page = 0; page < totalPages; page++) {
    // CW pagination is 1-based.
    const response = await connectWiseApi.get(
      `/company/companies?page=${page + 1}&pageSize=${pageCount}`,
    );
    const normalizedCompanies = normalizeCompanyRecords(response.data);
    for (const company of normalizedCompanies) {
      allCompanies.set(company.id, company);
    }
  }
  return allCompanies;
};
-25
View File
@@ -1,25 +0,0 @@
import { connectWiseApi } from "../../constants";
import { Company } from "../../types/ConnectWiseTypes";
import { withCwRetry } from "./withCwRetry";
/**
 * Fetch a single CW company by ID, with retry.
 *
 * Returns `null` instead of throwing when the request ultimately fails,
 * so callers can treat a missing company as absent data.
 *
 * @param companyId - The ConnectWise company ID
 * @returns The company, or null on failure
 */
export const fetchCwCompanyById = async (
  companyId: number,
): Promise<Company | null> => {
  const request = () => connectWiseApi.get(`/company/companies/${companyId}`);
  try {
    const response = await withCwRetry(request, {
      label: `fetchCompany#${companyId}`,
      maxAttempts: 3,
      baseDelayMs: 1_500,
    });
    return response.data;
  } catch (error) {
    console.error(
      `Error fetching company with ID ${companyId}:`,
      (error as any).response?.data || error,
    );
    return null;
  }
};
@@ -1,148 +0,0 @@
import { Collection } from "@discordjs/collection";
import { connectWiseApi } from "../../../constants";
import { runCollector } from "../../collector-client/runCollector";
export interface CWMember {
id: number;
identifier: string;
firstName: string;
lastName: string;
officeEmail: string;
inactiveFlag: boolean;
_info: Record<string, string>;
}
interface CollectorMemberRecord {
memberRecId: number;
memberId: string;
firstName: string | null;
lastName: string | null;
emailAddress: string | null;
deleteFlag: boolean;
lastUpdateUtc?: string | null;
lastUpdate?: string | null;
_info?: Record<string, string>;
}
/** Runtime guard: does `value` look like a collector member row? */
const isCollectorMemberRecord = (
  value: unknown,
): value is CollectorMemberRecord =>
  typeof value === "object" &&
  value !== null &&
  typeof (value as Partial<CollectorMemberRecord>).memberRecId === "number" &&
  typeof (value as Partial<CollectorMemberRecord>).memberId === "string";
/** Map a collector member row onto the CWMember shape used elsewhere. */
const normalizeCollectorMember = (
  member: CollectorMemberRecord,
): CWMember => ({
  id: member.memberRecId,
  identifier: member.memberId,
  firstName: member.firstName ?? "",
  lastName: member.lastName ?? "",
  officeEmail: member.emailAddress ?? "",
  inactiveFlag: Boolean(member.deleteFlag),
  // Prefer the collector's _info blob; otherwise synthesize one from the
  // most recent update timestamp (UTC variant wins, then local, then "").
  _info: member._info ?? {
    lastUpdated: member.lastUpdateUtc ?? member.lastUpdate ?? "",
  },
});
/**
 * Fetch All CW Members
 *
 * Prefers the collector service (single round trip); the collector path is
 * only taken when no `conditions` are given. On any collector failure — or
 * when conditions are supplied — falls back to paging through the CW REST
 * API. Results are keyed by the member's identifier (e.g. "jroberts").
 *
 * @param opts.conditions - Optional CW conditions string to filter members
 * @returns {Promise<Collection<string, CWMember>>} Collection of CW members keyed by identifier
 */
export const fetchAllCwMembers = async (opts?: {
  conditions?: string;
}): Promise<Collection<string, CWMember>> => {
  if (!opts?.conditions) {
    try {
      const collectorMembers = await runCollector<unknown[]>("fetchMembers");
      if (!Array.isArray(collectorMembers)) {
        throw new Error("Collector payload was not an array");
      }
      const members = new Collection<string, CWMember>();
      for (const member of collectorMembers) {
        // Skip rows that don't match the expected collector shape.
        if (!isCollectorMemberRecord(member)) {
          continue;
        }
        const normalized = normalizeCollectorMember(member);
        members.set(normalized.identifier, normalized);
      }
      if (members.size > 0) {
        console.log(
          `[fetchAllCwMembers] Using collector data from fetchMembers (${members.size} members)`,
        );
        return members;
      }
      // An empty/invalid result is treated as a failure so we fall through
      // to the REST API instead of returning nothing.
      throw new Error("Collector payload did not contain valid member records");
    } catch (err) {
      console.warn(
        `[fetchAllCwMembers] Collector fetchMembers failed, falling back to CW API: ${err instanceof Error ? err.message : String(err)}`,
      );
    }
  }
  // REST fallback: fetch the total count first, then page through results.
  const members = new Collection<string, CWMember>();
  const pageSize = 1000;
  const conditionsParam = opts?.conditions
    ? `&conditions=${encodeURIComponent(opts.conditions)}`
    : "";
  const { data: countData } = await connectWiseApi.get(
    `/system/members/count${conditionsParam ? `?${conditionsParam.slice(1)}` : ""}`,
  );
  const totalPages = Math.ceil(countData.count / pageSize);
  for (let page = 0; page < totalPages; page++) {
    // CW pagination is 1-based.
    const { data } = await connectWiseApi.get<CWMember[]>(
      `/system/members?page=${page + 1}&pageSize=${pageSize}${conditionsParam}`,
    );
    for (const member of data) {
      members.set(member.identifier, member);
    }
  }
  return members;
};
/**
 * Look up a ConnectWise member identifier by office email (case-insensitive).
 *
 * @param email - The email address to search for
 * @param members - Optional pre-fetched member collection (saves an API call)
 * @returns {Promise<string | null>} The CW identifier (e.g. "jroberts"), or null when no member matches
 */
export const findCwIdentifierByEmail = async (
  email: string,
  members?: Collection<string, CWMember>,
): Promise<string | null> => {
  const pool = members ?? (await fetchAllCwMembers());
  const target = email.toLowerCase();
  for (const member of pool.values()) {
    if (member.officeEmail?.toLowerCase() === target) {
      return member.identifier;
    }
  }
  return null;
};
@@ -1,141 +0,0 @@
import { Collection } from "@discordjs/collection";
import { prisma } from "../../../constants";
import { redis } from "../../../constants";
import { CWMember } from "./fetchAllMembers";
const REDIS_KEY = "cw:members";
export interface ResolvedMember {
/** Local database user ID (null if no matching local user) */
id: string | null;
/** CW member identifier (e.g. "jroberts") */
identifier: string;
/** Full name resolved from CW member cache, or raw identifier as fallback */
name: string;
/** ConnectWise member ID */
cwMemberId: number | null;
}
/**
* CW Member Cache
*
* Dual-layer cache (in-memory + Redis) of ConnectWise members keyed by
* their identifier (e.g. "jroberts"). Populated by `refreshCwIdentifiers`
* on startup and every 30 minutes thereafter.
*/
let memberCache = new Collection<string, CWMember>();
/**
* Set the member cache contents.
*
* Replaces both the in-memory Collection and the Redis snapshot.
*
* @param members - Collection of CW members keyed by identifier
*/
export const setMemberCache = async (members: Collection<string, CWMember>) => {
  memberCache = members;
  // Persist a JSON snapshot so a restarted process can rehydrate from Redis.
  await redis.set(REDIS_KEY, JSON.stringify([...members.values()]));
};
/**
* Get the current member cache.
*
* Returns the in-memory Collection. If empty, attempts to hydrate from Redis
* first. Returns whatever is available (may be empty if Redis is also cold).
*/
export const getMemberCache = async (): Promise<
  Collection<string, CWMember>
> => {
  // Fast path: in-memory cache is already hydrated.
  if (memberCache.size > 0) return memberCache;
  // Cold start: rebuild the Collection from the Redis snapshot, if present.
  const stored = await redis.get(REDIS_KEY);
  if (stored) {
    const parsed: CWMember[] = JSON.parse(stored);
    memberCache = new Collection(parsed.map((m) => [m.identifier, m]));
  }
  // May still be empty when Redis is cold too.
  return memberCache;
};
/**
 * Resolve a CW identifier to a display name using the in-memory cache.
 *
 * Falls back to the raw identifier when the member is unknown or both name
 * fields are blank.
 *
 * @param identifier - The CW member identifier (e.g. "jroberts")
 * @returns The member's full name (e.g. "John Roberts") or the raw identifier
 */
export const resolveMemberName = (identifier: string): string => {
  const member = memberCache.get(identifier);
  if (!member) return identifier;
  const fullName = `${member.firstName} ${member.lastName}`.trim();
  return fullName.length > 0 ? fullName : identifier;
};
/**
* Resolve CW Identifier to Full Member Info
*
* Looks up a ConnectWise member by their identifier in the in-memory cache
* and cross-references with the local database to return a complete member
* reference including local user ID, CW identifier, full name, and CW member ID.
*
* @param identifier - The CW member identifier (e.g. "jroberts")
* @returns {Promise<ResolvedMember>} Resolved member info
*/
export const resolveMember = async (
  identifier: string,
): Promise<ResolvedMember> => {
  const cwMember = memberCache.get(identifier);
  // Full name from the cache; fall back to the raw identifier when the
  // member is unknown or their name fields are blank.
  const name = cwMember
    ? `${cwMember.firstName} ${cwMember.lastName}`.trim() || identifier
    : identifier;
  // Cross-reference the local user table by CW identifier (single lookup).
  const localUser = await prisma.user.findFirst({
    where: { cwIdentifier: identifier },
    select: { id: true },
  });
  return {
    id: localUser?.id ?? null,
    identifier,
    name,
    cwMemberId: cwMember?.id ?? null,
  };
};
/**
* Resolve Multiple CW Identifiers in a Single Batch
*
* Same as `resolveMember` but batches the DB query so that N identifiers
* require only **one** `findMany` instead of N `findFirst` calls.
*
* @param identifiers - Array of CW member identifiers
* @returns Map of identifier → ResolvedMember
*/
export const resolveMembers = async (
  identifiers: string[],
): Promise<Map<string, ResolvedMember>> => {
  // Deduplicate so each identifier is resolved exactly once.
  const unique = [...new Set(identifiers)];
  // Single batched DB query for all identifiers
  const localUsers = await prisma.user.findMany({
    where: { cwIdentifier: { in: unique } },
    select: { id: true, cwIdentifier: true },
  });
  const userMap = new Map(localUsers.map((u) => [u.cwIdentifier, u.id]));
  const result = new Map<string, ResolvedMember>();
  for (const identifier of unique) {
    const cwMember = memberCache.get(identifier);
    // Same name-resolution rule as resolveMember: full name from the cache,
    // raw identifier as fallback.
    const name = cwMember
      ? `${cwMember.firstName} ${cwMember.lastName}`.trim() || identifier
      : identifier;
    result.set(identifier, {
      id: userMap.get(identifier) ?? null,
      identifier,
      name,
      cwMemberId: cwMember?.id ?? null,
    });
  }
  return result;
};
@@ -1,46 +0,0 @@
import { connectWiseApi, prisma } from "../../../constants";
import { events } from "../../globalEvents";
import { fetchAllCwMembers, findCwIdentifierByEmail } from "./fetchAllMembers";
import { setMemberCache } from "./memberCache";
/**
* Refresh CW Identifiers
*
* Fetches all CW members and all users from the database, then updates
* each user's `cwIdentifier` field by matching their email to a CW member's
* `officeEmail`. Only users whose identifier has changed (or was previously
* null) are updated to avoid unnecessary writes.
*
* Also refreshes the in-memory member cache used for name resolution.
*/
export const refreshCwIdentifiers = async () => {
  events.emit("cw:members:refresh:started");
  const allMembers = await fetchAllCwMembers();
  // Keep the in-memory/Redis member cache in sync with this fetch.
  await setMemberCache(allMembers);
  const allUsers = await prisma.user.findMany({
    select: { id: true, email: true, cwIdentifier: true },
  });
  let updatedCount = 0;
  await Promise.all(
    allUsers.map(async (user) => {
      // Match on email; pass the pre-fetched collection to avoid re-fetching.
      const identifier = await findCwIdentifierByEmail(user.email, allMembers);
      // Only write when the identifier actually changed (avoids no-op writes).
      if (identifier !== user.cwIdentifier) {
        await prisma.user.update({
          where: { id: user.id },
          data: { cwIdentifier: identifier },
        });
        updatedCount++;
      }
    }),
  );
  events.emit("cw:members:refresh:completed", {
    totalMembers: allMembers.size,
    totalUsers: allUsers.length,
    usersUpdated: updatedCount,
  });
};
@@ -1,106 +0,0 @@
import { prisma } from "../../../constants";
import { events } from "../../globalEvents";
import { fetchAllCwMembers, type CWMember } from "./fetchAllMembers";
import { setMemberCache } from "./memberCache";
import { CwMemberController } from "../../../controllers/CwMemberController";
/**
 * Is Regular User
 *
 * Heuristic for distinguishing real people from service accounts
 * (e.g. "labtech", "Admin"): the member must be active and have both a
 * non-blank last name and a non-blank office email.
 */
const isRegularUser = (member: CWMember): boolean => {
  if (member.inactiveFlag) return false;
  const hasLastName = Boolean(member.lastName?.trim());
  const hasEmail = Boolean(member.officeEmail?.trim());
  return hasLastName && hasEmail;
};
/**
* Refresh CW Members
*
* Syncs local CwMember records with ConnectWise using a stale-check
* pattern:
* 1. Fetch all members from CW
* 2. Filter to regular users (active, non-service accounts)
* 3. Compare against local cwLastUpdated timestamps
* 4. Upsert stale/new records
* 5. Also refreshes the in-memory member cache
*/
export const refreshCwMembers = async () => {
  events.emit("cw:members:db:refresh:check");
  // 1. Fetch all members from CW
  const allCwMembers = await fetchAllCwMembers();
  // Also refresh the in-memory cache with ALL members (used for name resolution)
  await setMemberCache(allCwMembers);
  // 2. Filter to regular users only (active, has last name + email)
  const cwMembers = allCwMembers.filter(isRegularUser);
  // 3. Fetch all DB records with their identifier and cwLastUpdated
  const dbItems = await prisma.cwMember.findMany({
    select: { cwMemberId: true, cwLastUpdated: true },
  });
  const dbMap = new Map(
    dbItems.map((item) => [item.cwMemberId, item.cwLastUpdated]),
  );
  // 4. Determine stale / new IDs: stale when we have no local record, or
  // when CW's lastUpdated timestamp is newer than the local one.
  const staleIds: number[] = [];
  for (const [, member] of cwMembers) {
    const cwLastUpdated = member._info?.lastUpdated
      ? new Date(member._info.lastUpdated)
      : null;
    const dbLastUpdated = dbMap.get(member.id) ?? null;
    if (!dbLastUpdated || (cwLastUpdated && cwLastUpdated > dbLastUpdated)) {
      staleIds.push(member.id);
    }
  }
  // Nothing stale: emit a skip event and bail early without any writes.
  if (staleIds.length === 0) {
    events.emit("cw:members:db:refresh:skipped", {
      totalCw: cwMembers.size,
      totalDb: dbItems.length,
      staleCount: 0,
    });
    return;
  }
  events.emit("cw:members:db:refresh:started", {
    totalCw: cwMembers.size,
    totalDb: dbItems.length,
    staleCount: staleIds.length,
  });
  // 5. Upsert stale/new items
  const staleIdSet = new Set(staleIds);
  const updatedCount = (
    await Promise.all(
      [...cwMembers.values()]
        .filter((m) => staleIdSet.has(m.id))
        .map(async (member) => {
          const mapped = CwMemberController.mapCwToDb(member);
          return prisma.cwMember.upsert({
            where: { cwMemberId: member.id },
            create: {
              cwMemberId: member.id,
              ...mapped,
            },
            update: mapped,
          });
        }),
    )
  ).filter(Boolean).length;
  events.emit("cw:members:db:refresh:completed", {
    totalCw: cwMembers.size,
    totalDb: dbItems.length,
    staleCount: staleIds.length,
    itemsUpdated: updatedCount,
  });
};
@@ -1,28 +0,0 @@
import { Collection } from "@discordjs/collection";
import GenericError from "../../../Errors/GenericError";
import { opportunityCw } from "./opportunities";
import { CWOpportunity } from "./opportunity.types";
/**
 * Retrieve every opportunity from ConnectWise, optionally filtered.
 *
 * @param conditions - Optional CW conditions string for filtering
 * @returns Collection of CW opportunities keyed by their ID
 * @throws GenericError (status 502) when the upstream fetch fails
 */
export const fetchAllOpportunities = async (
  conditions?: string,
): Promise<Collection<number, CWOpportunity>> => {
  try {
    return await opportunityCw.fetchAll(conditions);
  } catch (error) {
    const details = (error as any).response?.data || error;
    console.error("Error fetching all opportunities:", details);
    const cause =
      typeof details === "string" ? details : JSON.stringify(details);
    throw new GenericError({
      name: "FetchAllOpportunitiesError",
      message: "Failed to fetch opportunities from ConnectWise",
      cause,
      status: 502,
    });
  }
};
@@ -1,31 +0,0 @@
import { Collection } from "@discordjs/collection";
import GenericError from "../../../Errors/GenericError";
import { opportunityCw } from "./opportunities";
import { CWOpportunity } from "./opportunity.types";
/**
 * Retrieve all opportunities for a specific ConnectWise company.
 *
 * @param cwCompanyId - The ConnectWise company ID
 * @returns Collection of the company's CW opportunities keyed by their ID
 * @throws GenericError (status 502) when the upstream fetch fails
 */
export const fetchCompanyOpportunities = async (
  cwCompanyId: number,
): Promise<Collection<number, CWOpportunity>> => {
  try {
    return await opportunityCw.fetchByCompany(cwCompanyId);
  } catch (error) {
    const details = (error as any).response?.data || error;
    console.error(
      `Error fetching opportunities for company ${cwCompanyId}:`,
      details,
    );
    const cause =
      typeof details === "string" ? details : JSON.stringify(details);
    throw new GenericError({
      name: "FetchCompanyOpportunitiesError",
      message: `Failed to fetch opportunities for company ${cwCompanyId}`,
      cause,
      status: 502,
    });
  }
};
@@ -1,30 +0,0 @@
import GenericError from "../../../Errors/GenericError";
import { opportunityCw } from "./opportunities";
import { CWOpportunity } from "./opportunity.types";
/**
 * Retrieve one opportunity from ConnectWise by its ID.
 *
 * @param cwOpportunityId - The ConnectWise opportunity ID
 * @returns The full CW opportunity object
 * @throws GenericError (status 502) when the upstream fetch fails
 */
export const fetchOpportunity = async (
  cwOpportunityId: number,
): Promise<CWOpportunity> => {
  try {
    return await opportunityCw.fetch(cwOpportunityId);
  } catch (error) {
    const details = (error as any).response?.data || error;
    console.error(
      `Error fetching opportunity with ID ${cwOpportunityId}:`,
      details,
    );
    const cause =
      typeof details === "string" ? details : JSON.stringify(details);
    throw new GenericError({
      name: "FetchOpportunityError",
      message: `Failed to fetch opportunity ${cwOpportunityId}`,
      cause,
      status: 502,
    });
  }
};
@@ -1,6 +1,5 @@
import { Collection } from "@discordjs/collection";
import { connectWiseApi } from "../../../constants";
import { runCollector } from "../../collector-client/runCollector";
import {
CWOpportunity,
CWOpportunityCreate,
@@ -500,59 +499,7 @@ export const opportunityCw = {
},
  /**
   * Fetch All Opportunities from Collector
   *
   * Fetches opportunities from the dalpuri collector service with full
   * opportunity data (relationships, metadata, etc.).
   *
   * Includes: pipeline, status, type, urgency, interest, owner,
   * company, contact, addresses, marketing campaign, and sale dates.
   *
   * @returns {Promise<unknown[]>} — Raw collector payload of opportunities
   * @throws Error when the collector payload is not an array
   */
  fetchAllOpportunitiesFromCollector: async (): Promise<unknown[]> => {
    const startedAt = Date.now();
    console.info("[opportunities] Collector fetchOpportunities started");
    // "include" lists the CW relationships the collector should hydrate.
    const payload = await runCollector<unknown[]>("fetchOpportunities", {
      include: [
        "soPipeline",
        "soOppStatus",
        "soType",
        "soUrgency",
        "soInterest",
        "ownerLevel",
        "company",
        "contact",
        "companyAddress",
        "billingTerms",
        "taxCode",
        "currency",
        "billingUnit",
        "contractType",
        "pmProject",
        "marketingCampaign",
        "agrType",
        "srService",
        "approvedByMember",
        "rejectedByMember",
        "activities",
        "opportunityNotes",
        "forecastItems",
        "contacts",
      ],
    });
    console.info(
      `[opportunities] Collector fetchOpportunities received payload in ${Date.now() - startedAt}ms`,
    );
    // Guard against the collector returning an error object / wrapper.
    if (!Array.isArray(payload)) {
      throw new Error("Collector fetchOpportunities payload was not an array");
    }
    console.info(`[opportunities] Collector payload rows: ${payload.length}`);
    return payload;
  },
};
@@ -131,6 +131,8 @@ export interface CWForecastItem {
sequenceNumber: number;
subNumber: number;
taxableFlag: boolean;
procurementNotes?: string | null;
productNarrative?: string | null;
customFields?: CWCustomField[];
_info?: Record<string, string>;
}
@@ -281,6 +283,7 @@ export interface CWOpportunityUpdate {
source?: string | null;
locationId?: number;
businessUnitId?: number;
customFields?: CWCustomField[];
}
export interface CWOpportunityCreate {
@@ -1,232 +0,0 @@
import { prisma } from "../../../constants";
import { events } from "../../globalEvents";
import { opportunities } from "../../../managers/opportunities";
import { opportunityCw } from "./opportunities";
import { OpportunityController } from "../../../controllers/OpportunityController";
import { invalidateAllOpportunityCaches } from "../../cache/opportunityCache";
/**
* Refresh Opportunities
*
* **Data-source strategy:**
* 1. Try to fetch from the collector (dalpuri) first
* 2. Fall back to ConnectWise API if collector fails or is unavailable
* 3. Normalize the result and upsert into the database
* 4. Reconcile orphaned items (records in DB but not in CW)
*
* Uses the same stale-check pattern as refreshCatalog:
* 1. Fetch lightweight summaries (id + _info.lastUpdated)
* 2. Compare against local cwLastUpdated timestamps
* 3. Full-fetch only stale/new records
* 4. Upsert stale items, optionally linking to internal Company
*/
export const refreshOpportunities = async (opts?: {
  collectorFetch?: () => Promise<unknown[]>;
}) => {
  events.emit("cw:opportunities:refresh:check");
  // ── Step 1: Try collector first, then fall back to CW ──────────────
  let cwSummaries: Map<number, any> = new Map();
  let allCwItems: Map<number, any> = new Map();
  let useCollector = false;
  // Rows upserted via the collector. Carried to the single "completed"
  // event emitted at the end of the collector branch below — previously
  // the collector path emitted "completed" twice (once here, once after
  // reconciliation), which double-counted refreshes for listeners.
  let collectorUpserted = 0;
  if (opts?.collectorFetch) {
    try {
      console.log("[refreshOpportunities] Attempting collector fetch");
      const result = await opportunities.refreshOpportunitiesFromCollector({
        collectorFetch: opts.collectorFetch,
      });
      if (result.fromCollector && result.upserted > 0) {
        useCollector = true;
        collectorUpserted = result.upserted;
        console.log(
          `[refreshOpportunities] Collector provided ${result.upserted} opportunities`,
        );
      } else if (result.errors?.length) {
        console.warn(
          `[refreshOpportunities] Collector errors: ${result.errors.join("; ")}`,
        );
        console.log("[refreshOpportunities] Falling back to ConnectWise API");
      }
    } catch (err) {
      console.warn(
        `[refreshOpportunities] Collector fetch exception, falling back to CW: ${err instanceof Error ? err.message : String(err)}`,
      );
    }
  }
  // If collector didn't work, use traditional CW fetch
  if (!useCollector) {
    console.log(
      "[refreshOpportunities] Fetching opportunities from ConnectWise",
    );
    // 1. Fetch lightweight summaries from CW (id + _info.lastUpdated)
    cwSummaries = await opportunityCw.fetchAllSummaries();
    // 4. Full-fetch all opportunities for upserting
    allCwItems = await opportunityCw.fetchAll();
  }
  // ── Step 2: Reconcile orphaned items ─────────────────────────────────
  // 2. Fetch all DB items with their cwOpportunityId and cwLastUpdated
  const dbItems = await prisma.opportunity.findMany({
    select: {
      id: true,
      cwOpportunityId: true,
      cwLastUpdated: true,
      cwDateEntered: true,
    },
  });
  const dbMap = new Map(dbItems.map((item) => [item.cwOpportunityId, item]));
  if (!useCollector) {
    // 3. Determine stale / new IDs (only if we fetched from CW)
    const staleIds: number[] = [];
    for (const [cwId, summary] of cwSummaries) {
      const cwLastUpdated = summary._info?.lastUpdated
        ? new Date(summary._info.lastUpdated)
        : null;
      const dbItem = dbMap.get(cwId) ?? null;
      const dbLastUpdated = dbItem?.cwLastUpdated ?? null;
      // Treat as stale if never synced, CW has newer data, or cwDateEntered is missing (backfill)
      if (
        !dbLastUpdated ||
        (cwLastUpdated && cwLastUpdated > dbLastUpdated) ||
        !dbItem?.cwDateEntered
      ) {
        staleIds.push(cwId);
      }
    }
    // 3b. Reconcile — find local records that no longer exist in CW
    const orphanedItems = dbItems.filter(
      (item) => !cwSummaries.has(item.cwOpportunityId),
    );
    if (orphanedItems.length > 0) {
      console.log(
        `[refreshOpportunities] Reconciling ${orphanedItems.length} orphaned local record(s) not found in CW`,
      );
      await Promise.all(
        orphanedItems.map(async (item) => {
          await prisma.opportunity.delete({ where: { id: item.id } });
          await invalidateAllOpportunityCaches(item.cwOpportunityId);
        }),
      );
      events.emit("cw:opportunities:refresh:reconciled", {
        orphanedCount: orphanedItems.length,
        removedCwIds: orphanedItems.map((i) => i.cwOpportunityId),
      });
    }
    // Nothing stale — report a skipped cycle and stop early.
    if (staleIds.length === 0) {
      events.emit("cw:opportunities:refresh:skipped", {
        totalCw: cwSummaries.size,
        totalDb: dbItems.length,
        staleCount: 0,
        orphanedCount: orphanedItems.length,
      });
      return;
    }
    events.emit("cw:opportunities:refresh:started", {
      totalCw: cwSummaries.size,
      totalDb: dbItems.length,
      staleCount: staleIds.length,
    });
    // 5. Build a company CW ID → internal ID lookup for linking
    const companies = await prisma.company.findMany({
      select: { id: true, cw_CompanyId: true },
    });
    const companyMap = new Map(companies.map((c) => [c.cw_CompanyId, c.id]));
    // 6. Upsert stale/new items (only if we fetched from CW)
    const updatedCount = (
      await Promise.all(
        staleIds.map(async (cwId) => {
          const item = allCwItems.get(cwId);
          if (!item) return null;
          const mapped = OpportunityController.mapCwToDb(item);
          const companyId = item.company?.id
            ? (companyMap.get(item.company.id) ?? null)
            : null;
          return prisma.opportunity.upsert({
            where: { cwOpportunityId: cwId },
            create: {
              cwOpportunityId: cwId,
              ...mapped,
              companyId,
            },
            update: {
              ...mapped,
              companyId,
            },
          });
        }),
      )
    ).filter(Boolean).length;
    events.emit("cw:opportunities:refresh:completed", {
      totalCw: cwSummaries.size,
      totalDb: dbItems.length,
      staleCount: staleIds.length,
      itemsUpdated: updatedCount,
      orphanedCount: orphanedItems.length,
    });
  } else {
    // Collector-based refresh: still reconcile orphaned items but skip stale-check
    console.log(
      "[refreshOpportunities] Collector-based refresh: skipping stale-check, performing orphan reconciliation",
    );
    // Fetch list of CW opp IDs from cache or a quick count
    // For now, reconcile only items older than a threshold
    // NOTE(review): this heuristic deletes any row not touched in 24h —
    // confirm the collector refresh always bumps cwLastUpdated for rows
    // that are still live in CW, otherwise live rows could be removed.
    const twentyFourHoursAgo = new Date(Date.now() - 24 * 60 * 60 * 1000);
    const orphanedItems = dbItems.filter((item) => {
      const lastUpdated = item.cwLastUpdated ?? item.cwDateEntered;
      // Only reconcile items that were last updated more than a day ago
      return lastUpdated && lastUpdated < twentyFourHoursAgo;
    });
    if (orphanedItems.length > 0) {
      console.log(
        `[refreshOpportunities] Collector reconciling ${orphanedItems.length} stale orphan record(s)`,
      );
      await Promise.all(
        orphanedItems.map(async (item) => {
          await prisma.opportunity.delete({ where: { id: item.id } });
          await invalidateAllOpportunityCaches(item.cwOpportunityId);
        }),
      );
      events.emit("cw:opportunities:refresh:reconciled", {
        orphanedCount: orphanedItems.length,
        removedCwIds: orphanedItems.map((i) => i.cwOpportunityId),
      });
    }
    // Single "completed" event for the collector path, carrying the real
    // upsert count from the collector refresh (was previously emitted a
    // second time here with itemsUpdated: 0).
    events.emit("cw:opportunities:refresh:completed", {
      totalCw: dbItems.length,
      totalDb: dbItems.length,
      staleCount: 0,
      itemsUpdated: collectorUpserted,
      orphanedCount: orphanedItems.length,
    });
  }
};
@@ -1,11 +1,6 @@
import { Collection } from "@discordjs/collection";
import { connectWiseApi } from "../../../constants";
import { runCollector } from "../../collector-client/runCollector";
import { CatalogItem } from "./catalog.types.ts";
import {
normalizeCollectorProducts,
NormalizedCatalogCollectorItem,
} from "./catalogCollectorTranslation";
export interface CatalogSummary {
id: number;
@@ -18,40 +13,6 @@ export interface InventoryEntry {
}
export const catalogCw = {
fetchAllProductsFromCollector: async (): Promise<
Collection<number, NormalizedCatalogCollectorItem>
> => {
const startedAt = Date.now();
console.info("[catalog-refresh] Collector fetchProducts started");
const payload = await runCollector<unknown[]>("fetchProducts", {
include: ["subcategory", "manufacturer", "inventory", "itemVendors"],
});
console.info(
`[catalog-refresh] Collector fetchProducts received payload in ${Date.now() - startedAt}ms`,
);
if (!Array.isArray(payload)) {
throw new Error("Collector payload was not an array");
}
console.info(`[catalog-refresh] Collector payload rows: ${payload.length}`);
const normalizeStartedAt = Date.now();
const normalized = normalizeCollectorProducts(payload);
console.info(
`[catalog-refresh] Collector normalization completed in ${Date.now() - normalizeStartedAt}ms (${normalized.size} valid rows)`,
);
if (normalized.size === 0) {
throw new Error(
"Collector payload did not contain valid product records",
);
}
return new Collection<number, NormalizedCatalogCollectorItem>(normalized);
},
countItems: async (): Promise<number> => {
const response = await connectWiseApi.get("/procurement/catalog/count");
return response.data.count;
@@ -1,281 +0,0 @@
/**
* Catalog Collector Translation
*
* Maps products from the collector (dalpuri) fetchProducts schema
* to the internal database schema for normalization and storage.
*/
// Values that may arrive as numbers or numeric strings from the collector.
type NumberLike = number | string | null | undefined;
// Generic collector reference object: { recId, description | name }.
interface CollectorReference {
  recId?: number;
  description?: string;
  name?: string;
}
// Vendor reference; may also carry an identifier code.
interface CollectorVendorReference {
  recId?: number;
  identifier?: string;
  description?: string;
  name?: string;
}
// One inventory row; only onHand is consumed by this module.
interface CollectorInventory {
  onHand?: NumberLike;
}
// Join row linking a product to a vendor.
interface CollectorItemVendor {
  vendor?: CollectorVendorReference | null;
}
/**
 * Raw collector product shape from fetchProducts.
 * This matches the MSSQL source structure with appropriate field names.
 *
 * All fields are optional: normalizeCollectorProduct() prefers the
 * current field names and falls back to the legacy `p*` names.
 */
export interface CollectorProduct {
  // Current collector fields
  catalogRecId?: number;
  itemId?: string;
  description?: string;
  longDescription?: string | null;
  notes?: string | null;
  categoryRecId?: NumberLike;
  category?: CollectorReference | null;
  subcategoryRecId?: NumberLike;
  subcategory?: CollectorReference | null;
  manufacturerRecId?: NumberLike;
  manufacturerPartNum?: string | null;
  manufacturer?: CollectorReference | null;
  vendorRecId?: NumberLike;
  vendorSku?: string | null;
  itemVendors?: CollectorItemVendor[] | null;
  listPrice?: NumberLike;
  currentCost?: NumberLike;
  inventory?: CollectorInventory[] | null;
  inactiveFlag?: boolean;
  taxableFlag?: boolean;
  lastUpdatedUtc?: string | null;
  lastUpdate?: string | null;
  dateEnteredUtc?: string | null;
  // Legacy collector fields retained for backward compatibility
  pId?: number;
  pIdentifier?: string;
  pNumber?: string;
  pName?: string;
  pDescription?: string;
  pCustomerDescription?: string;
  pInternalNotes?: string;
  pCategory?: CollectorReference | null;
  pSubcategory?: CollectorReference | null;
  pManufacturer?: CollectorReference | null;
  pManufacturerPartNumber?: string;
  pVendor?: CollectorVendorReference | null;
  pVendorSku?: string;
  pPrice?: NumberLike;
  pCost?: NumberLike;
  pOnHand?: NumberLike;
  pInactive?: boolean;
  pSalesTaxable?: boolean;
  _info?: { lastUpdated?: string; dateEntered?: string };
}
/**
 * Normalized product shape that maps to the DB CatalogItem table.
 * Output from normalizeCollectorProduct().
 *
 * Numeric/boolean fields are coerced to non-null defaults (0 / false);
 * string fields may be null when the source had no value.
 */
export interface NormalizedCatalogCollectorItem {
  // Core identifiers (required for upsert)
  cwCatalogId: number;
  // Basic info
  identifier: string;
  name: string;
  description: string | null;
  customerDescription: string | null;
  internalNotes: string | null;
  // Categorization
  category: string | null;
  categoryCwId: number | null;
  subcategory: string | null;
  subcategoryCwId: number | null;
  // Vendor / Manufacturer
  manufacturer: string | null;
  manufactureCwId: number | null;
  partNumber: string | null;
  vendorName: string | null;
  vendorSku: string | null;
  vendorCwId: number | null;
  // Pricing & Inventory
  price: number;
  cost: number;
  onHand: number;
  inactive: boolean;
  salesTaxable: boolean;
  // Metadata
  cwLastUpdated: Date | null;
  cwDateEntered: Date | null;
}
/**
 * Parse a date string into a Date, returning null when the input is
 * missing, empty, or does not represent a valid date.
 */
const parseDate = (dateString: string | null | undefined): Date | null => {
  if (!dateString) return null;
  try {
    const parsed = new Date(dateString);
    if (Number.isNaN(parsed.getTime())) return null;
    return parsed;
  } catch {
    return null;
  }
};
// Coerce a number or non-blank numeric string into a finite number;
// anything else (NaN, Infinity, blanks, other types) yields null.
const parseNumber = (value: NumberLike): number | null => {
  switch (typeof value) {
    case "number":
      return Number.isFinite(value) ? value : null;
    case "string": {
      if (value.trim().length === 0) return null;
      const parsed = Number(value);
      return Number.isFinite(parsed) ? parsed : null;
    }
    default:
      return null;
  }
};
/**
 * Extract a positive `recId` from a nested reference object.
 * Returns null for non-objects, missing ids, and non-positive ids.
 */
const getId = (obj: unknown): number | null => {
  if (!obj || typeof obj !== "object") return null;
  const raw = (obj as Record<string, unknown>).recId;
  const recId = parseNumber(raw as NumberLike);
  if (recId === null || recId <= 0) return null;
  return recId;
};
/**
 * Pull a human-readable label from a reference object, preferring
 * `description` over `name`; returns `fallback` when neither is a
 * non-empty string, or the input is not an object.
 */
const getName = (obj: unknown, fallback = ""): string => {
  if (!obj || typeof obj !== "object") return fallback;
  const { description, name } = obj as Record<string, unknown>;
  const candidate = typeof description === "string" ? description : name;
  if (typeof candidate !== "string") return fallback;
  return candidate || fallback;
};
// Vendor label: legacy pVendor first, then the first itemVendors entry.
const getVendorName = (item: CollectorProduct): string | null => {
  const legacy = getName(item.pVendor ?? null);
  if (legacy) return legacy;
  const nested = getName(item.itemVendors?.[0]?.vendor ?? null);
  return nested || null;
};
// Total on-hand quantity: sum of all inventory rows when present,
// otherwise the legacy pOnHand field, otherwise 0.
const getOnHand = (item: CollectorProduct): number => {
  const { inventory } = item;
  if (!Array.isArray(inventory) || inventory.length === 0) {
    return parseNumber(item.pOnHand) ?? 0;
  }
  let total = 0;
  for (const entry of inventory) {
    total += parseNumber(entry?.onHand) ?? 0;
  }
  return total;
};
/**
 * Normalize a collector product into the internal DB schema.
 *
 * Handles field mapping, type conversions, and null coercion. Each
 * field prefers the current collector name and falls back to the
 * legacy `p*` field for backward compatibility.
 *
 * @throws Error when neither catalogRecId nor pId yields a positive id
 */
export const normalizeCollectorProduct = (
  item: CollectorProduct,
): NormalizedCatalogCollectorItem => {
  const cwCatalogId =
    parseNumber(item.catalogRecId) ?? parseNumber(item.pId) ?? 0;
  // 0 / negative means "missing": the record cannot be keyed for upsert.
  if (cwCatalogId <= 0) {
    throw new Error("Collector product missing catalogRecId");
  }
  // Build normalized object mapping collector fields to DB schema
  return {
    cwCatalogId,
    // Basic info
    identifier: item.itemId ?? item.pIdentifier ?? item.pNumber ?? `product_${cwCatalogId}`,
    name: item.description ?? item.pName ?? "",
    description: item.longDescription ?? item.pDescription ?? null,
    customerDescription: item.pCustomerDescription ?? null,
    internalNotes: item.notes ?? item.pInternalNotes ?? null,
    // Categorization
    category: getName(item.category ?? item.pCategory),
    categoryCwId:
      parseNumber(item.categoryRecId) ?? getId(item.category ?? item.pCategory),
    subcategory: getName(item.subcategory ?? item.pSubcategory),
    subcategoryCwId:
      parseNumber(item.subcategoryRecId) ??
      getId(item.subcategory ?? item.pSubcategory),
    // Vendor / Manufacturer
    manufacturer: getName(item.manufacturer ?? item.pManufacturer),
    manufactureCwId:
      parseNumber(item.manufacturerRecId) ??
      getId(item.manufacturer ?? item.pManufacturer),
    partNumber: item.manufacturerPartNum ?? item.pManufacturerPartNumber ?? null,
    vendorName: getVendorName(item),
    vendorSku: item.vendorSku ?? item.pVendorSku ?? null,
    vendorCwId:
      parseNumber(item.vendorRecId) ??
      getId(item.itemVendors?.[0]?.vendor ?? item.pVendor),
    // Pricing & Inventory — coerced to 0 when absent.
    price: parseNumber(item.listPrice) ?? parseNumber(item.pPrice) ?? 0,
    cost: parseNumber(item.currentCost) ?? parseNumber(item.pCost) ?? 0,
    onHand: getOnHand(item),
    inactive: item.inactiveFlag ?? item.pInactive ?? false,
    salesTaxable: item.taxableFlag ?? item.pSalesTaxable ?? false,
    // Metadata — null when no parsable timestamp is present.
    cwLastUpdated: parseDate(
      item.lastUpdatedUtc ?? item.lastUpdate ?? item._info?.lastUpdated,
    ),
    cwDateEntered: parseDate(item.dateEnteredUtc ?? item._info?.dateEntered),
  };
};
/**
 * Normalize every collector product row, keyed by cwCatalogId.
 * Rows that fail to normalize are logged and skipped rather than
 * failing the whole batch.
 */
export const normalizeCollectorProducts = (
  items: unknown[],
): Map<number, NormalizedCatalogCollectorItem> => {
  const byId = new Map<number, NormalizedCatalogCollectorItem>();
  items.forEach((raw) => {
    try {
      const row = normalizeCollectorProduct(raw as CollectorProduct);
      byId.set(row.cwCatalogId, row);
    } catch (err) {
      console.warn(
        `[catalogCollectorTranslation] Failed to normalize item: ${err instanceof Error ? err.message : String(err)}`,
      );
    }
  });
  return byId;
};
@@ -1,469 +0,0 @@
import { prisma, redis, connectWiseApi } from "../../../constants";
import { withCwRetry } from "../withCwRetry";
import { catalogCw } from "./catalog";
import { CatalogItem } from "./catalog.types";
type JsonObject = Record<string, unknown>;
// One product row extracted from an adjustment detail; `key` is the
// concatenated change-detection signature for the row.
type TrackedProduct = {
  cwCatalogId: number;
  product: string;
  onHand: string;
  inventory: string;
  key: string;
};
// Snapshot of a single adjustment: its tracked rows plus a stable
// signature used to detect changes between polls.
type AdjustmentSnapshot = {
  key: string;
  trackedRows: TrackedProduct[];
  signature: string;
};
const ADJUSTMENTS_ENDPOINT = "/procurement/adjustments?pageSize=1000";
// Redis namespace + TTL for per-item sync snapshots.
const CATALOG_ITEM_CACHE_PREFIX = "catalog:item:cw:";
const CATALOG_ITEM_CACHE_TTL_SECONDS = 20 * 60;
// Upper bound on catalog-item syncs per polling cycle (env-overridable).
const MAX_SYNC_PER_CYCLE = Number(
  process.env.CW_ADJUSTMENT_SYNC_MAX_PER_CYCLE ?? "50",
);
// Minimum gap between two syncs of the same item (env-overridable).
const SYNC_COOLDOWN_MS = Number(
  process.env.CW_ADJUSTMENT_SYNC_COOLDOWN_MS ?? `${10 * 60 * 1000}`,
);
// Module-level poller state: previous poll snapshots, per-item
// last-sync times, and a re-entrancy guard for overlapping ticks.
let previous = new Map<string, AdjustmentSnapshot>();
let previousProductState = new Map<number, string>();
const lastSyncedAt = new Map<number, number>();
let inFlight = false;
// Narrow to a plain (non-null, non-array) object.
const isObject = (value: unknown): value is JsonObject => {
  if (value === null || Array.isArray(value)) return false;
  return typeof value === "object";
};
// Coerce any value to a JsonObject, substituting {} for non-objects.
const toObject = (value: unknown): JsonObject =>
  isObject(value) ? value : {};
const stableStringify = (value: unknown): string => {
if (Array.isArray(value)) {
const entries = value.map((entry) => stableStringify(entry)).sort();
return `[${entries.join(",")}]`;
}
if (isObject(value)) {
const keys = Object.keys(value).sort();
const pairs = keys.map(
(key) => `${JSON.stringify(key)}:${stableStringify(value[key])}`,
);
return `{${pairs.join(",")}}`;
}
return JSON.stringify(value);
};
// Walk a dot-separated path through nested plain objects; returns null
// when any hop is not a plain object (arrays are not traversed).
const readPathValue = (obj: JsonObject, path: string): unknown => {
  let cursor: unknown = obj;
  for (const segment of path.split(".")) {
    if (cursor === null || typeof cursor !== "object" || Array.isArray(cursor)) {
      return null;
    }
    cursor = (cursor as JsonObject)[segment];
  }
  return cursor;
};
// First non-empty (not null/undefined/"") value found at any of the
// given paths, else null.
const firstValue = (obj: JsonObject, paths: string[]): unknown => {
  for (const path of paths) {
    const candidate = readPathValue(obj, path);
    if (candidate !== null && candidate !== undefined && candidate !== "") {
      return candidate;
    }
  }
  return null;
};
/**
 * Coerce a value to a finite number.
 *
 * Accepts actual numbers and non-blank numeric strings; returns null
 * for everything else (including NaN/Infinity). Blank / whitespace-only
 * strings are rejected explicitly — `Number(" ") === 0`, which
 * previously made whitespace look like a real zero quantity; this also
 * matches the trimming behavior of parseNumber in the catalog
 * translation module.
 */
const asNumber = (value: unknown): number | null => {
  if (typeof value === "number" && Number.isFinite(value)) return value;
  if (typeof value === "string" && value.trim().length > 0) {
    const parsed = Number(value);
    if (Number.isFinite(parsed)) return parsed;
  }
  return null;
};
// Render any value as a compact display string; "-" marks empty values.
// Objects prefer a short identifying field over full serialization.
const asText = (value: unknown): string => {
  if (value === null || value === undefined || value === "") return "-";
  const kind = typeof value;
  if (kind === "string" || kind === "number" || kind === "boolean") {
    return String(value);
  }
  if (Array.isArray(value)) {
    return `[${value.map((entry) => asText(entry)).join(",")}]`;
  }
  if (!isObject(value)) return String(value);
  for (const field of ["name", "identifier", "id", "code", "value"]) {
    const fieldValue = readPathValue(value, field);
    if (fieldValue === null || fieldValue === undefined || fieldValue === "")
      continue;
    if (typeof fieldValue === "object") continue;
    return String(fieldValue);
  }
  return stableStringify(value);
};
// Stable identity for an adjustment row: the first id-like field found,
// falling back to a content-derived "anon:<json>" key.
const adjustmentKey = (adjustment: JsonObject): string => {
  const keyPaths = [
    "id",
    "adjustmentId",
    "procurementAdjustmentId",
    "recordId",
    "recId",
    "_info.id",
    "_info.href",
  ];
  for (const path of keyPaths) {
    const keyText = asText(firstValue(adjustment, [path]));
    if (keyText !== "-") return keyText;
  }
  return `anon:${stableStringify(adjustment)}`;
};
/**
 * Map one adjustment-detail object to a TrackedProduct, or null when it
 * carries no usable catalog id or no quantity-like field.
 *
 * Field names vary across payload shapes, so several candidate paths
 * are probed per logical field (first non-empty value wins).
 */
const trackedRow = (detail: JsonObject): TrackedProduct | null => {
  const cwCatalogId = asNumber(
    firstValue(detail, [
      "catalogItem.id",
      "catalogItemId",
      "catalog.id",
      "catalogId",
      "item.id",
      "itemId",
      "product.id",
      "productId",
      "id",
    ]),
  );
  if (!cwCatalogId) return null;
  const onHand = asText(
    firstValue(detail, [
      "onHand",
      "onHandQty",
      "onHandQuantity",
      "qtyOnHand",
      "quantityOnHand",
      "quantity.onHand",
    ]),
  );
  const inventory = asText(
    firstValue(detail, [
      "inventory",
      "inventoryQty",
      "inventoryLevel",
      "quantity",
      "qty",
    ]),
  );
  // A row is only tracked when at least one quantity field is present.
  if (onHand === "-" && inventory === "-") return null;
  const product = asText(
    firstValue(detail, [
      "product.name",
      "product.identifier",
      "item.name",
      "item.identifier",
      "catalogItem.name",
      "catalogItem.identifier",
      "productName",
      "productIdentifier",
      "sku",
      "identifier",
    ]),
  );
  // `key` doubles as the change-detection signature for this row.
  return {
    cwCatalogId,
    product,
    onHand,
    inventory,
    key: `${cwCatalogId}|${product}|${onHand}|${inventory}`,
  };
};
// Extract tracked product rows from an adjustment: the first detail
// array yielding rows wins; otherwise the adjustment itself may carry
// the fields directly.
const trackedRows = (adjustment: JsonObject): TrackedProduct[] => {
  const candidates = [
    readPathValue(adjustment, "adjustmentDetails"),
    readPathValue(adjustment, "details"),
    readPathValue(adjustment, "lineItems"),
  ];
  for (const candidate of candidates) {
    if (!Array.isArray(candidate)) continue;
    const rows: TrackedProduct[] = [];
    for (const entry of candidate) {
      const row = trackedRow(toObject(entry));
      if (row !== null) rows.push(row);
    }
    rows.sort((a, b) => a.key.localeCompare(b.key));
    if (rows.length > 0) return rows;
  }
  const fallback = trackedRow(adjustment);
  return fallback ? [fallback] : [];
};
// Index raw adjustment rows by stable key, pre-computing each
// adjustment's row signature for later diffing.
const snapshot = (rows: unknown[]): Map<string, AdjustmentSnapshot> => {
  const indexed = new Map<string, AdjustmentSnapshot>();
  for (const entry of rows) {
    const adjustment = toObject(entry);
    const key = adjustmentKey(adjustment);
    const tracked = trackedRows(adjustment);
    indexed.set(key, {
      key,
      trackedRows: tracked,
      signature: stableStringify(tracked),
    });
  }
  return indexed;
};
// Catalog ids whose signature is new, or differs, between two states.
// Ids present only in `before` are not reported (removals are ignored).
const changedCatalogIds = (
  before: Map<number, string>,
  after: Map<number, string>,
): Set<number> => {
  const changed = new Set<number>();
  for (const [cwCatalogId, nextSignature] of after) {
    const prevSignature = before.get(cwCatalogId);
    if (!prevSignature || prevSignature !== nextSignature) {
      changed.add(cwCatalogId);
    }
  }
  return changed;
};
// Collapse all adjustment rows into one deterministic signature per
// catalog id (union of that product's row keys, sorted then serialized).
const productState = (
  adjustments: Map<string, AdjustmentSnapshot>,
): Map<number, string> => {
  const rowKeysById = new Map<number, Set<string>>();
  for (const snap of adjustments.values()) {
    for (const row of snap.trackedRows) {
      const keys = rowKeysById.get(row.cwCatalogId) ?? new Set<string>();
      keys.add(row.key);
      rowKeysById.set(row.cwCatalogId, keys);
    }
  }
  const state = new Map<number, string>();
  for (const [cwCatalogId, keys] of rowKeysById) {
    state.set(cwCatalogId, stableStringify([...keys].sort()));
  }
  return state;
};
// Drop ids still inside the per-item sync cooldown, then cap the batch
// at MAX_SYNC_PER_CYCLE.
const applySyncGuards = (ids: number[]): number[] => {
  const now = Date.now();
  const eligible = ids.filter((cwCatalogId) => {
    const last = lastSyncedAt.get(cwCatalogId);
    return !last || now - last >= SYNC_COOLDOWN_MS;
  });
  return eligible.length > MAX_SYNC_PER_CYCLE
    ? eligible.slice(0, MAX_SYNC_PER_CYCLE)
    : eligible;
};
// Pull the current adjustments page from CW (with retry); tolerates
// both bare-array and `{ data: [...] }` response shapes.
const fetchAdjustments = async (): Promise<unknown[]> => {
  const response = await withCwRetry(
    () => connectWiseApi.get(ADJUSTMENTS_ENDPOINT),
    { label: "inventory-adjustments", maxAttempts: 3 },
  );
  const payload = response.data;
  if (Array.isArray(payload)) return payload;
  if (isObject(payload) && Array.isArray(payload.data)) return payload.data;
  return [];
};
// Redis key for a cached catalog item, namespaced by CW catalog id.
const cacheKey = (cwCatalogId: number): string =>
  `${CATALOG_ITEM_CACHE_PREFIX}${cwCatalogId}`;
// Parse the CW `_info.lastUpdated` timestamp; falls back to "now" when
// the value is absent or unparsable so the DB column is always set.
const cwLastUpdated = (item: CatalogItem): Date => {
  const raw = item._info?.lastUpdated;
  if (!raw) return new Date();
  const parsed = new Date(raw);
  return Number.isNaN(parsed.getTime()) ? new Date() : parsed;
};
/**
 * Re-sync one catalog item from ConnectWise into the DB and Redis.
 *
 * Fetches the item and its on-hand quantity (each with retry), upserts
 * the row keyed by cwCatalogId, then caches a snapshot for readers.
 *
 * @returns true on success; false when any step failed (the error is
 *          logged — callers decide whether to retry on a later cycle).
 */
const syncCatalogItem = async (cwCatalogId: number): Promise<boolean> => {
  try {
    const item = await withCwRetry(
      () => catalogCw.fetchByCatalogId(cwCatalogId),
      {
        label: `catalog-item:${cwCatalogId}`,
        maxAttempts: 3,
      },
    );
    const onHand = await withCwRetry(
      () => catalogCw.fetchInventoryOnHand(cwCatalogId),
      {
        label: `catalog-onhand:${cwCatalogId}`,
        maxAttempts: 3,
      },
    );
    // Single source of truth for the mapped columns — previously this
    // 20-line mapping was duplicated verbatim in `create` and `update`,
    // inviting drift between the two branches.
    const mappedFields = {
      identifier: item.identifier,
      name: item.description,
      description: item.description,
      customerDescription: item.customerDescription,
      internalNotes: item.notes,
      category: item.category?.name,
      categoryCwId: item.category?.id,
      subcategory: item.subcategory?.name,
      subcategoryCwId: item.subcategory?.id,
      manufacturer: item.manufacturer?.name,
      manufactureCwId: item.manufacturer?.id,
      partNumber: item.manufacturerPartNumber,
      vendorName: item.vendor?.name,
      vendorSku: item.vendorSku,
      vendorCwId: item.vendor?.id,
      price: item.price,
      cost: item.cost,
      inactive: item.inactiveFlag,
      salesTaxable: item.taxableFlag,
      onHand,
      cwLastUpdated: cwLastUpdated(item),
    };
    const persisted = await prisma.catalogItem.upsert({
      where: { cwCatalogId },
      create: { cwCatalogId, ...mappedFields },
      update: mappedFields,
    });
    // Cache both the raw CW item and the persisted row for readers.
    await redis.set(
      cacheKey(cwCatalogId),
      JSON.stringify({
        cwCatalogId,
        onHand,
        cwItem: item,
        dbItem: persisted,
        syncedAt: new Date().toISOString(),
      }),
      "EX",
      CATALOG_ITEM_CACHE_TTL_SECONDS,
    );
    return true;
  } catch (err) {
    console.error(
      `[inventory-adjustments] failed to sync catalog item ${cwCatalogId}`,
      err,
    );
    return false;
  }
};
/**
 * Poll CW procurement adjustments and re-sync catalog items whose
 * tracked quantities changed since the previous poll.
 *
 * The first successful run only captures a baseline (nothing is
 * synced). `inFlight` guards against overlapping runs when invoked on
 * a timer. Errors are logged, never thrown.
 */
export const listenInventoryAdjustments = async (): Promise<void> => {
  if (inFlight) return;
  inFlight = true;
  try {
    const rows = await fetchAdjustments();
    const current = snapshot(rows);
    const currentProductState = productState(current);
    // First run: remember state and exit — avoids a sync storm at boot.
    if (previous.size === 0) {
      previous = current;
      previousProductState = currentProductState;
      console.log(
        `[inventory-adjustments] baseline captured (${current.size} adjustments, ${currentProductState.size} products)`,
      );
      return;
    }
    // Diff per-product signatures against the previous poll.
    const changedIds = [
      ...changedCatalogIds(previousProductState, currentProductState),
    ].sort((a, b) => a - b);
    // Apply the per-item cooldown and the per-cycle cap.
    const guardedIds = applySyncGuards(changedIds);
    // State is advanced before syncing: an id that fails below will not
    // be re-queued until its signature changes again.
    previous = current;
    previousProductState = currentProductState;
    if (guardedIds.length === 0) return;
    let successCount = 0;
    for (const cwCatalogId of guardedIds) {
      const ok = await syncCatalogItem(cwCatalogId);
      if (!ok) continue;
      // Only successful syncs start a cooldown window.
      lastSyncedAt.set(cwCatalogId, Date.now());
      successCount += 1;
    }
    const skippedByCooldown = changedIds.length - guardedIds.length;
    console.log(
      `[inventory-adjustments] inventory changed for ${changedIds.length} products, queued ${guardedIds.length}, synced ${successCount}, cooldown/cap skipped ${skippedByCooldown}`,
    );
  } catch (err) {
    console.error("[inventory-adjustments] listener failed", err);
  } finally {
    inFlight = false;
  }
};
@@ -1,429 +0,0 @@
import { prisma } from "../../../constants";
import { events } from "../../globalEvents";
import { catalogCw } from "./catalog";
import { NormalizedCatalogCollectorItem } from "./catalogCollectorTranslation";
// Tuning knobs: parallel fetch width, pause between fetch batches, and
// upsert batch size.
const CONCURRENCY = 6;
const BATCH_DELAY_MS = 250;
const UPSERT_BATCH_SIZE = 50;
// Cap on row-level error log lines per refresh cycle.
const MAX_ROW_ERROR_LOGS = 10;
// Promise that resolves after roughly `ms` milliseconds.
const sleep = (ms: number) =>
  new Promise((resolve) => setTimeout(resolve, ms));
// Clamp a string to `max` characters, replacing the tail with "..."
// when it had to be cut (total length stays <= max).
const truncate = (value: string, max = 400): string => {
  if (value.length <= max) return value;
  return value.slice(0, max - 3) + "...";
};
// Render a Prisma (or generic) error as one compact pipe-delimited line:
// "Prisma <code>: <msg> | model=... | target=... | cause=... | clientVersion=...".
const summarizePrismaError = (err: unknown): string => {
  const record = err as Record<string, unknown>;
  const code =
    typeof record?.code === "string" && record.code.length > 0
      ? record.code
      : null;
  const message =
    typeof record?.message === "string" && record.message.length > 0
      ? record.message
      : String(err);
  const clientVersion =
    typeof record?.clientVersion === "string" ? record.clientVersion : null;
  const meta =
    record?.meta && typeof record.meta === "object"
      ? (record.meta as Record<string, unknown>)
      : null;
  const modelName = typeof meta?.modelName === "string" ? meta.modelName : null;
  const targetRaw = meta?.target;
  let target: string | null = null;
  if (Array.isArray(targetRaw)) target = targetRaw.join(",");
  else if (typeof targetRaw === "string") target = targetRaw;
  const cause = typeof meta?.cause === "string" ? meta.cause : null;
  const parts: string[] = [
    code ? `Prisma ${code}: ${truncate(message, 600)}` : truncate(message, 600),
  ];
  if (modelName) parts.push(`model=${modelName}`);
  if (target) parts.push(`target=${target}`);
  if (cause) parts.push(`cause=${truncate(cause, 400)}`);
  if (clientVersion) parts.push(`clientVersion=${clientVersion}`);
  return parts.join(" | ");
};
/**
 * Build a rate-limited row-error logger: at most MAX_ROW_ERROR_LOGS
 * row-level errors are logged per refresh cycle, after which a single
 * suppression notice is printed.
 */
const createCatalogErrorLogger = () => {
  let logged = 0;
  return (context: string, err: unknown, detail?: Record<string, unknown>) => {
    if (logged >= MAX_ROW_ERROR_LOGS) return;
    const detailStr = detail ? JSON.stringify(detail) : "";
    const suffix = detailStr ? ` | detail=${truncate(detailStr, 120)}` : "";
    console.error(
      `[catalog-refresh] ${context}: ${summarizePrismaError(err)}${suffix}`,
    );
    logged += 1;
    if (logged === MAX_ROW_ERROR_LOGS) {
      console.error(
        `[catalog-refresh] Reached ${MAX_ROW_ERROR_LOGS} row-level error logs; suppressing additional row errors for this refresh cycle`,
      );
    }
  };
};
/**
 * Run tasks in fixed-size concurrent batches with a pause between
 * batches (no pause after the final batch); returns how many tasks
 * rejected.
 */
const runSlowParallel = async (
  tasks: Array<() => Promise<void>>,
): Promise<number> => {
  let failures = 0;
  for (let offset = 0; offset < tasks.length; offset += CONCURRENCY) {
    const settled = await Promise.allSettled(
      tasks.slice(offset, offset + CONCURRENCY).map((task) => task()),
    );
    failures += settled.filter((r) => r.status === "rejected").length;
    const hasMore = offset + CONCURRENCY < tasks.length;
    if (hasMore) await sleep(BATCH_DELAY_MS);
  }
  return failures;
};
// Run upsert tasks in batches of UPSERT_BATCH_SIZE (unlike
// runSlowParallel, no inter-batch delay); returns the failure count.
const runBatchUpserts = async (
  tasks: Array<() => Promise<void>>,
): Promise<number> => {
  let failures = 0;
  for (let offset = 0; offset < tasks.length; offset += UPSERT_BATCH_SIZE) {
    const settled = await Promise.allSettled(
      tasks.slice(offset, offset + UPSERT_BATCH_SIZE).map((task) => task()),
    );
    failures += settled.filter((r) => r.status === "rejected").length;
  }
  return failures;
};
/**
 * Full catalog refresh: collector-first, ConnectWise API fallback.
 *
 * Emits cw:catalog:refresh:{check,started,completed} events and logs
 * phase timings. A collector failure is downgraded to a warning and the
 * CW fallback runs instead; a fallback failure is logged and re-thrown.
 */
export const refreshCatalog = async () => {
  const refreshStartedAt = Date.now();
  // Shared rate-limited logger so row errors are capped per cycle.
  const logCatalogError = createCatalogErrorLogger();
  events.emit("cw:catalog:refresh:check");
  console.info("[catalog-refresh] Refresh cycle started");
  try {
    console.info(
      "[catalog-refresh] Attempting collector-first catalog refresh",
    );
    const collectorItems = await catalogCw.fetchAllProductsFromCollector();
    console.info(
      `[catalog-refresh] Collector returned ${collectorItems.size} products`,
    );
    // Collector path treats every returned row as stale (full upsert).
    events.emit("cw:catalog:refresh:started", {
      totalCw: collectorItems.size,
      totalDb: null,
      staleCount: collectorItems.size,
    });
    const upsertStartedAt = Date.now();
    const updatedCount = await upsertCollectorItems(
      collectorItems,
      logCatalogError,
    );
    console.info(
      `[catalog-refresh] Collector upserts completed in ${Date.now() - upsertStartedAt}ms (${updatedCount} rows updated)`,
    );
    events.emit("cw:catalog:refresh:completed", {
      totalCw: collectorItems.size,
      totalDb: collectorItems.size,
      staleCount: collectorItems.size,
      itemsUpdated: updatedCount,
    });
    console.info(
      `[catalog-refresh] Refresh cycle completed via collector in ${Date.now() - refreshStartedAt}ms`,
    );
    return;
  } catch (err) {
    // Collector failure is non-fatal: fall through to the CW API path.
    console.warn(
      `[catalog-refresh] Collector fetchProducts failed, falling back to CW API: ${err instanceof Error ? err.message : String(err)}`,
    );
  }
  const fallbackStartedAt = Date.now();
  console.info("[catalog-refresh] Starting CW fallback catalog refresh");
  try {
    await refreshCatalogFromCw(logCatalogError);
  } catch (err) {
    // Fallback failure IS fatal for this cycle — surface it to callers.
    console.error(
      `[catalog-refresh] CW fallback failed: ${summarizePrismaError(err)}`,
    );
    throw err;
  }
  console.info(
    `[catalog-refresh] CW fallback refresh completed in ${Date.now() - fallbackStartedAt}ms (${Date.now() - refreshStartedAt}ms total cycle)`,
  );
};
/**
 * Upsert every collector item into the catalogItem table in batches.
 *
 * The create and update payloads share all fields except the immutable
 * cwCatalogId key, so a single payload builder feeds both sides of the
 * upsert. Row failures are logged (rate-limited) and counted; they do not
 * abort the run.
 *
 * @param collectorItems - Normalized collector items keyed by CW catalog id
 * @param logCatalogError - Rate-limited row-level error logger
 * @returns Number of rows successfully upserted
 */
const upsertCollectorItems = async (
  collectorItems: Map<number, NormalizedCatalogCollectorItem>,
  logCatalogError: (
    context: string,
    err: unknown,
    detail?: Record<string, unknown>,
  ) => void,
): Promise<number> => {
  let updatedCount = 0;
  const totalItems = collectorItems.size;
  let processedCount = 0;
  // All mutable fields shared by both the create and update branches.
  const toPayload = (item: NormalizedCatalogCollectorItem) => ({
    identifier: item.identifier,
    name: item.name,
    description: item.description,
    customerDescription: item.customerDescription,
    internalNotes: item.internalNotes,
    category: item.category,
    categoryCwId: item.categoryCwId,
    subcategory: item.subcategory,
    subcategoryCwId: item.subcategoryCwId,
    manufacturer: item.manufacturer,
    manufactureCwId: item.manufactureCwId,
    partNumber: item.partNumber,
    vendorName: item.vendorName,
    vendorSku: item.vendorSku,
    vendorCwId: item.vendorCwId,
    price: item.price,
    cost: item.cost,
    inactive: item.inactive,
    salesTaxable: item.salesTaxable,
    onHand: item.onHand,
    cwLastUpdated: item.cwLastUpdated,
  });
  const upsertTasks: Array<() => Promise<void>> = [...collectorItems.values()].map(
    (item) => async () => {
      const payload = toPayload(item);
      try {
        await prisma.catalogItem.upsert({
          where: { cwCatalogId: item.cwCatalogId },
          create: { cwCatalogId: item.cwCatalogId, ...payload },
          update: payload,
        });
      } catch (err) {
        logCatalogError("collector upsert failed", err, {
          id: item.cwCatalogId,
        });
        throw err;
      }
      updatedCount += 1;
      processedCount += 1;
      // Log the first few rows, then every 250th, and always the final row.
      if (
        processedCount <= 5 ||
        processedCount % 250 === 0 ||
        processedCount === totalItems
      ) {
        console.info(
          `[catalog-refresh] Collector upsert progress: ${processedCount}/${totalItems}`,
        );
      }
    },
  );
  const upsertFailures = await runBatchUpserts(upsertTasks);
  if (upsertFailures > 0) {
    console.warn(
      `[catalog-refresh] ${upsertFailures} collector upsert task(s) failed; remaining items will retry next cycle`,
    );
  }
  return updatedCount;
};
/**
 * CW API fallback refresh: diff ConnectWise catalog summaries against the
 * local DB and re-fetch full item data only for rows that are new or stale.
 *
 * Stages: summary fetch → stale-id diff → full item fetch (slow parallel) →
 * inventory fetch (slow parallel) → batched upserts. Emits skipped/started/
 * completed lifecycle events.
 *
 * @param logCatalogError - Rate-limited row-level error logger for this cycle
 */
const refreshCatalogFromCw = async (
  logCatalogError: (
    context: string,
    err: unknown,
    detail?: Record<string, unknown>,
  ) => void,
) => {
  // 1. Fetch lightweight summaries from CW (id + _info with lastUpdated)
  const cwSummaries = await catalogCw.fetchAllCatalogSummary();
  // 2. Fetch all DB items with their cwCatalogId and cwLastUpdated
  const dbItems = await prisma.catalogItem.findMany({
    select: { cwCatalogId: true, cwLastUpdated: true },
  });
  const dbMap = new Map(
    dbItems.map((item) => [item.cwCatalogId, item.cwLastUpdated]),
  );
  // 3. Compare CW lastUpdated vs DB cwLastUpdated — collect IDs that are stale or new
  const staleIds: number[] = [];
  for (const [cwId, summary] of cwSummaries) {
    const cwLastUpdated = summary._info?.lastUpdated
      ? new Date(summary._info.lastUpdated)
      : null;
    const dbLastUpdated = dbMap.get(cwId) ?? null;
    // New item (not in DB) or CW has a newer timestamp
    if (!dbLastUpdated || (cwLastUpdated && cwLastUpdated > dbLastUpdated)) {
      staleIds.push(cwId);
    }
  }
  if (staleIds.length === 0) {
    // Nothing stale — report a skipped cycle and avoid any further CW calls.
    events.emit("cw:catalog:refresh:skipped", {
      totalCw: cwSummaries.size,
      totalDb: dbItems.length,
      staleCount: 0,
    });
    return;
  }
  events.emit("cw:catalog:refresh:started", {
    totalCw: cwSummaries.size,
    totalDb: dbItems.length,
    staleCount: staleIds.length,
  });
  // 4. Fetch full CW item data for stale IDs using slow, bounded concurrency
  const cwItemMap = new Map<number, any>();
  const itemFetchTasks: Array<() => Promise<void>> = staleIds.map(
    (cwId) => async () => {
      const item = await catalogCw.fetchByCatalogId(cwId);
      cwItemMap.set(cwId, item);
    },
  );
  const itemFetchFailures = await runSlowParallel(itemFetchTasks);
  // 5. Fetch inventory onHand for stale IDs using the same slow parallel strategy
  const onHandMap = new Map<number, number>();
  const inventoryTasks: Array<() => Promise<void>> = staleIds.map(
    (cwId) => async () => {
      try {
        const onHand = await catalogCw.fetchInventoryOnHand(cwId);
        onHandMap.set(cwId, onHand);
      } catch {
        // Best-effort: a failed inventory lookup records zero on hand.
        onHandMap.set(cwId, 0);
      }
    },
  );
  const inventoryFailures = await runSlowParallel(inventoryTasks);
  // 6. Upsert stale/new items with bounded slow parallel execution
  let updatedCount = 0;
  const upsertTasks: Array<() => Promise<void>> = staleIds.map(
    (cwId) => async () => {
      const item = cwItemMap.get(cwId);
      // Full-item fetch failed earlier — skip; it will retry next cycle.
      if (!item) return;
      const cwLastUpdated = item._info?.lastUpdated
        ? new Date(item._info.lastUpdated)
        : new Date();
      const onHand = onHandMap.get(cwId) ?? 0;
      try {
        await prisma.catalogItem.upsert({
          where: { cwCatalogId: cwId },
          create: {
            cwCatalogId: cwId,
            identifier: item.identifier,
            // NOTE(review): name is sourced from item.description — confirm the
            // CW catalog payload has no separate display-name field.
            name: item.description,
            description: item.description,
            customerDescription: item.customerDescription,
            internalNotes: item.notes,
            category: item.category?.name,
            categoryCwId: item.category?.id,
            subcategory: item.subcategory?.name,
            subcategoryCwId: item.subcategory?.id,
            manufacturer: item.manufacturer?.name,
            manufactureCwId: item.manufacturer?.id,
            partNumber: item.manufacturerPartNumber,
            vendorName: item.vendor?.name,
            vendorSku: item.vendorSku,
            vendorCwId: item.vendor?.id,
            price: item.price,
            cost: item.cost,
            inactive: item.inactiveFlag,
            salesTaxable: item.taxableFlag,
            onHand,
            cwLastUpdated,
          },
          update: {
            name: item.description,
            identifier: item.identifier,
            description: item.description,
            customerDescription: item.customerDescription,
            internalNotes: item.notes,
            category: item.category?.name,
            categoryCwId: item.category?.id,
            subcategory: item.subcategory?.name,
            subcategoryCwId: item.subcategory?.id,
            manufacturer: item.manufacturer?.name,
            manufactureCwId: item.manufacturer?.id,
            partNumber: item.manufacturerPartNumber,
            vendorName: item.vendor?.name,
            vendorSku: item.vendorSku,
            vendorCwId: item.vendor?.id,
            price: item.price,
            cost: item.cost,
            inactive: item.inactiveFlag,
            salesTaxable: item.taxableFlag,
            onHand,
            cwLastUpdated,
          },
        });
      } catch (err) {
        logCatalogError("CW fallback upsert failed", err, {
          id: cwId,
        });
        throw err;
      }
      updatedCount += 1;
    },
  );
  const upsertFailures = await runBatchUpserts(upsertTasks);
  const failedTasks = itemFetchFailures + inventoryFailures + upsertFailures;
  if (failedTasks > 0) {
    console.warn(
      `[catalog-refresh] ${failedTasks} slow-parallel task(s) failed; remaining items will retry next cycle`,
    );
  }
  events.emit("cw:catalog:refresh:completed", {
    totalCw: cwSummaries.size,
    totalDb: dbItems.length,
    staleCount: staleIds.length,
    itemsUpdated: updatedCount,
  });
};
@@ -1,92 +0,0 @@
import { prisma } from "../../../constants";
import { events } from "../../globalEvents";
import { catalogCw } from "./catalog";
const CONCURRENCY = 6;
const BATCH_DELAY_MS = 250;
const sleep = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms));
/**
 * Sweep inventory on-hand counts for all active catalog items.
 *
 * Fetches fresh onHand values from CW in small, delayed batches, keeps the
 * existing DB value when a lookup fails, and writes back only the rows
 * whose count actually changed. Emits skipped/started/completed events.
 */
export const refreshInventory = async () => {
  events.emit("cw:inventory:refresh:check");
  // 1. Load every active catalog item we track locally.
  const dbItems = await prisma.catalogItem.findMany({
    where: { inactive: false },
    select: { cwCatalogId: true, onHand: true },
  });
  if (dbItems.length === 0) {
    events.emit("cw:inventory:refresh:skipped", {
      totalItems: 0,
      updatedCount: 0,
    });
    return;
  }
  events.emit("cw:inventory:refresh:started", {
    totalItems: dbItems.length,
  });
  // 2. Pull fresh onHand values in bounded batches to stay gentle on CW.
  const onHandMap = new Map<number, number>();
  let failedCount = 0;
  for (let start = 0; start < dbItems.length; start += CONCURRENCY) {
    const chunk = dbItems.slice(start, start + CONCURRENCY);
    const settled = await Promise.allSettled(
      chunk.map(async (item) => {
        try {
          const fresh = await catalogCw.fetchInventoryOnHand(item.cwCatalogId);
          onHandMap.set(item.cwCatalogId, fresh);
        } catch {
          // Lookup failed — fall back to the value we already have.
          onHandMap.set(item.cwCatalogId, item.onHand);
        }
      }),
    );
    failedCount += settled.filter((r) => r.status === "rejected").length;
    // Throttle before the next batch; skip the delay after the last one.
    if (start + CONCURRENCY < dbItems.length) await sleep(BATCH_DELAY_MS);
  }
  // 3. Persist only rows whose onHand actually changed.
  const changed = dbItems.filter(
    (item) => (onHandMap.get(item.cwCatalogId) ?? item.onHand) !== item.onHand,
  );
  if (changed.length === 0) {
    events.emit("cw:inventory:refresh:skipped", {
      totalItems: dbItems.length,
      updatedCount: 0,
    });
    return;
  }
  const updateResults = await Promise.all(
    changed.map((item) =>
      prisma.catalogItem.update({
        where: { cwCatalogId: item.cwCatalogId },
        data: { onHand: onHandMap.get(item.cwCatalogId) ?? item.onHand },
      }),
    ),
  );
  events.emit("cw:inventory:refresh:completed", {
    totalItems: dbItems.length,
    updatedCount: updateResults.length,
  });
  if (failedCount > 0) {
    console.warn(
      `[inventory-refresh] ${failedCount} task(s) failed; fallback values were used and will retry next sweep`,
    );
  }
};
@@ -1,46 +0,0 @@
import { connectWiseApi, prisma } from "../../constants";
import { events } from "../globalEvents";
import { fetchAllCwCompanies } from "./fetchAllCompanies";
/**
 * Refresh the local Company table from ConnectWise.
 *
 * Pulls every CW company, upserts them in fixed-size batches (keeping the
 * database connection pool healthy), and emits check/started/completed
 * lifecycle events with before/after counts.
 */
export const refreshCompanies = async () => {
  events.emit("cw:companies:refresh:check");
  const internalCompanyCount = await prisma.company.count();
  events.emit("cw:companies:refresh:started");
  const allCompanies = await fetchAllCwCompanies();
  const externalCompanyCount = allCompanies.size;
  // Batch upserts to avoid exhausting the database connection pool
  const batchSize = 50;
  const companies = Array.from(allCompanies.values());
  let updatedCount = 0;
  for (let start = 0; start < companies.length; start += batchSize) {
    const chunk = companies.slice(start, start + batchSize);
    const chunkResults = await Promise.all(
      chunk.map((company) =>
        prisma.company.upsert({
          where: { cw_CompanyId: company.id },
          create: {
            cw_CompanyId: company.id,
            cw_Identifier: company.identifier,
            name: company.name,
          },
          // Only the name is mutable on refresh; identifiers stay fixed.
          update: {
            name: company.name,
          },
        }),
      ),
    );
    updatedCount += chunkResults.length;
  }
  events.emit("cw:companies:refresh:completed", {
    internalCompaniesCount: internalCompanyCount,
    externalCompaniesCount: externalCompanyCount,
    companiesUpdated: updatedCount,
  });
};
@@ -1,79 +0,0 @@
import { connectWiseApi } from "../../../constants";
/**
 * Shape of a site record returned by the ConnectWise
 * `/company/companies/{id}/sites` endpoints.
 */
export interface CWCompanySite {
  /** CW site ID */
  id: number;
  /** Display name of the site */
  name: string;
  addressLine1: string;
  addressLine2?: string;
  city: string;
  /** State/province reference; null when CW has none on file */
  stateReference: { id: number; identifier: string; name: string } | null;
  zip: string;
  /** Country reference; null when CW has none on file */
  country: { id: number; name: string } | null;
  phoneNumber: string;
  faxNumber: string;
  taxCodeId: number | null;
  expenseReimbursement: number;
  /** Flags marking this site's default roles for the company */
  primaryAddressFlag: boolean;
  defaultShippingFlag: boolean;
  defaultBillingFlag: boolean;
  defaultMailingFlag: boolean;
  mobileGuid: string;
  calendar: { id: number; name: string } | null;
  timeZone: { id: number; name: string } | null;
  /** Owning company reference */
  company: { id: number; identifier: string; name: string };
  // CW metadata map (e.g. lastUpdated, links) — values observed as strings.
  _info: Record<string, string>;
}
/**
 * Fetch every site registered under a ConnectWise company.
 *
 * @param cwCompanyId - The ConnectWise company ID
 * @returns Array of CW company sites
 */
export const fetchCompanySites = async (
  cwCompanyId: number,
): Promise<CWCompanySite[]> => {
  // Single page is assumed sufficient at pageSize=1000.
  const url = `/company/companies/${cwCompanyId}/sites?pageSize=1000`;
  const { data } = await connectWiseApi.get(url);
  return data;
};
/**
 * Fetch one site of a ConnectWise company by its CW site ID.
 *
 * @param cwCompanyId - The ConnectWise company ID
 * @param cwSiteId - The ConnectWise site ID
 * @returns The CW company site
 */
export const fetchCompanySite = async (
  cwCompanyId: number,
  cwSiteId: number,
): Promise<CWCompanySite> => {
  const url = `/company/companies/${cwCompanyId}/sites/${cwSiteId}`;
  const { data } = await connectWiseApi.get(url);
  return data;
};
/**
 * Flatten a raw CW site into the clean shape our API returns.
 *
 * Empty phone/fax strings become null; a missing state becomes null; a
 * missing country defaults to "United States".
 */
export const serializeCwSite = (site: CWCompanySite) => {
  const address = {
    line1: site.addressLine1,
    line2: site.addressLine2 ?? null,
    city: site.city,
    state: site.stateReference?.name ?? null,
    zip: site.zip,
    country: site.country?.name ?? "United States",
  };
  return {
    id: site.id,
    name: site.name,
    address,
    phoneNumber: site.phoneNumber || null,
    faxNumber: site.faxNumber || null,
    primaryAddressFlag: site.primaryAddressFlag,
    defaultShippingFlag: site.defaultShippingFlag,
    defaultBillingFlag: site.defaultBillingFlag,
    defaultMailingFlag: site.defaultMailingFlag,
  };
};
+1 -18
View File
@@ -1,24 +1,7 @@
import { Company } from "../../../generated/prisma/client";
import { prisma } from "../../constants";
import { fetchCwCompanyById } from "./fetchCompany";
/**
 * Sync the internal Company row for a ConnectWise company.
 *
 * Fetches the company from ConnectWise, then upserts the local record
 * (keyed by cw_CompanyId) so the stored name stays current.
 *
 * Fix: removed an unreachable `return prisma.company.findFirst(...)` that
 * followed the first return statement — dead code left behind by a merge.
 *
 * @param companyId - The ConnectWise company ID
 * @returns The upserted local Company, or null when CW has no such company
 */
export const updateCwInternalCompany = async (
  companyId: number,
): Promise<Company | null> => {
  const cwCompany = await fetchCwCompanyById(companyId);
  if (!cwCompany) return null;
  // Upsert keyed on the CW id; only the name is refreshed on update.
  return prisma.company.upsert({
    where: { cw_CompanyId: cwCompany.id },
    create: {
      cw_CompanyId: cwCompany.id,
      cw_Identifier: cwCompany.identifier,
      name: cwCompany.name,
    },
    update: {
      name: cwCompany.name,
    },
  });
};
@@ -1,6 +0,0 @@
export { userDefinedFieldsCw } from "./userDefinedFields";
export type {
CWUserDefinedField,
CWUserDefinedFieldOption,
CWUserDefinedFieldInfo,
} from "./udf.types";
@@ -1,119 +0,0 @@
import { Collection } from "@discordjs/collection";
import { connectWiseApi, redis } from "../../../constants";
import { events } from "../../globalEvents";
import { CWUserDefinedField } from "./udf.types";
const REDIS_KEY = "cw:userDefinedFields";
/** In-memory cache of all CW User Defined Fields, keyed by UDF id */
let cache: Collection<number, CWUserDefinedField> = new Collection();
export const userDefinedFieldsCw = {
  /**
   * Get Cache
   *
   * Returns the in-memory Collection of all UDFs, hydrating it lazily:
   * memory first, then the Redis snapshot, then a live API refresh.
   */
  get: async (): Promise<Collection<number, CWUserDefinedField>> => {
    if (cache.size > 0) return cache;
    const snapshot = await redis.get(REDIS_KEY);
    if (!snapshot) {
      // Cold everywhere — go straight to the CW API.
      return userDefinedFieldsCw.refresh();
    }
    const fields: CWUserDefinedField[] = JSON.parse(snapshot);
    cache = new Collection(fields.map((field) => [field.id, field]));
    return cache;
  },
  /**
   * Fetch All User Defined Fields
   *
   * Pulls every UDF from the ConnectWise API without touching the cache;
   * use `refresh()` to update cache and Redis.
   */
  fetchAll: async (): Promise<Collection<number, CWUserDefinedField>> => {
    const pageSize = 1000;
    const response = await connectWiseApi.get(
      `/system/userDefinedFields?pageSize=${pageSize}`,
    );
    const fetched: CWUserDefinedField[] = response.data;
    return new Collection(fetched.map((udf) => [udf.id, udf]));
  },
  /**
   * Refresh
   *
   * Re-fetches every UDF from ConnectWise, swaps the in-memory cache, and
   * persists the snapshot to Redis. Emits started/completed events.
   */
  refresh: async (): Promise<Collection<number, CWUserDefinedField>> => {
    events.emit("cw:udf:refresh:started");
    cache = await userDefinedFieldsCw.fetchAll();
    // Persist the fresh snapshot so future cold starts skip the API.
    await redis.set(REDIS_KEY, JSON.stringify([...cache.values()]));
    events.emit("cw:udf:refresh:completed", { count: cache.size });
    return cache;
  },
  /**
   * Find by ID
   *
   * Looks up one UDF by its ConnectWise ID from the (hydrated) cache.
   */
  findById: async (id: number): Promise<CWUserDefinedField | undefined> => {
    const items = await userDefinedFieldsCw.get();
    return items.get(id);
  },
  /**
   * Find by Caption
   *
   * Returns the first UDF whose caption matches, case-insensitively.
   */
  findByCaption: async (
    caption: string,
  ): Promise<CWUserDefinedField | undefined> => {
    const wanted = caption.toLowerCase();
    const items = await userDefinedFieldsCw.get();
    return items.find((udf) => udf.caption.toLowerCase() === wanted);
  },
  /**
   * Find by Screen ID
   *
   * Returns every UDF attached to the given screenId.
   */
  findByScreenId: async (
    screenId: string,
  ): Promise<Collection<number, CWUserDefinedField>> => {
    const items = await userDefinedFieldsCw.get();
    return items.filter((udf) => udf.screenId === screenId);
  },
  /**
   * Invalidate
   *
   * Drops both the in-memory Collection and the Redis snapshot.
   */
  invalidate: async (): Promise<void> => {
    cache = new Collection();
    await redis.del(REDIS_KEY);
  },
};
+62 -9
View File
@@ -120,11 +120,22 @@ const fonts = {
},
};
const printer = new PdfPrinter(fonts as never);
// URL resolver stub: resolves nothing and reports no resolved resources.
const noOpUrlResolver = {
  resolve: async () => {},
  resolved: async () => [],
};
const printer = new PdfPrinter(fonts as never, undefined, noOpUrlResolver);
// Format an amount as "$1,234.56" (thousands grouping via regex lookahead).
// Note: negative inputs render as "$-1,234.56"; use fmtMoney for signed values.
const fmt = (n: number) => {
  const fixed = n.toFixed(2);
  return "$" + fixed.replace(/\B(?=(\d{3})+(?!\d))/g, ",");
};
// Format a signed amount as "$1,234.56" / "-$1,234.56": group the absolute
// value with thousands separators, then re-apply the sign in front of "$".
const fmtMoney = (n: number) => {
  const magnitude = Math.abs(n).toFixed(2);
  const grouped = "$" + magnitude.replace(/\B(?=(\d{3})+(?!\d))/g, ",");
  return n < 0 ? "-" + grouped : grouped;
};
const hr = (color = DIVIDER, weight = 0.75) => ({
canvas: [
{
@@ -152,12 +163,12 @@ function loadLogoDataUrl(logoPath: string): string | null {
export async function generateQuote(
data: QuoteData,
theme: Partial<QuoteTheme> = {},
logoPath = DEFAULT_LOGO_PATH,
logoPath = DEFAULT_LOGO_PATH
): Promise<Buffer> {
const t: QuoteTheme = { ...DEFAULT_THEME, ...theme };
const subTotal = data.lineItems.reduce(
(sum, item) => sum + item.qty * item.unitPrice,
0,
0
);
const taxableSubTotal = Math.max(0, data.taxableSubtotal ?? subTotal);
const taxAmount = taxableSubTotal * data.tax.rate;
@@ -166,6 +177,13 @@ export async function generateQuote(
const showPricing = data.showLineItemPricing ?? false;
const discountTotal = data.lineItems.reduce((sum, item) => {
const lineTotal = item.qty * item.unitPrice;
return lineTotal < 0 ? sum + lineTotal : sum;
}, 0);
const hasDiscounts = discountTotal < 0;
const showDiscount = !showPricing && hasDiscounts;
const tableHeader = [
{ text: "Qty", style: "thCell", alignment: "center" },
{ text: "Description", style: "thCell" },
@@ -174,10 +192,12 @@ export async function generateQuote(
{ text: "Unit Price", style: "thCell", alignment: "right" },
{ text: "Total", style: "thCell", alignment: "right" },
]
: showDiscount
? [{ text: "", style: "thCell", alignment: "right" }]
: []),
];
const colCount = showPricing ? 4 : 2;
const colCount = showPricing ? 4 : showDiscount ? 3 : 2;
const tableRows: Record<string, unknown>[][] = [];
for (const item of data.lineItems) {
@@ -202,13 +222,25 @@ export async function generateQuote(
...(showPricing
? [
{
text: fmt(item.unitPrice),
text: fmtMoney(item.unitPrice),
style: "tdCell",
alignment: "right",
noWrap: true,
},
{
text: fmt(item.qty * item.unitPrice),
text: fmtMoney(item.qty * item.unitPrice),
style: "tdCell",
alignment: "right",
noWrap: true,
},
]
: showDiscount
? [
{
text:
item.qty * item.unitPrice < 0
? fmtMoney(item.qty * item.unitPrice)
: "",
style: "tdCell",
alignment: "right",
noWrap: true,
@@ -231,7 +263,7 @@ export async function generateQuote(
number,
number,
number,
number,
number
],
info: {
@@ -563,7 +595,7 @@ export async function generateQuote(
table: {
headerRows: 1,
dontBreakRows: true,
widths: showPricing ? [40, "*", 75, 75] : [40, "*"],
widths: showPricing ? [40, "*", 75, 75] : showDiscount ? [40, "*", 75] : [40, "*"],
body: [tableHeader, ...tableRows],
},
layout: {
@@ -614,6 +646,26 @@ export async function generateQuote(
border: [false, false, false, true],
},
],
...(hasDiscounts
? [
[
{
text: "Discount",
style: "totalsLabel",
margin: [0, 5, 0, 5],
border: [false, false, false, true],
},
{
text: fmtMoney(discountTotal),
style: "totalsValue",
alignment: "right",
noWrap: true,
margin: [0, 5, 0, 5],
border: [false, false, false, true],
},
],
]
: []),
[
{
text: data.tax.label,
@@ -651,7 +703,8 @@ export async function generateQuote(
],
},
layout: {
hLineWidth: (i: number) => (i >= 1 && i <= 2 ? 0.5 : 0),
hLineWidth: (i: number) =>
i >= 1 && i <= (hasDiscounts ? 3 : 2) ? 0.5 : 0,
vLineWidth: () => 0,
hLineColor: () => "#E0D6C6",
},
@@ -1,427 +0,0 @@
import { Socket } from "socket.io-client";
import {
createWorkerJob,
emitWorkerGlobalEvent,
workerLog,
} from "../jobFactory";
import { WorkerQueue } from "../queues";
import {
TTL_ARCHIVED_MS,
fetchAndCacheActivities,
fetchAndCacheNotes,
fetchAndCacheContacts,
fetchAndCacheProducts,
fetchAndCacheOppCwData,
fetchAndCacheCompanyCwData,
companyCwCacheKey,
} from "../../cache/opportunityCache";
import { computeCacheTTL } from "../../algorithms/computeCacheTTL";
import { prisma, redis } from "../../../constants";
/**
* Worker factory for active opportunity cache refresh.
*
* Runs the unified opportunity cache refresh pass for all opportunities.
* Active/recent opportunities use adaptive TTL, while archived opportunities
* (where adaptive TTL resolves to null) are refreshed with TTL_ARCHIVED_MS.
* Checks which cache keys have expired and re-fetches only those from
* ConnectWise.
*
* Designed to be called on the active cache job interval.
*
* @param socket - Socket.IO client connection to manager
* @returns Promise that resolves when refresh completes
*/
export async function refreshActiveOpportunitiesWorker(
  socket: Socket,
  opts?: {
    runFullRefresh?: () => Promise<void>;
  },
): Promise<void> {
  return createWorkerJob(
    socket,
    WorkerQueue.REFRESH_ACTIVE_OPPORTUNITIES,
    async (workerSocket: Socket) => {
      // Optional pre-stage: run the caller-supplied full refresh first.
      if (opts?.runFullRefresh) {
        workerLog(
          workerSocket,
          "[active-refresh] Starting full opportunities refresh stage",
        );
        await opts.runFullRefresh();
        workerLog(
          workerSocket,
          "[active-refresh] Completed full opportunities refresh stage",
        );
      }
      const lockKey = "worker-lock:cache:opportunities:refresh:active";
      // Unique token so we only release a lock we actually own.
      const lockValue = `${process.pid}:${Date.now()}:${Math.random()}`;
      // Lock TTL defaults to 1 800 000 ms (30 min); overridable via env.
      const lockTtlMs = Number(Bun.env.ACTIVE_REFRESH_LOCK_TTL_MS ?? "1800000");
      // SET ... PX <ttl> NX: acquire only if nobody holds it; auto-expires.
      const lockSet = await redis.set(
        lockKey,
        lockValue,
        "PX",
        lockTtlMs,
        "NX",
      );
      if (lockSet !== "OK") {
        // Another worker is already running this refresh — bail out.
        workerLog(
          workerSocket,
          `[active-refresh] Skipping run: lock already held (${lockKey})`,
          "WARN",
        );
        return;
      }
      try {
        await performActiveOpportunityRefresh(workerSocket);
      } finally {
        // NOTE(review): GET-then-DEL release is not atomic — if the lock
        // expires between the two calls, another worker could acquire it and
        // this DEL would remove that worker's lock. Consider an atomic
        // compare-and-delete (Lua script); confirm whether the race matters
        // at this TTL.
        const currentLockValue = await redis.get(lockKey);
        if (currentLockValue === lockValue) {
          await redis.del(lockKey);
        }
      }
    },
  );
}
/**
* Core logic for active opportunity cache refresh.
*
* Queries all opportunities, checks which cache keys have expired, and
* re-fetches from ConnectWise only for expired entries.
*/
async function performActiveOpportunityRefresh(
  workerSocket: Socket,
): Promise<void> {
  // Load every opportunity plus the fields the adaptive-TTL algorithm needs.
  const opportunities = await prisma.opportunity.findMany({
    select: {
      cwOpportunityId: true,
      closedFlag: true,
      closedDate: true,
      expectedCloseDate: true,
      cwLastUpdated: true,
      statusCwId: true,
      company: { select: { cw_CompanyId: true } },
    },
    orderBy: { cwLastUpdated: "desc" },
  });
  workerLog(
    workerSocket,
    `[active-refresh] Starting refresh for ${opportunities.length} opportunities`,
  );
  emitWorkerGlobalEvent(workerSocket, "cache:opportunities:refresh:started", {
    totalOpportunities: opportunities.length,
  });
  // Per-category success counters, reported in the completion event below.
  let activitiesRefreshed = 0;
  let companiesRefreshed = 0;
  let notesRefreshed = 0;
  let contactsRefreshed = 0;
  let productsRefreshed = 0;
  let oppCwDataRefreshed = 0;
  let archivedCount = 0;
  const eligibleOpportunities: Array<{
    cwOpportunityId: number;
    ttl: number;
    companyId: number | null;
  }> = [];
  const companyTtlById = new Map<number, number>();
  for (const opp of opportunities) {
    const adaptiveTtl = computeCacheTTL({
      closedFlag: opp.closedFlag,
      closedDate: opp.closedDate,
      expectedCloseDate: opp.expectedCloseDate,
      lastUpdated: opp.cwLastUpdated,
    });
    // Archived opportunities (adaptive TTL null) still get refreshed,
    // just with the long TTL_ARCHIVED_MS instead of an adaptive one.
    const ttl = adaptiveTtl ?? TTL_ARCHIVED_MS;
    if (adaptiveTtl === null) archivedCount++;
    const companyId = opp.company?.cw_CompanyId ?? null;
    eligibleOpportunities.push({
      cwOpportunityId: opp.cwOpportunityId,
      ttl,
      companyId,
    });
    if (companyId === null) continue;
    // A company shared by several opportunities keeps the longest TTL seen.
    const prevTtl = companyTtlById.get(companyId) ?? 0;
    companyTtlById.set(companyId, Math.max(prevTtl, ttl));
  }
  // Batch-check which keys already exist via a single pipeline.
  // One EXISTS command per key avoids Redis EXISTS multi-key count semantics.
  const pipeline = redis.pipeline();
  for (const opp of eligibleOpportunities) {
    pipeline.exists(`opp:cw-data:${opp.cwOpportunityId}`);
    pipeline.exists(`opp:activities:${opp.cwOpportunityId}`);
    pipeline.exists(`opp:notes:${opp.cwOpportunityId}`);
    pipeline.exists(`opp:contacts:${opp.cwOpportunityId}`);
    pipeline.exists(`opp:products:${opp.cwOpportunityId}`);
  }
  for (const companyId of Array.from(companyTtlById.keys())) {
    pipeline.exists(companyCwCacheKey(companyId));
  }
  const existsResults = (await pipeline.exec()) || [];
  // Each pipeline result is an [error, value] pair; EXISTS yields 0 or 1.
  const existsAt = (index: number): boolean => {
    const value = existsResults[index]?.[1];
    return typeof value === "number" && value > 0;
  };
  // IMPORTANT: the consumption order below must mirror the enqueue order
  // above exactly — 5 opp keys per opportunity, then one key per company.
  let existsIndex = 0;
  const oppExistsById = new Map<
    number,
    {
      oppCwDataExists: boolean;
      activitiesExists: boolean;
      notesExists: boolean;
      contactsExists: boolean;
      productsExists: boolean;
    }
  >();
  for (const opp of eligibleOpportunities) {
    oppExistsById.set(opp.cwOpportunityId, {
      oppCwDataExists: existsAt(existsIndex++),
      activitiesExists: existsAt(existsIndex++),
      notesExists: existsAt(existsIndex++),
      contactsExists: existsAt(existsIndex++),
      productsExists: existsAt(existsIndex++),
    });
  }
  const companyCacheExistsById = new Map<number, boolean>();
  for (const companyId of Array.from(companyTtlById.keys())) {
    companyCacheExistsById.set(companyId, existsAt(existsIndex++));
  }
  // Build one task per missing cache key; tasks swallow their own errors so
  // a single failed fetch never aborts the whole refresh pass.
  const refreshTasks: (() => Promise<void>)[] = [];
  let plannedOppCwData = 0;
  let plannedActivities = 0;
  let plannedNotes = 0;
  let plannedContacts = 0;
  let plannedProducts = 0;
  let plannedCompanies = 0;
  for (const opp of eligibleOpportunities) {
    const existsForOpp = oppExistsById.get(opp.cwOpportunityId);
    if (!existsForOpp) continue;
    if (!existsForOpp.oppCwDataExists) {
      plannedOppCwData++;
      refreshTasks.push(async () => {
        try {
          await fetchAndCacheOppCwData(opp.cwOpportunityId, opp.ttl);
          oppCwDataRefreshed++;
        } catch (error) {
          workerLog(
            workerSocket,
            `[active-refresh] oppCwData refresh failed for opp${opp.cwOpportunityId}: ${describeError(error)}`,
          );
        }
      });
    }
    if (!existsForOpp.activitiesExists) {
      plannedActivities++;
      refreshTasks.push(async () => {
        try {
          await fetchAndCacheActivities(opp.cwOpportunityId, opp.ttl);
          activitiesRefreshed++;
        } catch (error) {
          workerLog(
            workerSocket,
            `[active-refresh] activities refresh failed for opp${opp.cwOpportunityId}: ${describeError(error)}`,
          );
        }
      });
    }
    if (!existsForOpp.notesExists) {
      plannedNotes++;
      refreshTasks.push(async () => {
        try {
          await fetchAndCacheNotes(opp.cwOpportunityId, opp.ttl);
          notesRefreshed++;
        } catch (error) {
          workerLog(
            workerSocket,
            `[active-refresh] notes refresh failed for opp${opp.cwOpportunityId}: ${describeError(error)}`,
          );
        }
      });
    }
    if (!existsForOpp.contactsExists) {
      plannedContacts++;
      refreshTasks.push(async () => {
        try {
          await fetchAndCacheContacts(opp.cwOpportunityId, opp.ttl);
          contactsRefreshed++;
        } catch (error) {
          workerLog(
            workerSocket,
            `[active-refresh] contacts refresh failed for opp${opp.cwOpportunityId}: ${describeError(error)}`,
          );
        }
      });
    }
    if (!existsForOpp.productsExists) {
      plannedProducts++;
      refreshTasks.push(async () => {
        try {
          await fetchAndCacheProducts(opp.cwOpportunityId, opp.ttl);
          productsRefreshed++;
        } catch (error) {
          workerLog(
            workerSocket,
            `[active-refresh] products refresh failed for opp${opp.cwOpportunityId}: ${describeError(error)}`,
          );
        }
      });
    }
  }
  // Company CW data is refreshed once per unique company, not per opportunity.
  for (const [companyId, ttl] of Array.from(companyTtlById.entries())) {
    const companyExists = companyCacheExistsById.get(companyId) ?? false;
    if (companyExists) continue;
    plannedCompanies++;
    refreshTasks.push(async () => {
      try {
        await fetchAndCacheCompanyCwData(companyId, ttl);
        companiesRefreshed++;
      } catch (error) {
        workerLog(
          workerSocket,
          `[active-refresh] company data refresh failed for company${companyId}: ${describeError(error)}`,
        );
      }
    });
  }
  if (companyTtlById.size > 0) {
    const missingCompanies = Array.from(companyTtlById.keys()).filter(
      (id) => !(companyCacheExistsById.get(id) ?? false),
    ).length;
    workerLog(
      workerSocket,
      `[active-refresh] Company cache checks: ${companyTtlById.size} unique, ${missingCompanies} missing`,
    );
  }
  workerLog(
    workerSocket,
    `[active-refresh] Planned tasks: eligible=${eligibleOpportunities.length}, archived=${archivedCount}, totalTasks=${refreshTasks.length}, oppCwData=${plannedOppCwData}, activities=${plannedActivities}, notes=${plannedNotes}, contacts=${plannedContacts}, products=${plannedProducts}, companies=${plannedCompanies}`,
  );
  if (refreshTasks.length === 0) {
    workerLog(workerSocket, `[active-refresh] No cache keys needed refresh`);
  }
  // Run refresh tasks via a continuous worker pool (no inter-batch idle waits).
  const parsedConcurrency = Number(Bun.env.ACTIVE_REFRESH_CONCURRENCY ?? "12");
  const CONCURRENCY = Number.isFinite(parsedConcurrency)
    ? Math.max(1, Math.floor(parsedConcurrency))
    : 12;
  const progressEvery = Math.max(
    1,
    Number(Bun.env.ACTIVE_REFRESH_PROGRESS_EVERY ?? "50") || 50,
  );
  workerLog(
    workerSocket,
    `[active-refresh] Runner config: concurrency=${CONCURRENCY}, progressEvery=${progressEvery}`,
    "DEBUG",
  );
  let completedTasks = 0;
  let failedTasks = 0;
  let nextTaskIndex = 0;
  // Each pool worker claims the next unclaimed index until the list drains.
  // The shared-index increment is safe: no await occurs between read and write.
  const runWorker = async () => {
    while (true) {
      const taskIndex = nextTaskIndex;
      nextTaskIndex++;
      const task = refreshTasks[taskIndex];
      if (!task) return;
      try {
        await task();
      } catch (error) {
        // Defensive: tasks already catch internally, but count any escapee.
        failedTasks++;
        workerLog(
          workerSocket,
          `[active-refresh] task ${taskIndex + 1}/${refreshTasks.length} failed: ${describeError(error)}`,
        );
      }
      completedTasks++;
      const shouldLogProgress =
        completedTasks % progressEvery === 0 ||
        completedTasks === refreshTasks.length;
      if (shouldLogProgress) {
        workerLog(
          workerSocket,
          `[active-refresh] Progress: completedTasks=${completedTasks}/${refreshTasks.length}, failedTasks=${failedTasks}`,
          "DEBUG",
        );
      }
    }
  };
  // Never spawn more pool workers than tasks (but at least one).
  await Promise.all(
    Array.from(
      { length: Math.min(CONCURRENCY, Math.max(1, refreshTasks.length)) },
      () => runWorker(),
    ),
  );
  if (failedTasks > 0) {
    workerLog(
      workerSocket,
      `[active-refresh] ${failedTasks} task(s) encountered errors`,
    );
  }
  emitWorkerGlobalEvent(workerSocket, "cache:opportunities:refresh:completed", {
    totalOpportunities: opportunities.length,
    activitiesRefreshed,
    companiesRefreshed,
    notesRefreshed,
    contactsRefreshed,
    productsRefreshed,
    oppCwDataRefreshed,
    archivedCount,
  });
  workerLog(
    workerSocket,
    `[active-refresh] Completed: ${activitiesRefreshed} activities, ${notesRefreshed} notes, ${contactsRefreshed} contacts, ${productsRefreshed} products, ${oppCwDataRefreshed} opp cw data, ${companiesRefreshed} companies, ${archivedCount} archived`,
  );
}
/**
 * Summarise an unknown error value as a single log-friendly line.
 *
 * Axios errors are rendered as "METHOD url -> CODE (message)" so the
 * failing request is visible at a glance; everything else falls back to
 * its `message` property or plain String() form.
 */
function describeError(err: unknown): string {
  if (err === null || typeof err !== "object") {
    return String(err);
  }
  const details = err as Record<string, any>;
  if (!details.isAxiosError) {
    return details.message ?? String(err);
  }
  const verb = (details.config?.method ?? "?").toUpperCase();
  const target = details.config?.url ?? "unknown";
  // Prefer the transport-level code (ECONNRESET etc.); fall back to the
  // HTTP status when no code was set.
  const reason = details.code || `HTTP ${details.response?.status ?? ""}`;
  return `${verb} ${target} -> ${reason} (${details.message})`;
}
@@ -1,375 +0,0 @@
import { Socket } from "socket.io-client";
import { createWorkerJob, workerLog } from "../jobFactory";
import { WorkerQueue } from "../queues";
import {
TTL_ARCHIVED_MS,
fetchAndCacheActivities,
fetchAndCacheNotes,
fetchAndCacheContacts,
fetchAndCacheProducts,
fetchAndCacheOppCwData,
fetchAndCacheCompanyCwData,
companyCwCacheKey,
} from "../../cache/opportunityCache";
import { prisma, redis } from "../../../constants";
/**
 * Options controlling the archived-opportunity cache refresh run.
 */
interface ArchiveRefreshOptions {
  /**
   * When true, overwrite every cache key without checking if it exists.
   * Used for midnight rebuild to ensure all keys are fresh.
   *
   * When false, only populate missing keys. Used on startup to avoid
   * large CW bursts on every process restart.
   *
   * Defaults to false.
   */
  force?: boolean;
}
/**
 * Worker factory for archived opportunity cache refresh.
 *
 * Refreshes cache for opportunities that are closed more than 30 days ago.
 * These opportunities fall outside the adaptive TTL window and are rebuilt
 * with a fixed 24-hour TTL.
 *
 * Typically called once per day at midnight (with force=true) to ensure
 * archived deals are not stale. On startup, force=false to avoid large
 * CW bursts.
 *
 * A Redis NX lock guarantees at most one concurrent run across processes;
 * the lock is released with an atomic compare-and-delete so an expired
 * lock re-acquired by another process is never deleted by us.
 *
 * @param socket - Socket.IO client connection to manager
 * @param options - Configuration options (force, etc.)
 * @returns Promise that resolves when refresh completes
 */
export async function refreshArchivedOpportunitiesWorker(
  socket: Socket,
  options: ArchiveRefreshOptions = {},
): Promise<void> {
  return createWorkerJob(
    socket,
    WorkerQueue.REFRESH_ARCHIVED_OPPORTUNITIES,
    async (workerSocket: Socket) => {
      const lockKey = "worker-lock:cache:opportunities:refresh:archived";
      // Unique per-run token so only our own lock can be released below.
      const lockValue = `${process.pid}:${Date.now()}:${Math.random()}`;
      // Guard against a malformed env override: NaN or a non-positive
      // value would make `redis.set(..., "PX", ...)` reject.
      const parsedLockTtl = Number(
        Bun.env.ARCHIVED_REFRESH_LOCK_TTL_MS ?? "10800000",
      );
      const lockTtlMs =
        Number.isFinite(parsedLockTtl) && parsedLockTtl > 0
          ? parsedLockTtl
          : 10_800_000; // default: 3 hours
      const lockSet = await redis.set(
        lockKey,
        lockValue,
        "PX",
        lockTtlMs,
        "NX",
      );
      if (lockSet !== "OK") {
        workerLog(
          workerSocket,
          `[archived-refresh] Skipping run: lock already held (${lockKey})`,
          "WARN",
        );
        return;
      }
      try {
        await performArchivedOpportunityRefresh(
          workerSocket,
          options.force ?? false,
        );
      } finally {
        // Atomic compare-and-delete. A plain GET + DEL pair races: if the
        // lock expired mid-run and another process re-acquired it between
        // our GET and DEL, we would delete the other holder's lock. The
        // Lua script executes atomically inside Redis, so only a lock
        // still holding OUR value is removed.
        await redis.eval(
          `if redis.call("get", KEYS[1]) == ARGV[1] then return redis.call("del", KEYS[1]) else return 0 end`,
          1,
          lockKey,
          lockValue,
        );
      }
    },
  );
}
/**
 * Core logic for archived opportunity cache refresh.
 *
 * Queries opportunities closed more than 30 days ago and refreshes their cache
 * with a fixed 24-hour TTL.
 *
 * Flow: load archived opportunities -> determine which cache keys are
 * missing (all keys when force=true, Redis EXISTS pipeline otherwise) ->
 * build one refresh task per missing key -> run tasks in bounded batches.
 *
 * @param workerSocket - Worker socket for logging
 * @param force - If true, refresh all keys. If false, only refresh missing keys.
 */
async function performArchivedOpportunityRefresh(
  workerSocket: Socket,
  force: boolean,
): Promise<void> {
  const thirtyDaysAgo = new Date(Date.now() - 30 * 24 * 60 * 60 * 1000);
  // Closed deals older than 30 days; closedDate=null is also treated as
  // archived, so closed records lacking a close date still get a fixed TTL.
  const opportunities = await prisma.opportunity.findMany({
    where: {
      closedFlag: true,
      OR: [{ closedDate: { lt: thirtyDaysAgo } }, { closedDate: null }],
    },
    select: {
      cwOpportunityId: true,
      company: { select: { cw_CompanyId: true } },
    },
    orderBy: { cwLastUpdated: "desc" },
  });
  const label = force ? "midnight rebuild" : "startup warm";
  workerLog(
    workerSocket,
    `[archived-refresh] Starting ${label} for ${opportunities.length} archived opportunities`,
  );
  if (opportunities.length === 0) {
    workerLog(
      workerSocket,
      `[archived-refresh] No archived opportunities found`,
    );
    return;
  }
  // De-duplicate company ids so each company's CW data is fetched once.
  // NOTE(review): the predicate only filters `undefined` (absent company
  // relation); if cw_CompanyId is nullable in the Prisma schema a null
  // could slip through — confirm against the model.
  const uniqueCompanyIds = Array.from(
    new Set(
      opportunities
        .map((opp) => opp.company?.cw_CompanyId)
        .filter((id): id is number => id !== undefined),
    ),
  );
  // Per-opportunity flags recording which of the five cache keys must be
  // rebuilt for that opportunity.
  const oppMissingById = new Map<
    number,
    {
      oppCwDataMissing: boolean;
      activitiesMissing: boolean;
      notesMissing: boolean;
      contactsMissing: boolean;
      productsMissing: boolean;
    }
  >();
  const companyMissingById = new Map<number, boolean>();
  if (force) {
    // Midnight rebuild: mark every key missing so all are overwritten.
    for (const opp of opportunities) {
      oppMissingById.set(opp.cwOpportunityId, {
        oppCwDataMissing: true,
        activitiesMissing: true,
        notesMissing: true,
        contactsMissing: true,
        productsMissing: true,
      });
    }
    for (const companyId of uniqueCompanyIds) {
      companyMissingById.set(companyId, true);
    }
  } else {
    // Startup warm: batch all EXISTS checks into one pipeline round-trip.
    // The read-back below consumes results via a running index, so the
    // enqueue order here (five keys per opportunity, then one key per
    // company) must match the read order exactly — do not reorder.
    const pipeline = redis.pipeline();
    for (const opp of opportunities) {
      pipeline.exists(`opp:cw-data:${opp.cwOpportunityId}`);
      pipeline.exists(`opp:activities:${opp.cwOpportunityId}`);
      pipeline.exists(`opp:notes:${opp.cwOpportunityId}`);
      pipeline.exists(`opp:contacts:${opp.cwOpportunityId}`);
      pipeline.exists(`opp:products:${opp.cwOpportunityId}`);
    }
    for (const companyId of uniqueCompanyIds) {
      pipeline.exists(companyCwCacheKey(companyId));
    }
    const results = (await pipeline.exec()) || [];
    // Pipeline entries are [error, value] tuples. A missing or errored
    // entry is treated as "key absent" so it gets refreshed, not skipped.
    const existsAt = (index: number): boolean => {
      const value = results[index]?.[1];
      return typeof value === "number" && value > 0;
    };
    let idx = 0;
    // idx++ sequence mirrors the pipeline enqueue order above.
    for (const opp of opportunities) {
      oppMissingById.set(opp.cwOpportunityId, {
        oppCwDataMissing: !existsAt(idx++),
        activitiesMissing: !existsAt(idx++),
        notesMissing: !existsAt(idx++),
        contactsMissing: !existsAt(idx++),
        productsMissing: !existsAt(idx++),
      });
    }
    for (const companyId of uniqueCompanyIds) {
      companyMissingById.set(companyId, !existsAt(idx++));
    }
  }
  // One task per missing cache key. Each task swallows and logs its own
  // errors so a single CW failure cannot abort the rest of the batch.
  const refreshTasks: (() => Promise<void>)[] = [];
  let plannedOppCwData = 0;
  let plannedActivities = 0;
  let plannedNotes = 0;
  let plannedContacts = 0;
  let plannedProducts = 0;
  let plannedCompanies = 0;
  for (const opp of opportunities) {
    const missing = oppMissingById.get(opp.cwOpportunityId);
    if (!missing) continue;
    if (missing.oppCwDataMissing) {
      plannedOppCwData++;
      refreshTasks.push(async () => {
        try {
          await fetchAndCacheOppCwData(opp.cwOpportunityId, TTL_ARCHIVED_MS);
        } catch (error) {
          workerLog(
            workerSocket,
            `[archived-refresh] oppCwData failed for opp${opp.cwOpportunityId}: ${describeError(error)}`,
          );
        }
      });
    }
    if (missing.activitiesMissing) {
      plannedActivities++;
      refreshTasks.push(async () => {
        try {
          await fetchAndCacheActivities(opp.cwOpportunityId, TTL_ARCHIVED_MS);
        } catch (error) {
          workerLog(
            workerSocket,
            `[archived-refresh] activities failed for opp${opp.cwOpportunityId}: ${describeError(error)}`,
          );
        }
      });
    }
    if (missing.notesMissing) {
      plannedNotes++;
      refreshTasks.push(async () => {
        try {
          await fetchAndCacheNotes(opp.cwOpportunityId, TTL_ARCHIVED_MS);
        } catch (error) {
          workerLog(
            workerSocket,
            `[archived-refresh] notes failed for opp${opp.cwOpportunityId}: ${describeError(error)}`,
          );
        }
      });
    }
    if (missing.contactsMissing) {
      plannedContacts++;
      refreshTasks.push(async () => {
        try {
          await fetchAndCacheContacts(opp.cwOpportunityId, TTL_ARCHIVED_MS);
        } catch (error) {
          workerLog(
            workerSocket,
            `[archived-refresh] contacts failed for opp${opp.cwOpportunityId}: ${describeError(error)}`,
          );
        }
      });
    }
    if (missing.productsMissing) {
      plannedProducts++;
      refreshTasks.push(async () => {
        try {
          await fetchAndCacheProducts(opp.cwOpportunityId, TTL_ARCHIVED_MS);
        } catch (error) {
          workerLog(
            workerSocket,
            `[archived-refresh] products failed for opp${opp.cwOpportunityId}: ${describeError(error)}`,
          );
        }
      });
    }
  }
  for (const companyId of uniqueCompanyIds) {
    // Default to "missing" if the map has no entry, so a bookkeeping gap
    // results in a refresh rather than a stale key.
    const companyMissing = companyMissingById.get(companyId) ?? true;
    if (!companyMissing) continue;
    plannedCompanies++;
    refreshTasks.push(async () => {
      try {
        await fetchAndCacheCompanyCwData(companyId, TTL_ARCHIVED_MS);
      } catch (error) {
        workerLog(
          workerSocket,
          `[archived-refresh] company data failed for company${companyId}: ${describeError(error)}`,
        );
      }
    });
  }
  workerLog(
    workerSocket,
    `[archived-refresh] Planned tasks (${label}): opportunities=${opportunities.length}, totalTasks=${refreshTasks.length}, oppCwData=${plannedOppCwData}, activities=${plannedActivities}, notes=${plannedNotes}, contacts=${plannedContacts}, products=${plannedProducts}, companies=${plannedCompanies}, uniqueCompanies=${uniqueCompanyIds.length}`,
  );
  if (refreshTasks.length === 0) {
    workerLog(
      workerSocket,
      `[archived-refresh] No cache keys needed refresh (${label})`,
    );
    return;
  }
  // Run with bounded concurrency and inter-batch delay
  const CONCURRENCY = 6;
  const BATCH_DELAY_MS = 250;
  // NOTE: tasks catch their own errors above, so this outer catch only
  // fires on unexpected failures; failCount counts batches, not tasks.
  let failCount = 0;
  let completedTasks = 0;
  const totalBatches = Math.ceil(refreshTasks.length / CONCURRENCY);
  for (let i = 0; i < refreshTasks.length; i += CONCURRENCY) {
    const batch = refreshTasks.slice(i, i + CONCURRENCY);
    const batchNumber = Math.floor(i / CONCURRENCY) + 1;
    try {
      await Promise.all(batch.map((task) => task()));
      completedTasks += batch.length;
      // Log every batch for short runs, otherwise every 5th and the last.
      const shouldLogProgress =
        totalBatches <= 3 ||
        batchNumber % 5 === 0 ||
        batchNumber === totalBatches;
      if (shouldLogProgress) {
        workerLog(
          workerSocket,
          `[archived-refresh] Progress: batch ${batchNumber}/${totalBatches}, completedTasks=${completedTasks}/${refreshTasks.length}`,
          "DEBUG",
        );
      }
    } catch (error) {
      failCount++;
      workerLog(
        workerSocket,
        `[archived-refresh] error in batch at index ${i}: ${describeError(error)}`,
      );
    }
    // Pause between batches to avoid hammering the CW API.
    if (i + CONCURRENCY < refreshTasks.length) {
      await new Promise((resolve) => setTimeout(resolve, BATCH_DELAY_MS));
    }
  }
  if (failCount > 0) {
    workerLog(
      workerSocket,
      `[archived-refresh] ${failCount} batch(es) encountered errors`,
    );
  }
  workerLog(
    workerSocket,
    `[archived-refresh] Completed (${label}): ${opportunities.length} archived opportunities, ${refreshTasks.length} tasks`,
  );
}
/**
 * Build a concise error description for logging.
 *
 * Axios errors are formatted as "METHOD url -> CODE (message)"; anything
 * else falls back to its `message` property or String() form.
 */
function describeError(err: unknown): string {
  if (typeof err !== "object" || err === null) return String(err);
  const e = err as Record<string, any>;
  if (e.isAxiosError) {
    const method = (e.config?.method ?? "?").toUpperCase();
    const url = e.config?.url ?? "unknown";
    const code = e.code ?? "";
    const status = e.response?.status ?? "";
    // " -> " separates the request target from the failure reason; without
    // it the url and code run together (e.g. "/pathECONNRESET"), matching
    // neither the sibling helper in the active-refresh worker nor readable
    // log output.
    return `${method} ${url} -> ${code || `HTTP ${status}`} (${e.message})`;
  }
  return e.message ?? String(err);
}
-1
View File
@@ -1,7 +1,6 @@
import { Server } from "socket.io";
import { events, EventTypes } from "../globalEvents";
import { WorkerQueue } from "./queues";
import { reserveWorkerId } from "../../workert";
function emitGlobalEvent<K extends keyof EventTypes>(
name: K,
+18
View File
@@ -0,0 +1,18 @@
import { Socket } from "socket.io-client";
import { executeFullDalpuriSync, executeForcedIncrementalDalpuriSync } from "dalpuri";
/**
 * Run a complete Dalpuri (ConnectWise) -> API database synchronisation.
 * Main entry point used by the dalpuri sync worker; the worker socket is
 * accepted for handler-signature compatibility but not used here.
 */
export async function executeFullSync(_workerSocket: Socket): Promise<void> {
  await executeFullDalpuriSync();
}
/**
 * Run one incremental Dalpuri (ConnectWise) -> API database sync pass.
 * Triggered every 5 seconds via PgBoss from the API process interval.
 */
export async function executeIncrementalSync(): Promise<void> {
  await executeForcedIncrementalDalpuriSync();
}
@@ -0,0 +1,10 @@
import { getBoss } from "../../workert";
import { WorkerQueue } from "./queues";
/**
 * Push a single incremental-sync job onto the PgBoss queue.
 * Fired on an interval from the main API process so that scheduling
 * survives worker restarts.
 */
export async function enqueueIncrementalSync(): Promise<void> {
  const boss = getBoss();
  await boss.send(WorkerQueue.DALPURI_INCREMENTAL_SYNC, {});
}
+4 -1
View File
@@ -1,7 +1,10 @@
/**
 * PgBoss queue names for every background worker job.
 *
 * Values double as the PgBoss queue identifiers, so renaming a value is a
 * breaking change for any in-flight jobs enqueued under the old name.
 */
export enum WorkerQueue {
  // Deduplicated: this member appeared twice with conflicting values
  // ("workers/namespace/reservation" vs "worker/namespace/reservation"),
  // which is a TypeScript duplicate-identifier error. The diff removed the
  // old "workers/..." spelling, so the new "worker/..." value is kept.
  WORKER_NAMESPACE_RESERVATION = "worker/namespace/reservation",
  REFRESH_COMPANIES = "cw/companies/refresh",
  REFRESH_OPPORTUNITIES = "cw/opportunities/refresh",
  REFRESH_ACTIVE_OPPORTUNITIES = "cache/opportunities/refresh/active",
  REFRESH_ARCHIVED_OPPORTUNITIES = "cache/opportunities/refresh/archived",
  DALPURI_FULL_SYNC = "dalpuri/sync/full",
  DALPURI_INCREMENTAL_SYNC = "dalpuri/sync/incremental",
  REFRESH_SALES_METRICS = "cache/sales/metrics/refresh",
}
+24
View File
@@ -0,0 +1,24 @@
import { refreshSalesOpportunityMetricsCache } from "../cache/salesOpportunityMetricsCache";
// Prefix attached to every log line emitted by this job handler.
const LOG_PREFIX = "[job:salesMetrics]";
/**
 * Execute the sales opportunity metrics cache refresh.
 * This is the handler function registered with PgBoss for the
 * REFRESH_SALES_METRICS queue.
 */
export async function executeSalesMetricsRefresh(opts?: {
  forceColdLoad?: boolean;
}): Promise<void> {
  const startedAt = Date.now();
  const elapsed = () => Date.now() - startedAt;
  console.log(
    `${LOG_PREFIX} refresh started | forceColdLoad=${opts?.forceColdLoad ?? false}`
  );
  try {
    await refreshSalesOpportunityMetricsCache({
      forceColdLoad: opts?.forceColdLoad,
    });
    console.log(`${LOG_PREFIX} refresh completed in ${elapsed()}ms`);
  } catch (err) {
    // Log with timing context, then rethrow so PgBoss records the failure.
    console.error(`${LOG_PREFIX} refresh failed in ${elapsed()}ms`, err);
    throw err;
  }
}
+50
View File
@@ -0,0 +1,50 @@
import { Socket } from "socket.io-client";
import { WorkerQueue } from "./queues";
import { createWorkerJob, workerLog } from "./jobFactory";
import { executeFullSync } from "./dalpuri-sync";
/**
 * Enqueue a full sync from Dalpuri (ConnectWise) to the API.
 * The sync itself runs asynchronously in the worker process; the returned
 * promise settles when the worker job finishes.
 *
 * @param managerSocket - The manager socket connection for communicating with worker
 * @returns Promise that resolves when sync completes
 *
 * @example
 * const socket = await ensureManagerSocketReady();
 * await enqueueDalpuriFullSync(socket);
 * console.log("Sync completed!");
 */
export async function enqueueDalpuriFullSync(
  managerSocket: Socket
): Promise<void> {
  return createWorkerJob(
    managerSocket,
    WorkerQueue.DALPURI_FULL_SYNC,
    async (workerSocket) => {
      workerLog(
        workerSocket,
        "[dalpuri] Starting full sync from ConnectWise (CW) to API database",
        "INFO"
      );
      try {
        await executeFullSync(workerSocket);
        workerLog(
          workerSocket,
          "[dalpuri] Full sync completed successfully",
          "INFO"
        );
      } catch (error) {
        // Log a readable reason, then rethrow so the job is marked failed.
        const reason = error instanceof Error ? error.message : String(error);
        workerLog(
          workerSocket,
          `[dalpuri] Full sync failed: ${reason}`,
          "ERROR"
        );
        throw error;
      }
    }
  );
}