fix: remove nested .git folders, re-add as normal directories

This commit is contained in:
2026-03-22 17:50:47 -05:00
parent f55c7e47c9
commit 6b7eec67b8
1870 changed files with 4170168 additions and 3 deletions
@@ -0,0 +1,102 @@
/**
* @module algo.coldThreshold
*
* Cold-Detection Algorithm
* ========================
*
* Determines whether an opportunity has stalled in a status long enough
* to be considered "cold". When an opportunity goes cold it is
* automatically moved to InternalReview, a system-generated activity is
* logged, and it is flagged for the internal review report.
*
* ## Thresholds (defaults)
*
* | Status | Stall Threshold |
* |-----------------|-----------------|
* | QuoteSent | 14 days |
* | ConfirmedQuote | 30 days |
*
* Only these two statuses are eligible for cold detection. All other
* statuses return `cold: false`.
*
* ## How "last activity date" is determined
*
* The algorithm uses `lastActivityDate` — the most recent of:
* - the latest activity's `dateStart`
* - the opportunity's `cwLastUpdated`
*
* The caller is responsible for resolving this value before calling
* `checkColdStatus`.
*/
import type { OpportunityController } from "../../controllers/OpportunityController";
// ---------------------------------------------------------------------------
// Config
// ---------------------------------------------------------------------------
/**
 * Stall thresholds keyed by CW status ID; `ms` is the `days` value
 * pre-converted to milliseconds for direct comparison against date deltas.
 * Only these two statuses are eligible for cold detection.
 */
export const COLD_THRESHOLDS: Record<number, { days: number; ms: number }> = {
  /** QuoteSent — CW status ID 43, "03. Quote Sent": 14-day stall threshold. */
  43: { days: 14, ms: 14 * 24 * 60 * 60 * 1000 },
  /** ConfirmedQuote — CW status ID 57, "04. Confirmed Quote": 30-day stall threshold. */
  57: { days: 30, ms: 30 * 24 * 60 * 60 * 1000 },
};
// ---------------------------------------------------------------------------
// Types
// ---------------------------------------------------------------------------
export interface ColdCheckInput {
  /** Current CW status ID of the opportunity; `null` when no status is set. */
  statusCwId: number | null;
  /**
   * The most recent meaningful date to measure staleness from.
   * Typically the latest of the last activity dateStart or cwLastUpdated.
   * The caller resolves this value (see module doc); `null` when no signal exists.
   */
  lastActivityDate: Date | null;
  /** Override for "now" — useful for testing. Defaults to `new Date()`. */
  now?: Date;
}
export interface ColdCheckResult {
  /** Whether the opportunity is considered cold. */
  cold: boolean;
  /**
   * Which threshold triggered the cold flag.
   * `null` when `cold` is `false`.
   */
  triggeredBy: {
    statusCwId: number;
    statusName: string;
    thresholdDays: number;
    staleDays: number;
  } | null;
}
// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------
/**
 * Human-readable names for the cold-eligible CW status IDs, intended to fill
 * `ColdCheckResult.triggeredBy.statusName`.
 * NOTE(review): currently unreferenced — `checkColdStatus` is bypassed below
 * and never builds a `triggeredBy` payload.
 */
const STATUS_NAMES: Record<number, string> = {
  43: "QuoteSent",
  57: "ConfirmedQuote",
};
// ---------------------------------------------------------------------------
// Core
// ---------------------------------------------------------------------------
/**
 * Evaluate whether an opportunity has exceeded its cold-stall threshold.
 *
 * NOTE: the cold-stall feature is intentionally bypassed for now — this
 * function ignores its input and always reports "not cold". The thresholds
 * and types above are kept in place for when the feature is switched on.
 *
 * @param _input - Cold-check signals (currently ignored while bypassed).
 * @returns Always `{ cold: false, triggeredBy: null }` until the feature ships.
 */
export function checkColdStatus(_input: ColdCheckInput): ColdCheckResult {
  const notCold: ColdCheckResult = { cold: false, triggeredBy: null };
  return notCold;
}
@@ -0,0 +1,97 @@
/**
* @module algo.followUpScheduler
*
* Follow-Up Scheduling Algorithm
* ===============================
*
* Determines the due date for follow-up activities created by the
* opportunity workflow. The follow-up is always assigned to the user
* who triggered its creation.
*
* ## TODO — Calendar-aware scheduling
*
* This module currently uses a **dummy algorithm** that schedules the
* follow-up for the next business day at 10:00 AM local time.
*
* It needs to be replaced with an availability-aware algorithm that:
* 1. Reads the assigned user's calendar (Microsoft Graph / CW schedule).
* 2. Finds the earliest open slot of sufficient duration.
* 3. Respects company-wide blackout dates (holidays, company events).
* 4. Accounts for the user's working-hours preferences.
*
* Until that integration is complete, the simple "next business day"
* heuristic is used as a placeholder.
*/
// ---------------------------------------------------------------------------
// Types
// ---------------------------------------------------------------------------
export interface FollowUpScheduleInput {
  /** The user who triggered the activity (the follow-up is assigned to them). */
  triggeredByUserId: string;
  /** Optional override for "now" — useful for deterministic testing. Defaults to `new Date()`. */
  now?: Date;
}
export interface FollowUpScheduleResult {
  /** Suggested due date for the follow-up activity (next business day, 10:00 AM local time). */
  dueDate: Date;
  /** ISO-string version of `dueDate` for CW API payloads. */
  dueDateIso: string;
}
// ---------------------------------------------------------------------------
// Core
// ---------------------------------------------------------------------------
/**
 * Schedule a follow-up activity.
 *
 * Returns a suggested `dueDate` for the follow-up. Placeholder logic: the
 * next business day (Mon–Fri) at 10:00 AM local time — see the module TODO
 * for the planned calendar-aware replacement.
 *
 * @param input - Scheduling parameters (`now` may be overridden for tests).
 * @returns The scheduled follow-up date plus its ISO-string form.
 */
export function scheduleFollowUp(
  input: FollowUpScheduleInput,
): FollowUpScheduleResult {
  const reference = input.now ?? new Date();
  const due = getNextBusinessDay(reference);
  due.setHours(10, 0, 0, 0); // fixed 10:00 AM local-time slot
  return { dueDate: due, dueDateIso: due.toISOString() };
}
// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------
/**
 * Compute the next business day (Mon–Fri) strictly after `from`.
 * Always advances at least one calendar day, then hops over the weekend:
 * landing on Saturday jumps +2 to Monday, landing on Sunday jumps +1.
 */
function getNextBusinessDay(from: Date): Date {
  const candidate = new Date(from);
  candidate.setDate(candidate.getDate() + 1); // never same-day
  switch (candidate.getDay()) {
    case 6: // Saturday → Monday
      candidate.setDate(candidate.getDate() + 2);
      break;
    case 0: // Sunday → Monday
      candidate.setDate(candidate.getDate() + 1);
      break;
  }
  return candidate;
}
@@ -0,0 +1,168 @@
/**
* @module computeCacheTTL
*
* Adaptive Cache TTL Algorithm
* ============================
*
* Determines how long a cached record should live before it must be
* re-fetched from the upstream source (e.g. ConnectWise API).
*
* The algorithm prioritises freshness for records that are actively
* being worked on, while avoiding unnecessary API calls for stale or
* inactive data.
*
* ## Spec
*
* | # | Condition | TTL (ms) | TTL (human) | Rationale |
* |---|------------------------------------------------------------------|----------|-------------|--------------------------------------------------------------------|
 * | 1 | `closedFlag` is `true` and closed more than **30 days** ago       | `null`   | Do not cache| Old closed records are rarely accessed; caching wastes memory.     |
 * | 1b| `closedFlag` is `true` and closed within the last **30 days**     | 900 000  | 15 minutes  | Recently-closed records may still be viewed occasionally.          |
* | 2 | `expectedCloseDate` OR `lastUpdated` is within the last **5 days**| 60 000 | 60 seconds | High-activity window — data changes frequently and must stay fresh.|
* | 3 | `expectedCloseDate` OR `lastUpdated` is within the last **14 days**| 90 000 | 90 seconds | Moderate activity — still relevant, but changes less often. |
* | 4 | Everything else (older than 14 days) | 900 000 | 15 minutes | Low activity — safe to serve from cache for longer. |
*
* ## Evaluation order
*
* Rules are evaluated **top-to-bottom**; the first matching rule wins.
* Rule 2 (5-day window) is a subset of Rule 3 (14-day window), so it
* must be checked first.
*
* ## Inputs
*
* | Field | Type | Description |
* |--------------------|------------------|--------------------------------------------------------------------|
* | `closedFlag` | `boolean` | Whether the record is closed / inactive. |
* | `expectedCloseDate`| `Date \| null` | The projected close date (future-looking relevance signal). |
* | `lastUpdated` | `Date \| null` | The last time the upstream record was modified (backward-looking). |
* | `now` | `Date` (optional)| Override for the current timestamp; defaults to `new Date()`. |
*
* ## Output
*
* Returns `number | null`:
* - A positive integer representing the TTL in **milliseconds**, or
* - `null` when the record should **not** be cached at all.
*
* ## Usage
*
* ```ts
* import { computeCacheTTL } from "../modules/algorithms/computeCacheTTL";
*
* const ttl = computeCacheTTL({
* closedFlag: opportunity.closedFlag,
* expectedCloseDate: opportunity.expectedCloseDate,
* lastUpdated: opportunity.cwLastUpdated,
* });
*
* if (ttl !== null) {
* await redis.set(key, serialised, "PX", ttl);
* }
* ```
*/
// ---------------------------------------------------------------------------
// Constants
// ---------------------------------------------------------------------------
/** 60 seconds TTL for high-activity records (within 5 days).
 * Must exceed the 30-second background refresh interval so the cache
 * stays warm between cycles. */
export const TTL_HIGH_ACTIVITY = 60_000;
/** 90 seconds TTL for moderate-activity records (within 14 days). */
export const TTL_MODERATE_ACTIVITY = 90_000;
/** 15 minutes TTL for low-activity / stale records. */
export const TTL_LOW_ACTIVITY = 900_000;
/** 30 days in milliseconds — window for caching recently-closed records. */
const THIRTY_DAYS_MS = 30 * 24 * 60 * 60 * 1000;
/** 5 days in milliseconds — high-activity window. */
const FIVE_DAYS_MS = 5 * 24 * 60 * 60 * 1000;
/** 14 days in milliseconds — moderate-activity window. */
const FOURTEEN_DAYS_MS = 14 * 24 * 60 * 60 * 1000;
// ---------------------------------------------------------------------------
// Input type
// ---------------------------------------------------------------------------
export interface CacheTTLInput {
  /** Whether the record is closed / inactive. */
  closedFlag: boolean;
  /** When the record was closed — used for recently-closed caching (within 30 days). */
  closedDate: Date | null;
  /** The projected close date — serves as a forward-looking relevance signal. */
  expectedCloseDate: Date | null;
  /** The date the upstream record was last modified — backward-looking signal. */
  lastUpdated: Date | null;
  /**
   * Override for the current timestamp.
   * Useful for deterministic testing. Defaults to `new Date()`.
   */
  now?: Date;
}
// ---------------------------------------------------------------------------
// Algorithm
// ---------------------------------------------------------------------------
/**
 * Compute the cache TTL for a record based on its activity signals.
 *
 * Rules, first match wins:
 *  1.  Closed more than 30 days ago    → `null` (do not cache).
 *  1b. Closed within the last 30 days  → low-activity TTL.
 *  2.  A signal within ±5 days of now  → high-activity TTL.
 *  3.  A signal within ±14 days of now → moderate-activity TTL.
 *  4.  Otherwise                       → low-activity TTL.
 *
 * A "signal" is either `expectedCloseDate` or `lastUpdated`. Windows are
 * symmetric around `now`, so future-scheduled dates also qualify.
 *
 * @param input - The record's activity signals. See {@link CacheTTLInput}.
 * @returns The TTL in milliseconds, or `null` if the record should not be cached.
 */
export function computeCacheTTL(input: CacheTTLInput): number | null {
  const referenceMs = (input.now ?? new Date()).getTime();

  // Symmetric recency test: |now - date| <= window. Null dates never match.
  const within = (date: Date | null, windowMs: number): boolean => {
    if (!date) return false;
    return Math.abs(referenceMs - date.getTime()) <= windowMs;
  };

  // Rules 1 / 1b — closed records: recently-closed stay warm, old ones drop out.
  if (input.closedFlag) {
    return within(input.closedDate, THIRTY_DAYS_MS) ? TTL_LOW_ACTIVITY : null;
  }

  const signals: Array<Date | null> = [input.expectedCloseDate, input.lastUpdated];

  // Rule 2 — high activity (±5 days).
  if (signals.some((d) => within(d, FIVE_DAYS_MS))) return TTL_HIGH_ACTIVITY;

  // Rule 3 — moderate activity (±14 days).
  if (signals.some((d) => within(d, FOURTEEN_DAYS_MS))) return TTL_MODERATE_ACTIVITY;

  // Rule 4 — low activity / stale.
  return TTL_LOW_ACTIVITY;
}
@@ -0,0 +1,116 @@
/**
* @module computeProductsCacheTTL
*
* Adaptive Cache TTL for Opportunity Products
* ============================================
*
* Determines how long products (forecast items) should be cached in
* Redis before being re-fetched from ConnectWise.
*
* Products have unique caching rules compared to notes or contacts
* because they are typically finalised before a deal closes and do not
* change once the opportunity reaches a terminal status.
*
* ## Spec
*
* | # | Condition | TTL (ms) | TTL (human) | Rationale |
* |---|------------------------------------------------------------------------------|------------|-------------|---------------------------------------------------------------------------------------|
* | 1 | Status is **Won**, **Lost**, **Pending Won**, or **Pending Lost** | `null` | No cache | Products on terminal / near-terminal opps are static; no need to keep them warm. |
* | 2 | Opportunity is **not cacheable** (main cache TTL is `null`) | `null` | No cache | If the opp itself is evicted, sub-resources follow suit. |
 * | 3 | `lastUpdated` is within the last **3 days**                                  | 45 000     | 45 seconds  | Actively-worked deals — products are being edited and need near-real-time freshness. |
* | 4 | Everything else | 1 200 000 | 20 minutes | Lazy on-demand cache: fetched when requested, expires after 20 min without refresh. |
*
* ## Evaluation order
*
* Rules are evaluated top-to-bottom; the first matching rule wins.
*
* ## Inputs
*
* Extends {@link CacheTTLInput} from `computeCacheTTL` with an
* additional `statusCwId` field used to identify terminal statuses.
*
* ## Output
*
* Returns `number | null`:
* - Positive integer = TTL in **milliseconds**.
* - `null` = do **not** cache.
*/
import type { CacheTTLInput } from "./computeCacheTTL";
import { computeCacheTTL } from "./computeCacheTTL";
import { QUOTE_STATUSES } from "../../types/QuoteStatuses";
// ---------------------------------------------------------------------------
// Constants
// ---------------------------------------------------------------------------
/** 45 seconds — TTL for hot products (opportunity updated within 3 days).
 * Must exceed the 30-second background refresh interval so the cache
 * stays warm between cycles. */
export const PRODUCTS_TTL_HOT = 45_000;
/** 20 minutes — TTL for on-demand product cache (lazy fallback). */
export const PRODUCTS_TTL_LAZY = 1_200_000;
/** 3 days in milliseconds — the "hot" window measured from `lastUpdated`. */
const THREE_DAYS_MS = 3 * 24 * 60 * 60 * 1000;
/**
 * Set of all CW status IDs that map to a Won or Lost canonical status.
 *
 * Built at module load from {@link QUOTE_STATUSES} so it stays in sync
 * with any future status additions. Each won/lost status contributes its
 * own `id` plus every ID in its `optimaEquivalency` list (presumably
 * alternate CW IDs for the same canonical status — confirm in
 * `types/QuoteStatuses`).
 */
export const WON_LOST_STATUS_IDS: ReadonlySet<number> = new Set(
  QUOTE_STATUSES.filter((s) => s.wonFlag || s.lostFlag).flatMap((s) => [
    s.id,
    ...s.optimaEquivalency,
  ]),
);
// ---------------------------------------------------------------------------
// Input type
// ---------------------------------------------------------------------------
export interface ProductsCacheTTLInput extends CacheTTLInput {
  /** The CW status ID of the opportunity; `null` when unknown. */
  statusCwId: number | null;
}
// ---------------------------------------------------------------------------
// Algorithm
// ---------------------------------------------------------------------------
/**
 * Compute the cache TTL for an opportunity's products.
 *
 * Evaluation order (first match wins):
 *  1. Won/Lost (and pending) statuses → never cache (products are static).
 *  2. Opportunity itself uncacheable (main TTL is `null`) → never cache.
 *  3. `lastUpdated` within ±3 days of now → hot TTL.
 *  4. Otherwise → lazy on-demand TTL.
 *
 * @param input - The opportunity's activity signals plus status ID.
 * @returns TTL in milliseconds, or `null` if products should not be cached.
 */
export function computeProductsCacheTTL(
  input: ProductsCacheTTLInput,
): number | null {
  const { statusCwId, lastUpdated } = input;
  const referenceMs = (input.now ?? new Date()).getTime();

  // Rule 1 — terminal / near-terminal statuses carry static products.
  const isTerminal = statusCwId !== null && WON_LOST_STATUS_IDS.has(statusCwId);
  if (isTerminal) return null;

  // Rule 2 — follow the main opportunity cache: if it is evicted, so are products.
  if (computeCacheTTL(input) === null) return null;

  // Rule 3 — hot window: |now - lastUpdated| within three days (symmetric).
  if (lastUpdated && Math.abs(referenceMs - lastUpdated.getTime()) <= THREE_DAYS_MS) {
    return PRODUCTS_TTL_HOT;
  }

  // Rule 4 — lazy fallback.
  return PRODUCTS_TTL_LAZY;
}
@@ -0,0 +1,118 @@
/**
* @module computeSubResourceCacheTTL
*
* Adaptive Cache TTL for Opportunity Sub-Resources
* =================================================
*
* Determines how long cached sub-resource data (notes, contacts) should
* live before being re-fetched from ConnectWise.
*
* Sub-resources change less frequently than the opportunity record itself
* or its activity feed, so TTLs are longer than the primary cache. The
* same activity-signal heuristics are used (expected close date, last
* updated, closed status) but with relaxed durations.
*
* ## Spec
*
* | # | Condition | TTL (ms) | TTL (human) | Rationale |
* |---|-------------------------------------------------------------------|----------|-------------|--------------------------------------------------------------------|
* | 1 | `closedFlag` is `true` AND closed > 30 days ago | `null` | Do not cache| Old closed records are rarely accessed. |
* | 1b| `closedFlag` is `true` AND closed within 30 days | 300 000 | 5 minutes | Recently-closed records may still be viewed occasionally. |
* | 2 | `expectedCloseDate` OR `lastUpdated` within **5 days** | 60 000 | 60 seconds | Active deals — contacts/notes may still change. |
* | 3 | `expectedCloseDate` OR `lastUpdated` within **14 days** | 120 000 | 2 minutes | Moderate activity — less likely to change. |
* | 4 | Everything else (older than 14 days) | 300 000 | 5 minutes | Low activity — safe to cache longer. |
*
* ## Evaluation order
*
* Rules are evaluated top-to-bottom; the first matching rule wins.
*
* ## Inputs
*
* Uses the same {@link CacheTTLInput} interface as `computeCacheTTL`.
*
* ## Output
*
* Returns `number | null`:
* - Positive integer = TTL in **milliseconds**.
* - `null` = do **not** cache.
*/
import type { CacheTTLInput } from "./computeCacheTTL";
// ---------------------------------------------------------------------------
// Constants
// ---------------------------------------------------------------------------
/** 60 seconds — TTL for high-activity sub-resources (within 5 days). */
export const SUB_TTL_HIGH_ACTIVITY = 60_000;
/** 2 minutes — TTL for moderate-activity sub-resources (within 14 days). */
export const SUB_TTL_MODERATE_ACTIVITY = 120_000;
/** 5 minutes — TTL for low-activity / stale sub-resources. */
export const SUB_TTL_LOW_ACTIVITY = 300_000;
/** 30 days in milliseconds — window for caching recently-closed records. */
const THIRTY_DAYS_MS = 30 * 24 * 60 * 60 * 1000;
/** 5 days in milliseconds — high-activity window. */
const FIVE_DAYS_MS = 5 * 24 * 60 * 60 * 1000;
/** 14 days in milliseconds — moderate-activity window. */
const FOURTEEN_DAYS_MS = 14 * 24 * 60 * 60 * 1000;
// ---------------------------------------------------------------------------
// Algorithm
// ---------------------------------------------------------------------------
/**
 * Compute the cache TTL for an opportunity sub-resource (notes, contacts).
 *
 * Mirrors the primary cache algorithm but with relaxed TTLs; recently-closed
 * (≤30 days) records stay cacheable at the low-activity TTL, older closed
 * records are not cached. Windows are symmetric around `now`, so
 * future-scheduled dates also qualify.
 *
 * @param input - The opportunity's activity signals. See {@link CacheTTLInput}.
 * @returns The TTL in milliseconds, or `null` if the data should not be cached.
 */
export function computeSubResourceCacheTTL(
  input: CacheTTLInput,
): number | null {
  const referenceMs = (input.now ?? new Date()).getTime();

  const recentWithin = (date: Date | null, windowMs: number): boolean => {
    if (!date) return false;
    return Math.abs(referenceMs - date.getTime()) <= windowMs;
  };

  // Rules 1 / 1b — closed records.
  if (input.closedFlag) {
    return recentWithin(input.closedDate, THIRTY_DAYS_MS)
      ? SUB_TTL_LOW_ACTIVITY
      : null;
  }

  // Rules 2 & 3 — the tightest matching activity window wins.
  const tiers: ReadonlyArray<readonly [number, number]> = [
    [FIVE_DAYS_MS, SUB_TTL_HIGH_ACTIVITY],
    [FOURTEEN_DAYS_MS, SUB_TTL_MODERATE_ACTIVITY],
  ];
  for (const [windowMs, ttl] of tiers) {
    if (
      recentWithin(input.expectedCloseDate, windowMs) ||
      recentWithin(input.lastUpdated, windowMs)
    ) {
      return ttl;
    }
  }

  // Rule 4 — low activity / stale.
  return SUB_TTL_LOW_ACTIVITY;
}
+57
View File
@@ -0,0 +1,57 @@
import { ZodError } from "zod";
/**
 * Standardised HTTP response-envelope builders used by the API routes.
 * Every builder returns `{ status, message, ..., successful, meta }`, with
 * `meta.timestamp` set to the build time.
 * @ignore
 */
export const apiResponse = {
  /** 200 OK with optional payload; extra meta fields are merged over the timestamp. */
  successful: (message: string, data?: unknown, meta?: Record<string, unknown>) => ({
    status: 200,
    message,
    data,
    successful: true,
    meta: {
      timestamp: Date.now(),
      ...meta,
    },
  }),
  /** 201 Created with optional payload. */
  created: (message: string, data?: unknown) => ({
    status: 201,
    message,
    data,
    successful: true,
    meta: {
      timestamp: Date.now(),
    },
  }),
  /** Error envelope; honours an optional numeric `status` on the error, defaulting to 400. */
  error: (err: Error) => ({
    // Typed narrowing replaces the previous @ts-ignore + bracket access.
    status: (err as Error & { status?: number }).status ?? 400,
    message: err.message,
    error: err.name,
    successful: false,
    meta: {
      timestamp: Date.now(),
    },
  }),
  /** Generic 500 envelope that leaks no internal details. */
  internalError: () => ({
    status: 500,
    // Fixed typo: "occured" -> "occurred".
    message: "An Internal Server Error has occurred...",
    error: "InternalServerError",
    successful: false,
    meta: {
      timestamp: Date.now(),
    },
  }),
  /** 400 envelope for Zod validation failures, exposing the structured issue list. */
  zodError: (err: ZodError) => {
    // Use the structured issue list directly instead of JSON.parse(err.message):
    // the message format is an implementation detail of zod and may change.
    const data = err.issues;
    return {
      status: 400,
      message: "TypeError",
      error: data,
      successful: false,
      meta: {
        timestamp: Date.now(),
      },
    };
  },
};
+40
View File
@@ -0,0 +1,40 @@
import { Handler, Hono, MiddlewareHandler } from "hono";
import { Variables } from "../../types/HonoTypes";
/**
 * Create a route.
 *
 * Allows API routes to be split across files and quickly merged back into
 * the main api server instance: each call returns a standalone Hono
 * sub-app containing the single route, ready to be mounted.
 *
 * NOTE(review): middleware handlers are passed to `.on` ahead of the
 * primary handler, so per Hono's handler-chain semantics they should run
 * first — confirm, then the historical "middleware ordering" TODO can stay
 * retired.
 *
 * @param method - HTTP Method (or list of methods)
 * @param path - URL Path(s)
 * @param handler - Handler function for Hono
 * @param middleware - Middleware Handlers for Hono, run before `handler`
 * @returns {Hono} - A new Hono instance containing the newly created route.
 */
export function createRoute(
  method: string | string[],
  path: string[],
  handler: Handler<{
    Variables: Variables;
  }>,
  ...middleware: MiddlewareHandler<{
    Variables: Variables;
  }>[]
): Hono<{ Variables: Variables }> {
  // A rest parameter is always an array, so the previous `if (middleware)`
  // truthiness check could never take its "no middleware" branch — that
  // return was dead code. Spreading an empty array is equivalent to passing
  // nothing, so one call covers both cases.
  return new Hono<{ Variables: Variables }>().on(
    method as any,
    path,
    ...middleware,
    handler,
  );
}
+659
View File
@@ -0,0 +1,659 @@
/**
* @module opportunityCache
*
* Redis-backed cache for expensive ConnectWise API data associated
* with opportunities.
*
* ## What is cached
*
* Each non-closed opportunity may have cached payloads keyed by its `cwOpportunityId`:
*
* - **Activities** (`opp:activities:{cwOpportunityId}`) — the raw `CWActivity[]` array
* - **Company CW data** (`opp:company-cw:{cw_CompanyId}`) — hydrated company / contacts blob
* - **Notes** (`opp:notes:{cwOpportunityId}`) — raw CW notes array
* - **Contacts** (`opp:contacts:{cwOpportunityId}`) — raw CW contacts array
* - **Products** (`opp:products:{cwOpportunityId}`) — raw CW forecast + procurement products blob
*
* TTLs are computed dynamically via {@link computeCacheTTL}.
*
* ## Background refresh (Worker-based)
*
* **⚠️ This module is now READ-ONLY.** Cache refresh logic has been moved to workers:
*
* - {@link refreshActiveOpportunitiesWorker} — Scheduled to run every 20 minutes
* to run a unified cache pass across all opportunities. Active/recent records
* use adaptive TTLs and archived records use {@link TTL_ARCHIVED_MS}.
*
* See `src/modules/workers/cache/` for worker implementations.
*
* ## This module now provides
*
* - `getCached*()` functions for reading cached data
* - `fetchAndCache*()` functions used internally by workers
* - `invalidate*()` functions for cache invalidation after mutations
* - Cache key helpers for Redis operations
*/
import { prisma, redis } from "../../constants";
import { activityCw } from "../cw-utils/activities/activities";
import { computeCacheTTL } from "../algorithms/computeCacheTTL";
import { computeSubResourceCacheTTL } from "../algorithms/computeSubResourceCacheTTL";
import {
computeProductsCacheTTL,
PRODUCTS_TTL_HOT,
} from "../algorithms/computeProductsCacheTTL";
import { connectWiseApi } from "../../constants";
import { fetchCwCompanyById } from "../cw-utils/fetchCompany";
import { fetchCompanySite } from "../cw-utils/sites/companySites";
import { opportunityCw } from "../cw-utils/opportunities/opportunities";
import { withCwRetry } from "../cw-utils/withCwRetry";
import { events } from "../globalEvents";
// ---------------------------------------------------------------------------
// Key helpers
// ---------------------------------------------------------------------------
// Redis key prefixes — one namespace per cached payload type.
const ACTIVITY_PREFIX = "opp:activities:";
const COMPANY_CW_PREFIX = "opp:company-cw:";
const NOTES_PREFIX = "opp:notes:";
const CONTACTS_PREFIX = "opp:contacts:";
const PRODUCTS_PREFIX = "opp:products:";
const SITE_PREFIX = "opp:site:";
const OPP_CW_PREFIX = "opp:cw-data:";
/** Redis key for cached activities by CW opportunity ID. */
export const activityCacheKey = (cwOppId: number) =>
  `${ACTIVITY_PREFIX}${cwOppId}`;
/** Redis key for cached company CW hydration data by CW company ID. */
export const companyCwCacheKey = (cwCompanyId: number) =>
  `${COMPANY_CW_PREFIX}${cwCompanyId}`;
/** Redis key for cached opportunity notes by CW opportunity ID. */
export const notesCacheKey = (cwOppId: number) => `${NOTES_PREFIX}${cwOppId}`;
/** Redis key for cached opportunity contacts by CW opportunity ID. */
export const contactsCacheKey = (cwOppId: number) =>
  `${CONTACTS_PREFIX}${cwOppId}`;
/** Redis key for cached opportunity products by CW opportunity ID. */
export const productsCacheKey = (cwOppId: number) =>
  `${PRODUCTS_PREFIX}${cwOppId}`;
/** Redis key for cached company site — composite of CW company ID and site ID. */
export const siteCacheKey = (cwCompanyId: number, cwSiteId: number) =>
  `${SITE_PREFIX}${cwCompanyId}:${cwSiteId}`;
/** Redis key for cached CW opportunity response by CW opportunity ID. */
export const oppCwDataCacheKey = (cwOppId: number) =>
  `${OPP_CW_PREFIX}${cwOppId}`;
// ---------------------------------------------------------------------------
// Read helpers
// ---------------------------------------------------------------------------
/**
 * Read a Redis key and JSON-parse its value.
 *
 * Shared implementation for every `getCached*` reader below — previously
 * the identical get/parse/try-catch block was duplicated seven times.
 *
 * @returns The parsed value, or `null` on cache miss or corrupt JSON.
 */
async function readCachedJson<T>(key: string): Promise<T | null> {
  const raw = await redis.get(key);
  if (!raw) return null;
  try {
    return JSON.parse(raw) as T;
  } catch {
    // A corrupt/truncated entry is treated as a cache miss.
    return null;
  }
}
/**
 * Retrieve cached CW activities for an opportunity.
 *
 * @returns The parsed `CWActivity[]` or `null` on cache miss.
 */
export async function getCachedActivities(
  cwOpportunityId: number,
): Promise<any[] | null> {
  return readCachedJson<any[]>(activityCacheKey(cwOpportunityId));
}
/**
 * Retrieve cached company CW hydration data.
 *
 * @returns `{ company, defaultContact, allContacts }` or `null` on cache miss.
 */
export async function getCachedCompanyCwData(
  cwCompanyId: number,
): Promise<{ company: any; defaultContact: any; allContacts: any[] } | null> {
  return readCachedJson(companyCwCacheKey(cwCompanyId));
}
/**
 * Retrieve cached opportunity notes (raw CW data).
 *
 * @returns The parsed raw CW notes array or `null` on cache miss.
 */
export async function getCachedNotes(
  cwOpportunityId: number,
): Promise<any[] | null> {
  return readCachedJson<any[]>(notesCacheKey(cwOpportunityId));
}
/**
 * Retrieve cached opportunity contacts (raw CW data).
 *
 * @returns The parsed raw CW contacts array or `null` on cache miss.
 */
export async function getCachedContacts(
  cwOpportunityId: number,
): Promise<any[] | null> {
  return readCachedJson<any[]>(contactsCacheKey(cwOpportunityId));
}
/**
 * Retrieve cached opportunity products (raw CW forecast + procurement blob).
 *
 * @returns `{ forecast, procProducts }` or `null` on cache miss.
 */
export async function getCachedProducts(
  cwOpportunityId: number,
): Promise<{ forecast: any; procProducts: any[] } | null> {
  return readCachedJson(productsCacheKey(cwOpportunityId));
}
/**
 * Retrieve cached CW site data for a company/site pair.
 *
 * @returns Parsed site data or `null` on cache miss.
 */
export async function getCachedSite(
  cwCompanyId: number,
  cwSiteId: number,
): Promise<any | null> {
  return readCachedJson(siteCacheKey(cwCompanyId, cwSiteId));
}
/**
 * Retrieve cached CW opportunity response data.
 *
 * @returns Parsed CW opportunity object or `null` on cache miss.
 */
export async function getCachedOppCwData(
  cwOpportunityId: number,
): Promise<any | null> {
  return readCachedJson(oppCwDataCacheKey(cwOpportunityId));
}
// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------
/** Check whether an error is an Axios 404 (resource not found in CW). */
function isNotFoundError(err: unknown): boolean {
  if (typeof err !== "object" || err === null) return false;
  const candidate = err as Record<string, any>;
  const status = candidate.response?.status;
  return candidate.isAxiosError === true && status === 404;
}
/**
 * Check whether an error is a transient network / timeout error.
 *
 * These are safe to swallow in background refresh tasks — CW will be
 * retried on the next refresh cycle; callers log a concise one-line
 * warning instead of dumping the full Axios error object.
 */
const TRANSIENT_AXIOS_CODES: ReadonlySet<string> = new Set([
  "ECONNABORTED",
  "ECONNREFUSED",
  "ECONNRESET",
  "ETIMEDOUT",
  "ERR_NETWORK",
  "ENETUNREACH",
  "ERR_BAD_RESPONSE",
]);
function isTransientError(err: unknown): boolean {
  if (typeof err !== "object" || err === null) return false;
  const candidate = err as Record<string, any>;
  if (!candidate.isAxiosError) return false;
  return TRANSIENT_AXIOS_CODES.has(candidate.code as string);
}
/**
 * Build a concise error description for logging (avoids dumping entire
 * Axios objects). Axios errors render as
 * `"METHOD url — code-or-HTTP-status (message)"`; anything else falls back
 * to its `message` or `String(err)`.
 */
function describeError(err: unknown): string {
  if (typeof err !== "object" || err === null) return String(err);
  const e = err as Record<string, any>;
  if (e.isAxiosError) {
    const method = (e.config?.method ?? "?").toUpperCase();
    const url = e.config?.url ?? "unknown";
    const code = e.code ?? "";
    const status = e.response?.status ?? "";
    // Separate the URL from the error code — previously they were
    // concatenated with no delimiter ("GET /pathECONNRESET").
    return `${method} ${url} — ${code || `HTTP ${status}`} (${e.message})`;
  }
  return e.message ?? String(err);
}
/**
 * When true, transient-error warnings inside fetchAndCache* are suppressed.
 * Used during background refresh to avoid flooding the terminal — the
 * refresh function prints a single summary line instead.
 * NOTE(review): nothing in this file's visible portion reads or toggles this
 * flag — confirm the worker modules still use it before removing.
 */
let _suppressTransientWarnings = false;
// ---------------------------------------------------------------------------
// Write helpers
// ---------------------------------------------------------------------------
/**
 * Fetch activities from CW and cache them with the appropriate TTL.
 *
 * A CW 404 (opportunity missing or deleted upstream) resolves to an empty
 * array; transient network failures also resolve to an empty array after a
 * one-line warning. Any other error is rethrown.
 *
 * @returns The raw `CWActivity[]` collection (as plain array).
 */
export async function fetchAndCacheActivities(
  cwOpportunityId: number,
  ttlMs: number,
): Promise<any[]> {
  try {
    // Direct (single-call) variant avoids the extra count request.
    const activities = await activityCw.fetchByOpportunityDirect(cwOpportunityId);
    const payload = JSON.stringify(activities);
    await redis.set(activityCacheKey(cwOpportunityId), payload, "PX", ttlMs);
    return activities;
  } catch (err) {
    if (isNotFoundError(err)) return [];
    if (!isTransientError(err)) throw err;
    console.warn(
      `[cache] activities opp#${cwOpportunityId}: ${describeError(err)}`,
    );
    return [];
  }
}
/**
 * Fetch company CW data (company, contacts) and cache with the given TTL.
 *
 * The company record and its full contact list are fetched in parallel;
 * the default contact is then resolved from the already-fetched list
 * instead of an extra serial CW call (the company object only carries the
 * default contact's ID). 404s and transient network failures resolve to
 * `null`; any other error is rethrown.
 *
 * @returns The hydration blob or `null` if the company doesn't exist in CW.
 */
export async function fetchAndCacheCompanyCwData(
  cwCompanyId: number,
  ttlMs: number,
): Promise<{ company: any; defaultContact: any; allContacts: any[] } | null> {
  try {
    // The contacts URL can be built without the company response, so both
    // requests run concurrently.
    const contactsRequest = withCwRetry(
      () =>
        connectWiseApi.get(
          `/company/companies/${cwCompanyId}/contacts?pageSize=1000`,
        ),
      { label: `company#${cwCompanyId}/allContacts` },
    );
    const [cwCompany, contactsResponse] = await Promise.all([
      fetchCwCompanyById(cwCompanyId),
      contactsRequest,
    ]);
    if (!cwCompany) return null;

    const contacts = contactsResponse.data as any[];
    const wantedId = cwCompany.defaultContact?.id;
    const defaultContact = wantedId
      ? (contacts.find((c: any) => c.id === wantedId) ?? null)
      : null;

    const blob = {
      company: cwCompany,
      defaultContact,
      allContacts: contacts,
    };
    await redis.set(
      companyCwCacheKey(cwCompanyId),
      JSON.stringify(blob),
      "PX",
      ttlMs,
    );
    return blob;
  } catch (err) {
    if (isNotFoundError(err)) return null;
    if (!isTransientError(err)) throw err;
    console.warn(`[cache] company#${cwCompanyId}: ${describeError(err)}`);
    return null;
  }
}
/**
* Fetch opportunity notes from CW and cache the raw response.
*
* Returns an empty array if CW responds with 404.
*
* @returns The raw CW notes array.
*/
export async function fetchAndCacheNotes(
cwOpportunityId: number,
ttlMs: number,
): Promise<any[]> {
try {
const notes = await opportunityCw.fetchNotes(cwOpportunityId);
await redis.set(
notesCacheKey(cwOpportunityId),
JSON.stringify(notes),
"PX",
ttlMs,
);
return notes;
} catch (err) {
if (isNotFoundError(err)) return [];
if (isTransientError(err)) {
console.warn(
`[cache] notes opp#${cwOpportunityId}: ${describeError(err)}`,
);
return [];
}
throw err;
}
}
/**
* Fetch opportunity contacts from CW and cache the raw response.
*
* Returns an empty array if CW responds with 404.
*
* @returns The raw CW contacts array.
*/
export async function fetchAndCacheContacts(
cwOpportunityId: number,
ttlMs: number,
): Promise<any[]> {
try {
const contacts = await opportunityCw.fetchContacts(cwOpportunityId);
await redis.set(
contactsCacheKey(cwOpportunityId),
JSON.stringify(contacts),
"PX",
ttlMs,
);
return contacts;
} catch (err) {
if (isNotFoundError(err)) return [];
if (isTransientError(err)) {
console.warn(
`[cache] contacts opp#${cwOpportunityId}: ${describeError(err)}`,
);
return [];
}
throw err;
}
}
/**
 * Invalidate cached notes for an opportunity.
 *
 * Call this after any note mutation (create, update, delete) so the
 * next read refreshes from ConnectWise.
 *
 * @param cwOpportunityId - The CW opportunity whose notes key is dropped.
 */
export async function invalidateNotesCache(
  cwOpportunityId: number,
): Promise<void> {
  const key = notesCacheKey(cwOpportunityId);
  await redis.del(key);
}
/**
 * Invalidate cached contacts for an opportunity.
 *
 * Call this after any contact mutation so the next read refreshes
 * from ConnectWise.
 *
 * @param cwOpportunityId - The CW opportunity whose contacts key is dropped.
 */
export async function invalidateContactsCache(
  cwOpportunityId: number,
): Promise<void> {
  const key = contactsCacheKey(cwOpportunityId);
  await redis.del(key);
}
/**
* Fetch opportunity products (forecast + procurement) from CW and cache.
*
* Stores both the forecast response and procurement products together
* so that `fetchProducts()` can reconstruct ForecastProductControllers
* from a single cache hit.
*
* @returns `{ forecast, procProducts }` blob.
*/
export async function fetchAndCacheProducts(
cwOpportunityId: number,
ttlMs: number,
): Promise<{ forecast: any; procProducts: any[] }> {
try {
const [forecast, procProducts] = await Promise.all([
opportunityCw.fetchProducts(cwOpportunityId),
opportunityCw.fetchProcurementProducts(cwOpportunityId),
]);
const blob = { forecast, procProducts };
await redis.set(
productsCacheKey(cwOpportunityId),
JSON.stringify(blob),
"PX",
ttlMs,
);
return blob;
} catch (err) {
if (isNotFoundError(err))
return { forecast: { forecastItems: [] }, procProducts: [] };
if (isTransientError(err)) {
console.warn(
`[cache] products opp#${cwOpportunityId}: ${describeError(err)}`,
);
return { forecast: { forecastItems: [] }, procProducts: [] };
}
throw err;
}
}
/**
 * Invalidate cached products for an opportunity.
 *
 * Call this after any product mutation (add, update, resequence) so the
 * next read refreshes from ConnectWise.
 *
 * @param cwOpportunityId - The CW opportunity whose products key is dropped.
 */
export async function invalidateProductsCache(
  cwOpportunityId: number,
): Promise<void> {
  const key = productsCacheKey(cwOpportunityId);
  await redis.del(key);
}
/**
 * Invalidate all cached data for an opportunity.
 *
 * Removes activities, notes, contacts, products, and CW data cache keys.
 * Call this when an opportunity is deleted.
 *
 * @param cwOpportunityId - The CW opportunity to purge from cache.
 */
export async function invalidateAllOpportunityCaches(
  cwOpportunityId: number,
): Promise<void> {
  // One DEL covering every per-opportunity cache key.
  const keys = [
    activityCacheKey(cwOpportunityId),
    notesCacheKey(cwOpportunityId),
    contactsCacheKey(cwOpportunityId),
    productsCacheKey(cwOpportunityId),
    oppCwDataCacheKey(cwOpportunityId),
  ];
  await redis.del(...keys);
}
/**
 * Site TTL — 20 minutes. Site/address data rarely changes so we cache
 * aggressively. The background refresh does NOT proactively warm site keys;
 * they are populated lazily on the first detail-view request.
 */
const SITE_TTL_MS = 1_200_000; // 20 min × 60 s × 1000 ms
/**
* Fetch a CW company site from ConnectWise and cache the result.
*
* @returns The raw CW site object.
*/
export async function fetchAndCacheSite(
cwCompanyId: number,
cwSiteId: number,
): Promise<any> {
try {
const site = await fetchCompanySite(cwCompanyId, cwSiteId);
await redis.set(
siteCacheKey(cwCompanyId, cwSiteId),
JSON.stringify(site),
"PX",
SITE_TTL_MS,
);
return site;
} catch (err) {
if (isNotFoundError(err)) return null;
if (isTransientError(err)) {
console.warn(
`[cache] site company#${cwCompanyId}/site#${cwSiteId}: ${describeError(err)}`,
);
return null;
}
throw err;
}
}
/**
* Fetch the raw CW opportunity response from ConnectWise and cache it.
*
* Used by `fetchItem()` in the manager to avoid a CW roundtrip when
* the detail view is reloaded within the cache TTL window.
*
* @param cwOpportunityId - The CW opportunity ID
* @param ttlMs - Cache TTL in milliseconds
* @returns The raw CW opportunity response object.
*/
export async function fetchAndCacheOppCwData(
cwOpportunityId: number,
ttlMs: number,
): Promise<any> {
try {
const cwData = await opportunityCw.fetch(cwOpportunityId);
await redis.set(
oppCwDataCacheKey(cwOpportunityId),
JSON.stringify(cwData),
"PX",
ttlMs,
);
return cwData;
} catch (err) {
if (isNotFoundError(err)) return null;
if (isTransientError(err)) {
console.warn(`[cache] opp#${cwOpportunityId}: ${describeError(err)}`);
return null;
}
throw err;
}
}
// ---------------------------------------------------------------------------
// Background refresh
// ---------------------------------------------------------------------------
/**
 * Fixed 24-hour TTL used for archived (closed > 30 days) opportunity cache entries.
 * These opportunities are outside the adaptive-TTL window and are rebuilt once per
 * day at midnight via {@link refreshArchivedOpportunityCache}.
 */
// NOTE(review): the worker that replaced refreshArchivedOpportunityCache
// should keep reusing this exported value.
export const TTL_ARCHIVED_MS = 24 * 60 * 60 * 1000; // 24 hours
/**
* Cache opportunities that fall outside the adaptive-TTL window — i.e. those
* closed **more than 30 days ago** — with a fixed 24-hour TTL.
*
* These opportunities are excluded by {@link computeCacheTTL} (returns `null`)
* and are therefore never warmed by {@link refreshOpportunityCache}. This
* function fills that gap so archived deals are still served from cache on
* the rare occasion they are accessed.
*
* ## Scheduling
*
* Designed to be triggered once per day at midnight from `src/index.ts`. At
* midnight `force` is `true` so every key is unconditionally overwritten,
* ensuring data is no more than 24 hours stale.
*
* On startup `force` defaults to `false` so only truly missing keys are
* populated; this avoids a large CW burst on every process restart.
*
* @param force - When `true`, overwrite every cache key without checking
* whether it already exists. Defaults to `false`.
*/
/**
 * @deprecated Moved to a worker — use `refreshArchivedOpportunitiesWorker`
 * from `src/modules/workers/cache/refreshArchivedOpportunities.ts`. That
 * worker should run daily at midnight with force=true so archived
 * opportunities (closed > 30 days) keep fresh cache entries.
 *
 * This stub always rejects, to surface any stale call sites.
 *
 * @param force - Unused; retained for signature compatibility.
 */
export async function refreshArchivedOpportunityCache(
  force = false,
): Promise<void> {
  const message =
    "refreshArchivedOpportunityCache has been moved to a worker. " +
    "Use refreshArchivedOpportunitiesWorker from src/modules/workers/cache/refreshArchivedOpportunities.ts";
  throw new Error(message);
}
/**
 * @deprecated Moved to a worker — use `refreshActiveOpportunitiesWorker`
 * from `src/modules/workers/cache/refreshActiveOpportunities.ts`. That
 * worker should run every 30 seconds to refresh cache for active and
 * recently-closed (within 30 days) opportunities.
 *
 * This stub always rejects, to surface any stale call sites.
 */
export async function refreshOpportunityCache(): Promise<void> {
  const message =
    "refreshOpportunityCache has been moved to a worker. " +
    "Use refreshActiveOpportunitiesWorker from src/modules/workers/cache/refreshActiveOpportunities.ts";
  throw new Error(message);
}
+900
View File
@@ -0,0 +1,900 @@
import { prisma, redis } from "../../constants";
import { getCachedOppCwData, getCachedProducts } from "./opportunityCache";
import { OpportunityStatus } from "../../workflows/wf.opportunity";
import { events } from "../globalEvents";
import { opportunities } from "../../managers/opportunities";
import { normalizeProbabilityRatio } from "../sales-utils/normalizeProbability";
// Cache TTL shared by every sales-metrics key (envelope, per-member, per-opp revenue).
const METRICS_CACHE_TTL_MS = 10 * 60 * 1000;
// Redis key holding the full all-members envelope.
const ALL_MEMBERS_KEY = "sales:metrics:members:all";
// Per-member metrics key prefix; full key is prefix + lowercased identifier.
const MEMBER_KEY_PREFIX = "sales:metrics:member:";
// Per-opportunity computed-revenue key prefix; full key is prefix + CW opp ID.
const OPP_REVENUE_KEY_PREFIX = "sales:metrics:oppRevenue:";
// Max simultaneous revenue lookups during a refresh run.
const PRODUCT_FETCH_CONCURRENCY = 6;
// Timeout (ms) applied to each per-opportunity revenue lookup.
const PRODUCT_LOOKUP_TIMEOUT_MS = 35_000;
const LOG_PREFIX = "[cache:salesMetrics]";
// Timestamped console logger for this module.
const log = (message: string) => {
  const ts = new Date().toISOString();
  console.log(`${LOG_PREFIX} ${ts} ${message}`);
};
// Single-flight guard: non-null while a refresh run is in progress;
// concurrent callers await this same promise instead of starting another.
let salesMetricsRefreshInFlight: Promise<void> | null = null;
/** Redis key for a single member's metrics (identifier is case-folded). */
const memberKey = (identifier: string) =>
  `${MEMBER_KEY_PREFIX}${identifier.toLowerCase()}`;
/** Redis key for a single opportunity's cached revenue figures. */
const oppRevenueKey = (cwOpportunityId: number) =>
  `${OPP_REVENUE_KEY_PREFIX}${cwOpportunityId}`;
/**
 * Delete every key matching `prefix*` and return how many were removed.
 * NOTE(review): relies on the blocking KEYS command — confirm key volume
 * stays small enough that a SCAN-based sweep is unnecessary.
 */
const deleteKeysByPrefix = async (prefix: string) => {
  const matched = await redis.keys(`${prefix}*`);
  if (matched.length > 0) {
    await redis.del(...matched);
  }
  return matched.length;
};
/** One opportunity's contribution to a member's metrics breakdown lists. */
export interface OpportunityBreakdownEntry {
  // Internal (DB) opportunity id.
  id: string;
  // ConnectWise opportunity id.
  cwId: number;
  // Opportunity display name.
  name: string;
  // Total effective revenue (rounded to cents).
  revenue: number;
  // Portion of `revenue` flagged taxable.
  taxableRevenue: number;
  // Portion of `revenue` not flagged taxable.
  nonTaxableRevenue: number;
  /** Probability as a 0-100 percent value */
  probability: number;
  // `revenue` weighted by the 0-1 probability ratio.
  weightedRevenue: number;
  // ISO timestamp of the close date, or null while still open.
  closedDate: string | null;
}
/** Aggregated sales metrics for a single member (sales rep). */
export interface MemberSalesMetrics {
  // Identifier the metrics were aggregated under.
  memberIdentifier: string;
  // Display name ("First Last", falling back to the identifier).
  memberName: string;
  // ISO timestamp of the refresh run that produced this record.
  generatedAt: string;
  // Total revenue across open (pipeline) opportunities.
  pipelineRevenue: number;
  // Closed-won revenue, month-to-date.
  closedWonRevenueMtd: number;
  // Closed-won revenue, year-to-date.
  closedWonRevenueYtd: number;
  // Won-deal counts per period.
  winCount: { mtd: number; ytd: number };
  // Lost-deal counts per period.
  lossCount: { mtd: number; ytd: number };
  // Average days from lead date to close across YTD wins.
  avgDaysToClose: number;
  // Number of currently open opportunities.
  openOpportunityCount: number;
  // Won opportunity counts per period (mirrors winCount).
  wonOpportunityCount: { mtd: number; ytd: number };
  // Lost opportunity counts per period (mirrors lossCount).
  lostOpportunityCount: { mtd: number; ytd: number };
  // Closed (won + lost) opportunity counts per period.
  closedOpportunityCount: { mtd: number; ytd: number };
  // Pipeline revenue weighted by each deal's win probability.
  weightedPipelineRevenue: number;
  // Taxable portion of pipeline revenue.
  taxablePipelineRevenue: number;
  // Non-taxable portion of pipeline revenue.
  nonTaxablePipelineRevenue: number;
  // pipelineRevenue / openOpportunityCount (0 when no open deals).
  avgOpenDealSize: number;
  // Average closed-won deal size per period.
  avgWonDealSize: { mtd: number; ytd: number };
  // wins / (wins + losses) per period; 0 when nothing closed.
  winRate: { mtd: number; ytd: number };
  // losses / (wins + losses) per period; 0 when nothing closed.
  lossRate: { mtd: number; ytd: number };
  // Total opportunities assigned to this member (primary or secondary).
  assignedOpportunityCount: number;
  // Revenue lookups served from cache during the refresh.
  cacheHitCount: number;
  // Revenue lookups that required a cold fetch during the refresh.
  cacheMissCount: number;
  // cacheHitCount / (hits + misses); 0 when no lookups.
  cacheHitRate: number;
  // Per-bucket opportunity detail lists backing the aggregates above.
  opportunityBreakdown: {
    pipeline: OpportunityBreakdownEntry[];
    closedWonMtd: OpportunityBreakdownEntry[];
    closedWonYtd: OpportunityBreakdownEntry[];
    closedLostMtd: OpportunityBreakdownEntry[];
    closedLostYtd: OpportunityBreakdownEntry[];
  };
}
/** Envelope stored under ALL_MEMBERS_KEY: every member's metrics at once. */
export interface SalesMetricsCacheEnvelope {
  // ISO timestamp of the refresh run.
  generatedAt: string;
  // Number of active members at refresh time.
  activeMemberCount: number;
  // Identifiers of all active members included in `members`.
  memberIdentifiers: string[];
  // Metrics keyed by member identifier.
  members: Record<string, MemberSalesMetrics>;
}
/** Revenue figures for one opportunity plus whether they came from cache. */
interface OpportunityRevenue {
  totalRevenue: number;
  taxableRevenue: number;
  nonTaxableRevenue: number;
  // True when served from the revenue or products cache tier.
  cacheHit: boolean;
}
/** Shape of the per-opportunity revenue blob persisted in Redis. */
interface CachedOpportunityRevenue {
  totalRevenue: number;
  taxableRevenue: number;
  nonTaxableRevenue: number;
}
/** Subset of Opportunity DB columns that the metrics refresh reads. */
interface OpportunityRow {
  id: string;
  cwOpportunityId: number;
  name: string;
  primarySalesRepIdentifier: string | null;
  secondarySalesRepIdentifier: string | null;
  statusCwId: number | null;
  statusName: string | null;
  closedFlag: boolean;
  dateBecameLead: Date | null;
  closedDate: Date | null;
  probability: number;
}
/** Options for {@link refreshSalesOpportunityMetricsCache}. */
interface RefreshSalesOpportunityMetricsCacheOptions {
  // When true, bypass and clear all cache tiers before recomputing.
  forceColdLoad?: boolean;
}
/** Round a currency amount to 2 decimal places. */
const roundCurrency = (value: number) => Math.round(value * 100) / 100;
/** Fractional days from `start` to `end`, clamped to a minimum of 0. */
const daysBetween = (start: Date, end: Date): number => {
  const MS_PER_DAY = 1000 * 60 * 60 * 24;
  const elapsedMs = end.getTime() - start.getTime();
  return Math.max(0, elapsedMs / MS_PER_DAY);
};
/** Midnight (UTC) on the first day of `input`'s month. */
const startOfMonthUtc = (input: Date): Date =>
  new Date(Date.UTC(input.getUTCFullYear(), input.getUTCMonth(), 1, 0, 0, 0));
/** Midnight (UTC) on January 1st of `input`'s year. */
const startOfYearUtc = (input: Date): Date =>
  new Date(Date.UTC(input.getUTCFullYear(), 0, 1, 0, 0, 0));
/** Coerce to a finite number; NaN and ±Infinity map to 0. */
const toFinite = (value: unknown): number => {
  const coerced = Number(value);
  return Number.isFinite(coerced) ? coerced : 0;
};
/**
 * True when the opportunity counts as won: either the CW Won status ID, or
 * a status name containing "won" (case-insensitive).
 */
const isWon = (opp: {
  statusCwId: number | null;
  statusName: string | null;
  closedFlag: boolean;
}) => {
  if (opp.statusCwId === OpportunityStatus.Won) return true;
  // Name check alone suffices. The original also tested
  // `closedFlag && name.includes("won")`, which was unreachable dead code:
  // this branch already returns true for any name containing "won".
  if (opp.statusName?.toLowerCase().includes("won")) return true;
  return false;
};
/**
 * True when the opportunity counts as lost: either the CW Lost status ID,
 * or a status name containing "lost" (case-insensitive).
 */
const isLost = (opp: {
  statusCwId: number | null;
  statusName: string | null;
  closedFlag: boolean;
}) => {
  if (opp.statusCwId === OpportunityStatus.Lost) return true;
  // Name check alone suffices. The original also tested
  // `closedFlag && name.includes("lost")`, which was unreachable dead code:
  // this branch already returns true for any name containing "lost".
  if (opp.statusName?.toLowerCase().includes("lost")) return true;
  return false;
};
/** True when the opportunity is closed: explicit closed flag, won, or lost. */
const isClosedOpportunity = (opp: {
  statusCwId: number | null;
  statusName: string | null;
  closedFlag: boolean;
}) => opp.closedFlag || isWon(opp) || isLost(opp);
/**
 * Index procurement products by their forecastDetailId. Only rows with a
 * finite, positive (possibly string-encoded) id are kept; on duplicate ids
 * the last row wins.
 */
const buildCancellationMap = (procProducts: any[]) => {
  const byForecastDetailId = new Map<number, any>();
  for (const row of procProducts) {
    const raw = row?.forecastDetailId;
    const id = typeof raw === "number" ? raw : Number(raw);
    if (!Number.isFinite(id) || id <= 0) continue;
    byForecastDetailId.set(id, row);
  }
  return byForecastDetailId;
};
/**
 * Compute total / taxable / non-taxable revenue from a cached products blob
 * (`{ forecast, procProducts }`), applying cancellation adjustments.
 *
 * Per forecast item: it must have a matching procurement row AND its
 * includeFlag set; fully-cancelled quantities are dropped, and partial
 * cancellations scale revenue by the surviving quantity ratio.
 *
 * NOTE(review): forecast items with NO matching procurement row are skipped
 * entirely (the `has()` check below), i.e. they contribute zero revenue —
 * confirm this is intended rather than treating them as uncancelled.
 */
const computeRevenueFromProductsBlob = (
  blob: any,
): Omit<OpportunityRevenue, "cacheHit"> => {
  // Defensive: tolerate malformed or partial blobs read back from cache.
  const forecastItems = Array.isArray(blob?.forecast?.forecastItems)
    ? blob.forecast.forecastItems
    : [];
  const procProducts = Array.isArray(blob?.procProducts)
    ? blob.procProducts
    : [];
  const cancellationMap = buildCancellationMap(procProducts);
  let totalRevenue = 0;
  let taxableRevenue = 0;
  for (const item of forecastItems) {
    if (!cancellationMap.has(item?.id)) continue;
    if (!item?.includeFlag) continue;
    const quantity = Math.max(0, toFinite(item?.quantity));
    const revenue = toFinite(item?.revenue);
    const cancellation = cancellationMap.get(item.id);
    const cancelledFlag = Boolean(cancellation?.cancelledFlag);
    const quantityCancelled = Math.max(
      0,
      toFinite(cancellation?.quantityCancelled),
    );
    // Fully cancelled: drop the item entirely.
    if (cancelledFlag && quantity > 0 && quantityCancelled >= quantity)
      continue;
    // Partially cancelled: keep the surviving fraction of the revenue
    // (ratio 1 when quantity is 0/unknown — nothing to scale by).
    const ratio =
      quantity > 0 ? Math.max(0, (quantity - quantityCancelled) / quantity) : 1;
    const effectiveRevenue = revenue * ratio;
    totalRevenue += effectiveRevenue;
    if (item?.taxableFlag) taxableRevenue += effectiveRevenue;
  }
  const nonTaxableRevenue = totalRevenue - taxableRevenue;
  return {
    totalRevenue: roundCurrency(totalRevenue),
    taxableRevenue: roundCurrency(taxableRevenue),
    nonTaxableRevenue: roundCurrency(nonTaxableRevenue),
  };
};
/**
 * Sum effective revenue over product controllers, skipping items that are
 * excluded (includeFlag false) or fully cancelled. Negative effective
 * revenue is clamped to 0. Returns rounded total/taxable/non-taxable.
 */
const computeRevenueFromControllers = (
  products: Array<{
    includeFlag: boolean;
    taxableFlag: boolean;
    cancellationType: "full" | "partial" | null;
    effectiveRevenue: number;
  }>,
): Omit<OpportunityRevenue, "cacheHit"> => {
  let total = 0;
  let taxable = 0;
  for (const product of products) {
    if (!product.includeFlag || product.cancellationType === "full") continue;
    const amount = Math.max(0, toFinite(product.effectiveRevenue));
    total += amount;
    if (product.taxableFlag) taxable += amount;
  }
  return {
    totalRevenue: roundCurrency(total),
    taxableRevenue: roundCurrency(taxable),
    nonTaxableRevenue: roundCurrency(total - taxable),
  };
};
/**
 * Read a previously computed revenue blob for an opportunity from Redis.
 * Returns null when the key is absent or the payload doesn't parse; numeric
 * fields are coerced via toFinite so corrupt payloads can't leak NaN.
 */
const readCachedOpportunityRevenue = async (
  cwOpportunityId: number,
): Promise<CachedOpportunityRevenue | null> => {
  const payload = await redis.get(oppRevenueKey(cwOpportunityId));
  if (!payload) return null;
  let parsed: CachedOpportunityRevenue;
  try {
    parsed = JSON.parse(payload) as CachedOpportunityRevenue;
  } catch {
    return null;
  }
  return {
    totalRevenue: toFinite(parsed.totalRevenue),
    taxableRevenue: toFinite(parsed.taxableRevenue),
    nonTaxableRevenue: toFinite(parsed.nonTaxableRevenue),
  };
};
/** Persist a computed revenue blob for an opportunity under the metrics TTL. */
const writeCachedOpportunityRevenue = async (
  cwOpportunityId: number,
  revenue: Omit<OpportunityRevenue, "cacheHit">,
) => {
  const key = oppRevenueKey(cwOpportunityId);
  await redis.set(key, JSON.stringify(revenue), "PX", METRICS_CACHE_TTL_MS);
};
/**
 * Resolve a 0-1 win-probability ratio for an opportunity: prefer the DB
 * value; fall back to the cached CW opportunity payload; otherwise 0.
 */
const resolveProbabilityRatio = async (opp: {
  cwOpportunityId: number;
  probability: number;
}): Promise<number> => {
  const dbRatio = normalizeProbabilityRatio(opp.probability);
  if (dbRatio > 0) return dbRatio;
  const cwOpp = await getCachedOppCwData(opp.cwOpportunityId);
  if (!cwOpp) return 0;
  // CW exposes probability either as an object ({ name }) or as a scalar.
  const raw = cwOpp?.probability?.name ?? cwOpp?.probability ?? 0;
  return normalizeProbabilityRatio(raw);
};
/**
 * Resolve revenue for an opportunity, cheapest source first:
 *  1. the per-opportunity revenue cache,
 *  2. the cached products blob (computing and backfilling the revenue cache),
 *  3. a full controller fetch (counted as a cache miss).
 * When forceColdLoad is set, both cache tiers are bypassed. Any fetch error
 * degrades to best-effort zeros rather than throwing.
 */
const getOpportunityRevenueCacheFirst = async (
  cwOpportunityId: number,
  opts?: RefreshSalesOpportunityMetricsCacheOptions,
): Promise<OpportunityRevenue> => {
  if (!opts?.forceColdLoad) {
    const cachedRevenue = await readCachedOpportunityRevenue(cwOpportunityId);
    if (cachedRevenue) {
      return { ...cachedRevenue, cacheHit: true };
    }
    const cachedProducts = await getCachedProducts(cwOpportunityId);
    if (cachedProducts) {
      const computed = computeRevenueFromProductsBlob(cachedProducts);
      await writeCachedOpportunityRevenue(cwOpportunityId, computed);
      return { ...computed, cacheHit: true };
    }
  }
  try {
    const opportunity = await opportunities.fetchRecord(cwOpportunityId);
    const products = await opportunity.fetchProducts({
      fresh: opts?.forceColdLoad,
    });
    const computed = computeRevenueFromControllers(products);
    await writeCachedOpportunityRevenue(cwOpportunityId, computed);
    return { ...computed, cacheHit: false };
  } catch {
    // Best-effort: a failed lookup contributes zero revenue (cache miss).
    return {
      totalRevenue: 0,
      taxableRevenue: 0,
      nonTaxableRevenue: 0,
      cacheHit: false,
    };
  }
};
/**
 * Race `promise` against a timeout; rejects with Error("Timeout") if the
 * promise does not settle within `timeoutMs`.
 *
 * BUG FIX: the original never cleared its setTimeout, so a live timer (and
 * its event-loop handle) lingered for the full `timeoutMs` even after the
 * promise settled — delaying process exit and wasting resources under load.
 *
 * @param promise - The operation to bound.
 * @param timeoutMs - Maximum time to wait, in milliseconds.
 * @returns The promise's value; rejects with Error("Timeout") on expiry.
 */
const withTimeout = async <T>(
  promise: Promise<T>,
  timeoutMs: number,
): Promise<T> => {
  let timer: ReturnType<typeof setTimeout> | undefined;
  const timeout = new Promise<T>((_, reject) => {
    timer = setTimeout(() => reject(new Error("Timeout")), timeoutMs);
  });
  try {
    return await Promise.race([promise, timeout]);
  } finally {
    // Always release the timer handle, win or lose.
    if (timer !== undefined) clearTimeout(timer);
  }
};
/**
 * Map `items` through an async `mapper` with at most `concurrency` mappers
 * running at once. Results preserve input order; any mapper rejection
 * propagates to the caller.
 */
async function mapWithConcurrency<T, R>(
  items: T[],
  concurrency: number,
  mapper: (item: T) => Promise<R>,
): Promise<R[]> {
  const results: R[] = new Array(items.length);
  let next = 0;
  // Each worker repeatedly claims the next unclaimed index until exhausted.
  const runWorker = async (): Promise<void> => {
    for (;;) {
      const i = next;
      next += 1;
      if (i >= items.length) return;
      results[i] = await mapper(items[i]!);
    }
  };
  const workerCount = Math.min(concurrency, items.length);
  await Promise.all(Array.from({ length: workerCount }, () => runWorker()));
  return results;
}
/**
 * Construct a zeroed MemberSalesMetrics record for one member: every
 * counter, rate, and breakdown list starts empty.
 */
const buildEmptyMetrics = (
  memberIdentifier: string,
  memberName: string,
  generatedAt: string,
): MemberSalesMetrics => {
  // Fresh { mtd, ytd } pair per field — never share object references.
  const zeroPeriod = () => ({ mtd: 0, ytd: 0 });
  return {
    memberIdentifier,
    memberName,
    generatedAt,
    pipelineRevenue: 0,
    closedWonRevenueMtd: 0,
    closedWonRevenueYtd: 0,
    winCount: zeroPeriod(),
    lossCount: zeroPeriod(),
    avgDaysToClose: 0,
    openOpportunityCount: 0,
    wonOpportunityCount: zeroPeriod(),
    lostOpportunityCount: zeroPeriod(),
    closedOpportunityCount: zeroPeriod(),
    weightedPipelineRevenue: 0,
    taxablePipelineRevenue: 0,
    nonTaxablePipelineRevenue: 0,
    avgOpenDealSize: 0,
    avgWonDealSize: zeroPeriod(),
    winRate: zeroPeriod(),
    lossRate: zeroPeriod(),
    assignedOpportunityCount: 0,
    cacheHitCount: 0,
    cacheMissCount: 0,
    cacheHitRate: 0,
    opportunityBreakdown: {
      pipeline: [],
      closedWonMtd: [],
      closedWonYtd: [],
      closedLostMtd: [],
      closedLostYtd: [],
    },
  };
};
/**
 * Rebuild the sales-metrics cache for every active member.
 *
 * Phases:
 *  1. (cold mode only) wipe existing member/revenue keys and the envelope,
 *  2. load active members and their assigned opportunities (lead date and
 *     close date both bounded to the current year),
 *  3. resolve per-opportunity revenue + probability with bounded concurrency,
 *  4. aggregate per-member metrics,
 *  5. write every member key and the all-members envelope in one Redis pipeline.
 *
 * Single-flight: concurrent calls await the in-flight run's promise.
 * Emits `cache:salesMetrics:refresh:{started,completed,error}` events.
 *
 * @param opts - `forceColdLoad: true` bypasses and clears all cache tiers.
 * @throws Re-throws any failure after logging and emitting the error event.
 */
export async function refreshSalesOpportunityMetricsCache(
  opts?: RefreshSalesOpportunityMetricsCacheOptions,
): Promise<void> {
  // Single-flight: reuse an in-progress refresh rather than stacking runs.
  if (salesMetricsRefreshInFlight) {
    log(
      "refresh requested while previous run is still in-flight; reusing existing run",
    );
    return salesMetricsRefreshInFlight;
  }
  salesMetricsRefreshInFlight = (async () => {
    const startedAt = Date.now();
    const forceColdLoad = opts?.forceColdLoad === true;
    log(`refresh started${forceColdLoad ? " | mode=cold" : " | mode=warm"}`);
    // Cold mode: wipe every member/revenue key so nothing stale is reused.
    if (forceColdLoad) {
      const [deletedMemberKeys, deletedRevenueKeys] = await Promise.all([
        deleteKeysByPrefix(MEMBER_KEY_PREFIX),
        deleteKeysByPrefix(OPP_REVENUE_KEY_PREFIX),
        redis.del(ALL_MEMBERS_KEY),
      ]);
      log(
        `cold-load reset completed: memberKeysCleared=${deletedMemberKeys} oppRevenueKeysCleared=${deletedRevenueKeys}`,
      );
    }
    const now = new Date();
    const generatedAt = now.toISOString();
    const monthStart = startOfMonthUtc(now);
    const yearStart = startOfYearUtc(now);
    try {
      const activeMembers = await prisma.cwMember.findMany({
        where: { inactiveFlag: false },
        select: {
          identifier: true,
          firstName: true,
          lastName: true,
        },
      });
      const memberIdentifiers = activeMembers.map(
        (member) => member.identifier,
      );
      log(`members fetched: activeMembers=${memberIdentifiers.length}`);
      // Opportunities assigned (primary or secondary) to any active member,
      // that became leads this year, and are either open or closed this year.
      const opportunityRows: OpportunityRow[] =
        await prisma.opportunity.findMany({
          where: {
            AND: [
              {
                OR: [
                  { primarySalesRepIdentifier: { in: memberIdentifiers } },
                  { secondarySalesRepIdentifier: { in: memberIdentifiers } },
                ],
              },
              { dateBecameLead: { gte: yearStart } },
              {
                OR: [{ closedFlag: false }, { closedDate: { gte: yearStart } }],
              },
            ],
          },
          select: {
            id: true,
            cwOpportunityId: true,
            name: true,
            primarySalesRepIdentifier: true,
            secondarySalesRepIdentifier: true,
            statusCwId: true,
            statusName: true,
            closedFlag: true,
            dateBecameLead: true,
            closedDate: true,
            probability: true,
          },
        });
      log(
        `opportunities fetched: assignedOpportunityRows=${opportunityRows.length}`,
      );
      events.emit("cache:salesMetrics:refresh:started", {
        activeMemberCount: memberIdentifiers.length,
        opportunityCount: opportunityRows.length,
      });
      // No members: still write a well-formed (empty) envelope so readers
      // get a payload instead of a cache miss.
      if (memberIdentifiers.length === 0) {
        const emptyEnvelope: SalesMetricsCacheEnvelope = {
          generatedAt,
          activeMemberCount: 0,
          memberIdentifiers: [],
          members: {},
        };
        await redis.set(
          ALL_MEMBERS_KEY,
          JSON.stringify(emptyEnvelope),
          "PX",
          METRICS_CACHE_TTL_MS,
        );
        events.emit("cache:salesMetrics:refresh:completed", {
          activeMemberCount: 0,
          opportunityCount: 0,
          memberMetricsWritten: 0,
          cacheHitCount: 0,
          cacheMissCount: 0,
          durationMs: Date.now() - startedAt,
        });
        log("no active members found; wrote empty cache envelope");
        return;
      }
      // Phase: resolve revenue + probability per opportunity with bounded
      // concurrency; a timed-out or failed lookup degrades to zero revenue.
      const revenuePhaseStartedAt = Date.now();
      let revenueLookupProcessed = 0;
      let revenueLookupTimeouts = 0;
      let revenueLookupFailures = 0;
      let revenueLookupCacheHits = 0;
      let revenueLookupCacheMisses = 0;
      log(
        `revenue lookup phase started: concurrency=${PRODUCT_FETCH_CONCURRENCY} timeoutMs=${PRODUCT_LOOKUP_TIMEOUT_MS}`,
      );
      const revenueRows = await mapWithConcurrency(
        opportunityRows,
        PRODUCT_FETCH_CONCURRENCY,
        async (opp) => {
          const [revenue, probabilityRatio] = await Promise.all([
            withTimeout(
              getOpportunityRevenueCacheFirst(opp.cwOpportunityId, {
                forceColdLoad,
              }),
              PRODUCT_LOOKUP_TIMEOUT_MS,
            ).catch((err: any) => {
              if (err?.message === "Timeout") {
                revenueLookupTimeouts += 1;
              }
              if (err?.message !== "Timeout") {
                revenueLookupFailures += 1;
              }
              // Degrade to zero revenue rather than failing the whole run.
              return {
                totalRevenue: 0,
                taxableRevenue: 0,
                nonTaxableRevenue: 0,
                cacheHit: false,
              };
            }),
            resolveProbabilityRatio(opp),
          ]);
          revenueLookupProcessed += 1;
          if (revenue.cacheHit) revenueLookupCacheHits += 1;
          if (!revenue.cacheHit) revenueLookupCacheMisses += 1;
          if (revenueLookupProcessed % 100 === 0) {
            log(
              `revenue lookup progress: processed=${revenueLookupProcessed}/${opportunityRows.length} cacheHits=${revenueLookupCacheHits} cacheMisses=${revenueLookupCacheMisses} timeouts=${revenueLookupTimeouts} failures=${revenueLookupFailures}`,
            );
          }
          return { oppId: opp.id, revenue, probabilityRatio };
        },
      );
      log(
        `revenue lookup phase completed in ${Date.now() - revenuePhaseStartedAt}ms: processed=${revenueLookupProcessed}/${opportunityRows.length} cacheHits=${revenueLookupCacheHits} cacheMisses=${revenueLookupCacheMisses} timeouts=${revenueLookupTimeouts} failures=${revenueLookupFailures}`,
      );
      const revenueByOppId = new Map(
        revenueRows.map((row) => [row.oppId, row.revenue]),
      );
      const probabilityByOppId = new Map(
        revenueRows.map((row) => [row.oppId, row.probabilityRatio]),
      );
      // Group opportunities under every member assigned to them (primary or
      // secondary) — one opp can count toward two members' metrics.
      const opportunitiesByMember = new Map<string, OpportunityRow[]>();
      for (const identifier of memberIdentifiers) {
        opportunitiesByMember.set(identifier, []);
      }
      for (const opp of opportunityRows) {
        const assigned = new Set<string>();
        if (opp.primarySalesRepIdentifier)
          assigned.add(opp.primarySalesRepIdentifier);
        if (opp.secondarySalesRepIdentifier)
          assigned.add(opp.secondarySalesRepIdentifier);
        for (const identifier of assigned) {
          const bucket = opportunitiesByMember.get(identifier);
          if (!bucket) continue;
          bucket.push(opp);
        }
      }
      const members: Record<string, MemberSalesMetrics> = {};
      log("member aggregation phase started");
      // Phase: fold each member's opportunities into counters/breakdowns.
      for (const member of activeMembers) {
        const identifier = member.identifier;
        const assigned = opportunitiesByMember.get(identifier) ?? [];
        const metric = buildEmptyMetrics(
          identifier,
          `${member.firstName} ${member.lastName}`.trim() || identifier,
          generatedAt,
        );
        let wonDaysSumYtd = 0;
        for (const opp of assigned) {
          const revenue = revenueByOppId.get(opp.id) ?? {
            totalRevenue: 0,
            taxableRevenue: 0,
            nonTaxableRevenue: 0,
            cacheHit: false,
          };
          metric.cacheHitCount += revenue.cacheHit ? 1 : 0;
          metric.cacheMissCount += revenue.cacheHit ? 0 : 1;
          const won = isWon(opp);
          const lost = isLost(opp);
          const closed = isClosedOpportunity(opp);
          // Clamp probability to [0, 1] defensively.
          const probabilityRatio = Math.max(
            0,
            Math.min(1, toFinite(probabilityByOppId.get(opp.id))),
          );
          const breakdownEntry: OpportunityBreakdownEntry = {
            id: opp.id,
            cwId: opp.cwOpportunityId,
            name: opp.name,
            revenue: revenue.totalRevenue,
            taxableRevenue: revenue.taxableRevenue,
            nonTaxableRevenue: revenue.nonTaxableRevenue,
            probability: roundCurrency(probabilityRatio * 100),
            weightedRevenue: roundCurrency(
              revenue.totalRevenue * probabilityRatio,
            ),
            closedDate: opp.closedDate?.toISOString() ?? null,
          };
          // Open opportunities feed the pipeline figures.
          if (!closed) {
            metric.openOpportunityCount += 1;
            metric.pipelineRevenue += revenue.totalRevenue;
            metric.taxablePipelineRevenue += revenue.taxableRevenue;
            metric.nonTaxablePipelineRevenue += revenue.nonTaxableRevenue;
            metric.weightedPipelineRevenue +=
              revenue.totalRevenue * probabilityRatio;
            metric.opportunityBreakdown.pipeline.push(breakdownEntry);
          }
          // Closed buckets require an actual close date.
          const closedDate = opp.closedDate;
          if (!closedDate) continue;
          const isMtd = closedDate >= monthStart;
          const isYtd = closedDate >= yearStart;
          if (won) {
            if (isMtd) {
              metric.winCount.mtd += 1;
              metric.wonOpportunityCount.mtd += 1;
              metric.closedOpportunityCount.mtd += 1;
              metric.closedWonRevenueMtd += revenue.totalRevenue;
              metric.opportunityBreakdown.closedWonMtd.push(breakdownEntry);
            }
            if (isYtd) {
              metric.winCount.ytd += 1;
              metric.wonOpportunityCount.ytd += 1;
              metric.closedOpportunityCount.ytd += 1;
              metric.closedWonRevenueYtd += revenue.totalRevenue;
              wonDaysSumYtd += daysBetween(
                opp.dateBecameLead ?? closedDate,
                closedDate,
              );
              metric.opportunityBreakdown.closedWonYtd.push(breakdownEntry);
            }
          }
          if (!lost) continue;
          if (isMtd) {
            metric.lossCount.mtd += 1;
            metric.lostOpportunityCount.mtd += 1;
            metric.closedOpportunityCount.mtd += 1;
            metric.opportunityBreakdown.closedLostMtd.push(breakdownEntry);
          }
          if (!isYtd) continue;
          metric.lossCount.ytd += 1;
          metric.lostOpportunityCount.ytd += 1;
          metric.closedOpportunityCount.ytd += 1;
          metric.opportunityBreakdown.closedLostYtd.push(breakdownEntry);
        }
        // Derived averages/rates, all guarded against divide-by-zero.
        metric.assignedOpportunityCount = assigned.length;
        metric.avgDaysToClose =
          metric.winCount.ytd > 0 ? wonDaysSumYtd / metric.winCount.ytd : 0;
        metric.avgOpenDealSize =
          metric.openOpportunityCount > 0
            ? metric.pipelineRevenue / metric.openOpportunityCount
            : 0;
        metric.avgWonDealSize.mtd =
          metric.winCount.mtd > 0
            ? metric.closedWonRevenueMtd / metric.winCount.mtd
            : 0;
        metric.avgWonDealSize.ytd =
          metric.winCount.ytd > 0
            ? metric.closedWonRevenueYtd / metric.winCount.ytd
            : 0;
        const closedMtd = metric.winCount.mtd + metric.lossCount.mtd;
        const closedYtd = metric.winCount.ytd + metric.lossCount.ytd;
        metric.winRate.mtd =
          closedMtd > 0 ? metric.winCount.mtd / closedMtd : 0;
        metric.winRate.ytd =
          closedYtd > 0 ? metric.winCount.ytd / closedYtd : 0;
        metric.lossRate.mtd =
          closedMtd > 0 ? metric.lossCount.mtd / closedMtd : 0;
        metric.lossRate.ytd =
          closedYtd > 0 ? metric.lossCount.ytd / closedYtd : 0;
        const totalLookups = metric.cacheHitCount + metric.cacheMissCount;
        metric.cacheHitRate =
          totalLookups > 0 ? metric.cacheHitCount / totalLookups : 0;
        // Round every currency/rate output for stable presentation.
        metric.pipelineRevenue = roundCurrency(metric.pipelineRevenue);
        metric.closedWonRevenueMtd = roundCurrency(metric.closedWonRevenueMtd);
        metric.closedWonRevenueYtd = roundCurrency(metric.closedWonRevenueYtd);
        metric.weightedPipelineRevenue = roundCurrency(
          metric.weightedPipelineRevenue,
        );
        metric.taxablePipelineRevenue = roundCurrency(
          metric.taxablePipelineRevenue,
        );
        metric.nonTaxablePipelineRevenue = roundCurrency(
          metric.nonTaxablePipelineRevenue,
        );
        metric.avgDaysToClose = roundCurrency(metric.avgDaysToClose);
        metric.avgOpenDealSize = roundCurrency(metric.avgOpenDealSize);
        metric.avgWonDealSize.mtd = roundCurrency(metric.avgWonDealSize.mtd);
        metric.avgWonDealSize.ytd = roundCurrency(metric.avgWonDealSize.ytd);
        metric.winRate.mtd = roundCurrency(metric.winRate.mtd);
        metric.winRate.ytd = roundCurrency(metric.winRate.ytd);
        metric.lossRate.mtd = roundCurrency(metric.lossRate.mtd);
        metric.lossRate.ytd = roundCurrency(metric.lossRate.ytd);
        metric.cacheHitRate = roundCurrency(metric.cacheHitRate);
        members[identifier] = metric;
        if (Object.keys(members).length % 25 === 0) {
          log(
            `member aggregation progress: aggregated=${Object.keys(members).length}/${activeMembers.length}`,
          );
        }
      }
      log(
        `member aggregation completed: totalMembers=${Object.keys(members).length}`,
      );
      const envelope: SalesMetricsCacheEnvelope = {
        generatedAt,
        activeMemberCount: memberIdentifiers.length,
        memberIdentifiers,
        members,
      };
      // Phase: one pipelined write for the envelope plus every member key.
      const pipeline = redis.pipeline();
      log("redis write phase started");
      pipeline.set(
        ALL_MEMBERS_KEY,
        JSON.stringify(envelope),
        "PX",
        METRICS_CACHE_TTL_MS,
      );
      for (const identifier of Object.keys(members)) {
        pipeline.set(
          memberKey(identifier),
          JSON.stringify(members[identifier]),
          "PX",
          METRICS_CACHE_TTL_MS,
        );
      }
      await pipeline.exec();
      log("redis write phase completed");
      const cacheHitCount = Object.values(members).reduce(
        (sum, metric) => sum + metric.cacheHitCount,
        0,
      );
      const cacheMissCount = Object.values(members).reduce(
        (sum, metric) => sum + metric.cacheMissCount,
        0,
      );
      events.emit("cache:salesMetrics:refresh:completed", {
        activeMemberCount: memberIdentifiers.length,
        opportunityCount: opportunityRows.length,
        memberMetricsWritten: Object.keys(members).length,
        cacheHitCount,
        cacheMissCount,
        durationMs: Date.now() - startedAt,
      });
      log(
        `completed in ${Date.now() - startedAt}ms | activeMembers=${memberIdentifiers.length} opportunities=${opportunityRows.length} memberMetrics=${Object.keys(members).length} cacheHits=${cacheHitCount} cacheMisses=${cacheMissCount}`,
      );
    } catch (error) {
      log(`refresh failed in ${Date.now() - startedAt}ms: ${String(error)}`);
      events.emit("cache:salesMetrics:refresh:error", {
        error,
        durationMs: Date.now() - startedAt,
      });
      throw error;
    }
  })().finally(() => {
    // Always release the single-flight slot, success or failure.
    salesMetricsRefreshInFlight = null;
  });
  return salesMetricsRefreshInFlight;
}
/**
 * Read the full sales-metrics envelope (all members) from Redis.
 *
 * @returns The parsed envelope, or null when absent/unparseable.
 */
export async function getSalesOpportunityMetricsAll(): Promise<SalesMetricsCacheEnvelope | null> {
  const payload = await redis.get(ALL_MEMBERS_KEY);
  if (!payload) return null;
  try {
    return JSON.parse(payload) as SalesMetricsCacheEnvelope;
  } catch {
    return null;
  }
}
/**
 * Reads one member's sales metrics from Redis.
 *
 * Tries the per-member key first; on a miss, falls back to the full
 * envelope and indexes into its `members` map.
 *
 * @param identifier - Member identifier (trimmed/lowercased before lookup).
 * @returns The member's metrics, or null when not cached / unparsable.
 */
export async function getSalesOpportunityMetricsForMember(
  identifier: string,
): Promise<MemberSalesMetrics | null> {
  const key = identifier.trim().toLowerCase();
  if (key === "") return null;
  const cached = await redis.get(memberKey(key));
  if (cached) {
    try {
      return JSON.parse(cached) as MemberSalesMetrics;
    } catch {
      // Corrupt per-member entry: bail out (no envelope fallback, matching
      // the original behavior).
      return null;
    }
  }
  const envelope = await getSalesOpportunityMetricsAll();
  if (envelope === null) return null;
  return envelope.members[key] ?? null;
}
@@ -0,0 +1,498 @@
/**
* Catalog Categories & Ecosystems
*
* This module defines the complete category/subcategory hierarchy and
* ecosystem decision trees used for product filtering in the UI.
*
* --- Terminology ---
*
* Category: Top-level CW category (e.g. "Technology", "Field", "General").
* A category is NEVER a subcategory.
*
* Subcategory: The CW subcategory name stored on each catalog item.
* At the second level of the tree, if there are no children
* beneath it then the node name IS the subcategory.
* If children exist, the second-level node is an *umbrella*
* that groups related subcategories — the children are the
* actual subcategory names.
*
* Ecosystem: A cross-cutting product grouping defined by manufacturer +
* category + subcategory-prefix rules. Ecosystems let the UI
* present a "Networking" or "Video Surveillance" view that
* spans manufacturers regardless of where CW filed them.
*
* --- Data shapes ---
*
* SubcategoryNode a leaf: `{ name, cwId? }`
* CategoryGroup an umbrella with children: `{ name, children[] }`
* CategoryEntry either a leaf OR a group at the 2nd level
* TopLevelCategory `{ name, cwId?, entries[] }`
*
* The `CATEGORY_TREE` export is the single source of truth; helpers derive
* flat lists, lookup maps, and search predicates from it.
*/
// ─── Data types ──────────────────────────────────────────────────────────────
/** A leaf node: one concrete CW subcategory. */
export interface SubcategoryNode {
  /** The exact CW subcategory name */
  name: string;
  /** CW subcategory id (optional, for reference only — not used by the helpers below) */
  cwId?: number;
}
/** An umbrella node grouping related subcategories under one display name. */
export interface CategoryGroup {
  /** Display name of the umbrella (e.g. "Network", "Cables", "AlarmBurg") */
  name: string;
  /** The subcategories that belong to this umbrella */
  children: SubcategoryNode[];
}
/** A second-level entry is either a direct subcategory or an umbrella group */
export type CategoryEntry = SubcategoryNode | CategoryGroup;
/** A top-level CW category and everything beneath it. */
export interface TopLevelCategory {
  /** The CW category name */
  name: string;
  /** CW category id (optional, for reference) */
  cwId?: number;
  /** Second-level entries under this category */
  entries: CategoryEntry[];
}
/**
 * Type guard: true when a second-level entry is an umbrella group
 * (i.e. it carries a `children` array) rather than a leaf subcategory.
 */
export function isCategoryGroup(entry: CategoryEntry): entry is CategoryGroup {
  return Array.isArray((entry as Partial<CategoryGroup>).children);
}
// ─── Ecosystem types ─────────────────────────────────────────────────────────
/** One manufacturer's membership rule within an ecosystem. */
export interface EcosystemManufacturer {
  /** Manufacturer name as stored in CW (matched case-insensitively by the helpers) */
  name: string;
  /** CW manufacturer id */
  cwId?: number;
  /** Which CW category these products fall under */
  category: string;
  /** Subcategory prefix — matches any subcategory starting with this string */
  subcategoryPrefix: string;
}
/** A cross-cutting product grouping spanning multiple manufacturers. */
export interface Ecosystem {
  /** Display name (e.g. "Networking", "Video Surveillance") */
  name: string;
  /** Manufacturers that belong to this ecosystem */
  manufacturers: EcosystemManufacturer[];
}
// ─── Category Tree ───────────────────────────────────────────────────────────
/**
 * Single source of truth for the category/subcategory hierarchy.
 * Entries with `children` are umbrella groups; their children hold the
 * actual CW subcategory names. Leaves without `cwId` have no id recorded
 * here — NOTE(review): presumably not yet assigned/known in CW; confirm
 * before relying on ids being present.
 */
export const CATEGORY_TREE: TopLevelCategory[] = [
  {
    name: "Technology",
    cwId: 18,
    entries: [
      { name: "GeneralEquip", cwId: 57 },
      { name: "Home Entertainment", cwId: 114 },
      { name: "Monitor", cwId: 115 },
      { name: "Printers", cwId: 120 },
      { name: "Storage", cwId: 108 },
      {
        name: "Network",
        children: [
          { name: "Network-Other", cwId: 174 },
          { name: "Network-Router", cwId: 119 },
          { name: "Network-Switch", cwId: 112 },
          { name: "Network-Wireless", cwId: 111 },
        ],
      },
      {
        name: "Computer",
        children: [
          { name: "Computer-Components", cwId: 109 },
          { name: "Computer-Desktop", cwId: 106 },
          { name: "Computer-Laptop", cwId: 107 },
        ],
      },
      {
        name: "Recurring",
        children: [
          { name: "Recurring - Online", cwId: 83 },
          { name: "Recurring - Other", cwId: 84 },
          { name: "Recurring - Protection", cwId: 81 },
          { name: "Recurring - Telephone", cwId: 133 },
        ],
      },
      {
        name: "Telephone",
        children: [
          { name: "Tele-HSet-Digital", cwId: 116 },
          { name: "Tele-HSet-IP", cwId: 206 },
          { name: "Tele-HSet-SLT" }, // cwId not recorded
          { name: "Tele-Misc", cwId: 75 },
          { name: "Tele-Paging", cwId: 76 },
          { name: "Tele-SystemCards", cwId: 135 },
          { name: "Tele-Systems", cwId: 78 },
        ],
      },
    ],
  },
  {
    name: "General",
    cwId: 25,
    entries: [
      { name: "Batteries", cwId: 80 },
      { name: "Battery Backups", cwId: 144 },
      { name: "BulkWire", cwId: 200 },
      {
        name: "Cables",
        children: [
          { name: "Cables-Adapters", cwId: 182 },
          { name: "Cables-HDMI", cwId: 176 },
          { name: "Cables-Network", cwId: 87 },
          { name: "Cables-Other", cwId: 177 },
          { name: "Cables-USB", cwId: 178 },
          { name: "Cables-VGA", cwId: 179 },
        ],
      },
      { name: "Elec Cords & Adapters", cwId: 142 },
      { name: "Enclosures", cwId: 141 },
      { name: "PowerSupply", cwId: 167 },
      {
        name: "RackEquip",
        children: [
          { name: "RackEquip-Rack", cwId: 143 },
          { name: "RackEquip-Shelves", cwId: 190 },
        ],
      },
    ],
  },
  {
    name: "Field",
    cwId: 28,
    entries: [
      { name: "Conduit" }, // cwId not recorded
      { name: "Electric", cwId: 199 },
      { name: "GateControl", cwId: 45 },
      { name: "Locksets" }, // cwId not recorded
      { name: "Other", cwId: 46 },
      { name: "Relays", cwId: 168 },
      {
        name: "AccessControl",
        children: [
          { name: "AccessControl-Controllers", cwId: 137 },
          { name: "AccessControl-Credential", cwId: 183 },
          { name: "AccessControl-LockDevices", cwId: 138 },
          { name: "AccessControl-Other", cwId: 44 },
          { name: "AccessControl-Readers", cwId: 136 },
          { name: "AccessControl-VideoEntry", cwId: 139 },
        ],
      },
      {
        name: "AlarmBurg",
        children: [
          { name: "AlarmBurg-Communicators", cwId: 96 },
          { name: "AlarmBurg-Keypads", cwId: 93 },
          { name: "AlarmBurg-Modules", cwId: 140 },
          { name: "AlarmBurg-Other", cwId: 92 },
          { name: "AlarmBurg-Panels", cwId: 42 },
          { name: "AlarmBurg-Sensors-Wireless", cwId: 147 },
          { name: "AlarmBurg-Sensors-Wired", cwId: 146 },
          { name: "AlarmBurg-Siren", cwId: 145 },
        ],
      },
      {
        name: "AlarmFire",
        children: [
          { name: "AlarmFire-Communicators", cwId: 97 },
          { name: "AlarmFire-Devices", cwId: 169 },
          { name: "AlarmFire-Modules", cwId: 170 },
          { name: "AlarmFire-Other", cwId: 98 },
          { name: "AlarmFire-Panels", cwId: 95 },
          { name: "AlarmFire-Sensors", cwId: 94 },
        ],
      },
      {
        name: "Automation",
        children: [
          { name: "Automation-General", cwId: 99 },
          { name: "Automation-HVAC", cwId: 181 },
          { name: "Automation-Lights", cwId: 180 },
          { name: "Automation-Locks", cwId: 192 },
          { name: "Automation-Thermostat" }, // cwId not recorded
        ],
      },
      {
        name: "AV",
        children: [
          { name: "AV-Adapters&Cables", cwId: 171 },
          { name: "AV-Components", cwId: 172 },
          { name: "AV-Mounts", cwId: 191 },
          { name: "AV-Other", cwId: 184 },
          { name: "AV-Speakers", cwId: 173 },
          { name: "AV-Television", cwId: 175 },
        ],
      },
      {
        name: "StrCbl",
        children: [
          { name: "StrCbl-Jacks", cwId: 186 },
          { name: "StrCbl-PatchPanel", cwId: 187 },
          { name: "StrCbl-Plates", cwId: 185 },
        ],
      },
      {
        name: "Surveillance",
        children: [
          { name: "Surveillance-Accs", cwId: 90 },
          { name: "Surveillance-CamerasAnalog", cwId: 89 },
          { name: "Surveillance-CamerasIP", cwId: 88 },
          { name: "Surveillance-NVR", cwId: 43 },
        ],
      },
    ],
  },
];
// ─── Ecosystem Tree ──────────────────────────────────────────────────────────
/**
 * Cross-cutting ecosystem groupings. A catalog item belongs to an ecosystem
 * when its manufacturer name matches (case-insensitive) AND its subcategory
 * starts with that manufacturer's `subcategoryPrefix` — see matchesEcosystem.
 */
export const ECOSYSTEM_TREE: Ecosystem[] = [
  {
    name: "Networking",
    manufacturers: [
      {
        name: "Ubiquiti",
        cwId: 248,
        category: "Technology",
        subcategoryPrefix: "Network-",
      },
      {
        name: "TP-Link",
        cwId: 259,
        category: "Technology",
        subcategoryPrefix: "Network-",
      },
    ],
  },
  {
    name: "Video Surveillance",
    manufacturers: [
      {
        name: "Uniview",
        cwId: 239,
        category: "Field",
        subcategoryPrefix: "Surveillance-",
      },
      {
        name: "Hikvision",
        cwId: 299,
        category: "Field",
        subcategoryPrefix: "Surveillance-",
      },
      {
        name: "Alarm.com",
        cwId: 294,
        category: "Field",
        subcategoryPrefix: "Surveillance-",
      },
    ],
  },
  {
    name: "Burg/Alarm",
    manufacturers: [
      {
        name: "Qolsys",
        cwId: 376,
        category: "Field",
        subcategoryPrefix: "AlarmBurg-",
      },
      {
        name: "DSC",
        cwId: 287,
        category: "Field",
        subcategoryPrefix: "AlarmBurg-",
      },
    ],
  },
];
// ─── Derived helpers ─────────────────────────────────────────────────────────
/**
 * Flattens one top-level category into the list of every subcategory name
 * beneath it, expanding umbrella groups into their children.
 * Returns [] when the category name is unknown.
 */
export function getSubcategoriesForCategory(categoryName: string): string[] {
  const category = CATEGORY_TREE.find((c) => c.name === categoryName);
  if (!category) return [];
  return category.entries.flatMap((entry) =>
    isCategoryGroup(entry)
      ? entry.children.map((child) => child.name)
      : [entry.name],
  );
}
/**
* Returns all subcategory names under a given umbrella group within a category.
* e.g. getSubcategoriesForGroup("Field", "AlarmBurg") → ["AlarmBurg-Communicators", ...]
*/
export function getSubcategoriesForGroup(
categoryName: string,
groupName: string,
): string[] {
const category = CATEGORY_TREE.find((c) => c.name === categoryName);
if (!category) return [];
const group = category.entries.find(
(e) => isCategoryGroup(e) && e.name === groupName,
);
if (!group || !isCategoryGroup(group)) return [];
return group.children.map((c) => c.name);
}
/**
 * Returns all top-level category names, in tree order.
 */
export function getCategoryNames(): string[] {
  const names: string[] = [];
  for (const category of CATEGORY_TREE) {
    names.push(category.name);
  }
  return names;
}
/**
 * Returns the umbrella group (and its category) containing a subcategory,
 * or null when the subcategory is a direct entry / unknown.
 */
export function getGroupForSubcategory(
  subcategoryName: string,
): { category: string; group: string } | null {
  for (const cat of CATEGORY_TREE) {
    const match = cat.entries.find(
      (entry) =>
        isCategoryGroup(entry) &&
        entry.children.some((child) => child.name === subcategoryName),
    );
    if (match) {
      return { category: cat.name, group: match.name };
    }
  }
  return null;
}
/**
* Returns the full tree serialized for the API / UI consumption.
* Each top-level category includes its entries, with umbrella groups
* expanded to show children.
*/
export function serializeCategoryTree() {
return CATEGORY_TREE.map((cat) => ({
name: cat.name,
cwId: cat.cwId ?? null,
entries: cat.entries.map((entry) => {
if (isCategoryGroup(entry)) {
return {
type: "group" as const,
name: entry.name,
subcategories: entry.children.map((c) => ({
name: c.name,
cwId: c.cwId ?? null,
})),
};
}
return {
type: "subcategory" as const,
name: entry.name,
cwId: (entry as SubcategoryNode).cwId ?? null,
};
}),
}));
}
/**
 * Serializes the ecosystem tree for API / UI consumption.
 * Missing manufacturer cwIds are emitted as null.
 */
export function serializeEcosystemTree() {
  return ECOSYSTEM_TREE.map(({ name, manufacturers }) => ({
    name,
    manufacturers: manufacturers.map((mfr) => ({
      name: mfr.name,
      cwId: mfr.cwId ?? null,
      category: mfr.category,
      subcategoryPrefix: mfr.subcategoryPrefix,
    })),
  }));
}
/**
 * Returns a flat list of every known subcategory name across all categories,
 * expanding umbrella groups into their children.
 */
export function getAllSubcategoryNames(): string[] {
  return CATEGORY_TREE.flatMap((cat) =>
    cat.entries.flatMap((entry) =>
      isCategoryGroup(entry)
        ? entry.children.map((child) => child.name)
        : entry.name,
    ),
  );
}
/**
 * Resolves which top-level category a CW subcategory name belongs to
 * (checking both direct entries and umbrella-group children), or null
 * when the subcategory is unknown.
 */
export function getCategoryForSubcategory(
  subcategoryName: string,
): string | null {
  for (const cat of CATEGORY_TREE) {
    for (const entry of cat.entries) {
      const hit = isCategoryGroup(entry)
        ? entry.children.some((child) => child.name === subcategoryName)
        : entry.name === subcategoryName;
      if (hit) {
        return cat.name;
      }
    }
  }
  return null;
}
/**
 * Returns the names of every ecosystem containing the given manufacturer
 * (matched case-insensitively).
 */
export function getEcosystemsForManufacturer(
  manufacturerName: string,
): string[] {
  const needle = manufacturerName.toLowerCase();
  const result: string[] = [];
  for (const eco of ECOSYSTEM_TREE) {
    if (eco.manufacturers.some((m) => m.name.toLowerCase() === needle)) {
      result.push(eco.name);
    }
  }
  return result;
}
/**
 * Checks whether a catalog item (manufacturer + subcategory) belongs to an
 * ecosystem: manufacturer name must match (case-insensitive) and subcategory
 * must start with that manufacturer's prefix. Null inputs never match.
 */
export function matchesEcosystem(
  ecosystemName: string,
  manufacturer: string | null,
  subcategory: string | null,
): boolean {
  const eco = ECOSYSTEM_TREE.find((e) => e.name === ecosystemName);
  if (!eco) return false;
  const mfrLower = (manufacturer ?? "").toLowerCase();
  const subcat = subcategory ?? "";
  return eco.manufacturers.some(
    (m) =>
      m.name.toLowerCase() === mfrLower &&
      subcat.startsWith(m.subcategoryPrefix),
  );
}
@@ -0,0 +1,103 @@
import { collectorSocket } from "../../constants";
/** Options forwarded verbatim to a collector; arbitrary extra keys allowed. */
export type CollectorQueryOptions = {
  select?: string[];
  include?: string[];
  [key: string]: unknown;
};
/** Ack payload when a collector run succeeds. */
type CollectorSuccessResponse<T> = {
  success: true;
  data: T;
};
/** Ack payload when a collector run fails; `error` is a message string. */
type CollectorErrorResponse = {
  success: false;
  error: string;
};
/** Discriminated union on `success` — narrow before reading `data`/`error`. */
type CollectorResponse<T> =
  | CollectorSuccessResponse<T>
  | CollectorErrorResponse;
// Socket.IO ack timeout for a collector emit, in ms.
// NOTE(review): Number(...) yields NaN if the env var is set but malformed —
// confirm whether a fallback to the default is wanted in that case.
const DEFAULT_ACK_TIMEOUT_MS = Number(
  Bun.env.COLLECTOR_ACK_TIMEOUT_MS ?? "15000",
);
// Max time to wait for the socket to establish a connection, in ms.
const DEFAULT_CONNECT_TIMEOUT_MS = Number(
  Bun.env.COLLECTOR_CONNECT_TIMEOUT_MS ?? "5000",
);
/**
 * Resolves once the shared collector socket is connected.
 *
 * No-ops when already connected; otherwise initiates a connect and waits for
 * either "connect" or "connect_error", failing after `timeoutMs`.
 *
 * @param timeoutMs - Max wait for the connection (defaults to
 *                    DEFAULT_CONNECT_TIMEOUT_MS).
 * @throws Error on timeout, or the underlying connect_error.
 */
const ensureCollectorConnected = async (
  timeoutMs = DEFAULT_CONNECT_TIMEOUT_MS,
): Promise<void> => {
  if (collectorSocket.connected) {
    return;
  }
  collectorSocket.connect();
  await new Promise<void>((resolve, reject) => {
    // Fail the wait if neither "connect" nor "connect_error" fires in time.
    const timeout = setTimeout(() => {
      cleanup();
      reject(new Error("Collector socket connection timeout"));
    }, timeoutMs);
    const onConnect = () => {
      cleanup();
      resolve();
    };
    const onConnectError = (err: Error) => {
      cleanup();
      reject(err);
    };
    // Remove both listeners and the timer on every exit path so repeated
    // calls don't accumulate handlers on the shared socket.
    const cleanup = () => {
      clearTimeout(timeout);
      collectorSocket.off("connect", onConnect);
      collectorSocket.off("connect_error", onConnectError);
    };
    collectorSocket.on("connect", onConnect);
    collectorSocket.on("connect_error", onConnectError);
  });
};
/**
 * Runs a named collector over the shared socket and returns its data.
 *
 * Ensures the socket is connected, emits `collector` with `opts`, and waits
 * up to DEFAULT_ACK_TIMEOUT_MS for the acknowledgement callback.
 *
 * @param collector - Event name of the collector to invoke.
 * @param opts - Optional query options forwarded to the collector.
 * @returns The collector's `data` payload.
 * @throws On connect failure, ack timeout, an empty ack payload, or a
 *         `success: false` response.
 */
export const runCollector = async <T = unknown>(
  collector: string,
  opts?: CollectorQueryOptions,
): Promise<T> => {
  await ensureCollectorConnected();
  const response = await new Promise<CollectorResponse<T>>(
    (resolve, reject) => {
      collectorSocket
        .timeout(DEFAULT_ACK_TIMEOUT_MS)
        .emit(
          collector,
          opts,
          // Socket.IO timeout() acks are called with (err, payload).
          (err: Error | null, payload?: CollectorResponse<T>) => {
            if (err) {
              reject(err);
              return;
            }
            if (!payload) {
              reject(
                new Error(`Collector '${collector}' returned an empty payload`),
              );
              return;
            }
            resolve(payload);
          },
        );
    },
  );
  if (!response.success) {
    throw new Error(`Collector '${collector}' failed: ${response.error}`);
  }
  return response.data;
};
@@ -0,0 +1,24 @@
/**
 * Value types a credential field can hold (stored as string discriminators).
 * MULTI_CREDENTIAL fields carry no direct value — they reference
 * sub-credential IDs instead (see CredentialField.subCredentials).
 */
export enum ValueType {
  PLAIN_TEXT = "plain_text",
  LICENSE_KEY = "license_key",
  IP_ADDRESS = "ip_address",
  GENERIC_SECRET = "generic_secret",
  BITLOCKER_KEY = "bitlocker_key",
  PASSWORD = "password",
  MULTI_CREDENTIAL = "multi_credential",
}
/** Declares one field of a credential type (the schema side). */
export interface CredentialTypeField {
  id: string; // E.g. "clientId", "clientSecret"
  name: string; // Human-readable label, e.g. "Client ID", "Client Secret"
  required: boolean;
  subFields?: CredentialTypeField[]; // For multi-credential fields, defines the sub-fields that are required
  secure: boolean; // Whether this field should be stored encrypted in the database
  valueType: ValueType; // For future extensibility, currently all fields are strings
}
/** A submitted/stored field value (the data side). */
export interface CredentialField {
  fieldId: string; // Matches CredentialTypeField.id
  value: string; // Encrypted value stored in the database
  subCredentials?: string[]; // For multi-credential fields, the IDs of the sub-credentials that are associated with this field
}
@@ -0,0 +1,75 @@
import { Collection } from "@discordjs/collection";
import {
CredentialField,
CredentialTypeField,
ValueType,
} from "./credentialTypeDefs";
import GenericError from "../../Errors/GenericError";
/** Output row of fieldValidator: one field ready for persistence. */
export interface ValidatedField {
  fieldId: string;
  /** Plaintext value to store ("" for multi-credential fields). */
  value: string;
  /** True when the value must be encrypted before storage. */
  secure: boolean;
  /** Set (true) only for multi-credential fields. */
  isMultiCredential?: boolean;
  /** Sub-credential IDs referenced by a multi-credential field. */
  subCredentials?: string[];
}
/**
 * Field Validator
 *
 * Validates submitted credential fields against the acceptable fields of a
 * credential type, then returns a processed version of each field marking
 * which values must be stored encrypted.
 *
 * Multi-credential fields are handled specially — they don't carry a direct
 * value but instead reference sub-credential IDs.
 *
 * @param fields - The fields being submitted.
 * @param acceptableFields - The acceptable fields to compare against.
 * @returns Processed fields ready for persistence.
 * @throws GenericError (400) on the first submitted field whose `fieldId`
 *         has no matching acceptable field.
 */
export const fieldValidator = async (
  fields: CredentialField[],
  acceptableFields: CredentialTypeField[],
): Promise<ValidatedField[]> => {
  const afCollection = new Collection(acceptableFields.map((f) => [f.id, f]));
  // Validate every submitted field ID up front. (The previous version wrapped
  // these purely synchronous checks in Promise.all over async callbacks;
  // a plain loop is equivalent — the async function still rejects on throw —
  // and reports the same first-invalid field.)
  for (const field of fields) {
    if (!afCollection.has(field.fieldId)) {
      throw new GenericError({
        message: `Invalid field ID: ${field.fieldId}`,
        name: "InvalidCredentialField",
        cause: `No acceptable field with ID '${field.fieldId}' found.`,
        status: 400,
      });
    }
  }
  return fields.map((field) => {
    // Safe: every fieldId was verified present above.
    const matchingField = afCollection.get(field.fieldId)!;
    // Multi-credential fields don't carry a direct value;
    // they reference sub-credential IDs instead.
    if (matchingField.valueType === ValueType.MULTI_CREDENTIAL) {
      return {
        fieldId: field.fieldId,
        value: "",
        secure: false,
        isMultiCredential: true,
        subCredentials: field.subCredentials ?? [],
      };
    }
    return {
      fieldId: field.fieldId,
      value: field.value,
      secure: matchingField.secure,
    };
  });
};
@@ -0,0 +1,27 @@
import Password from "../tools/Password";
import crypto from "crypto";
import { secureValuesPublicKey } from "../../constants";
/**
 * Encrypts `content` with the secure-values RSA public key and returns the
 * ciphertext alongside a hash of the plaintext for later integrity checks.
 *
 * @param content - Plaintext value to protect.
 * @returns `{ encrypted, hash }` — base64 RSA-OAEP(SHA-256) ciphertext plus
 *          the Password.hash of the plaintext.
 */
export const generateSecureValue = (content: string) => {
  // Hash the plaintext so reads can verify integrity after decryption.
  const contentHash = Password.hash(content);
  // Parse the PEM-encoded .secureValues.pub key into a KeyObject.
  const keyObject = crypto.createPublicKey(secureValuesPublicKey);
  const payload = Buffer.from(content, "utf-8");
  // RSA-OAEP with SHA-256 — must mirror the padding used when decrypting.
  const ciphertext = crypto.publicEncrypt(
    {
      key: keyObject,
      padding: crypto.constants.RSA_PKCS1_OAEP_PADDING,
      oaepHash: "sha256",
    },
    payload,
  );
  return {
    encrypted: ciphertext.toString("base64"),
    hash: contentHash,
  };
};
@@ -0,0 +1,46 @@
import Password from "../tools/Password";
import crypto from "crypto";
import { secureValuesPrivateKey } from "../../constants";
import GenericError from "../../Errors/GenericError";
// Parse the .secureValues.key private key once at module load.
const privateKey = crypto.createPrivateKey(secureValuesPrivateKey);
/**
 * Decrypts a secure value previously produced by generateSecureValue.
 *
 * @param encryptedContent - Base64 RSA-OAEP(SHA-256) ciphertext.
 * @param hash - Optional Password hash of the original plaintext; when given,
 *               the decrypted content is validated against it.
 * @returns The decrypted plaintext.
 * @throws GenericError (422) when decryption fails (key mismatch) or when
 *         the optional hash validation fails.
 */
export const readSecureValue = (
  encryptedContent: string,
  hash?: string,
): string => {
  let decrypted: Buffer;
  try {
    decrypted = crypto.privateDecrypt(
      {
        key: privateKey,
        padding: crypto.constants.RSA_PKCS1_OAEP_PADDING,
        oaepHash: "sha256",
      },
      Buffer.from(encryptedContent, "base64"),
    );
  } catch {
    throw new GenericError({
      name: "SecureValueDecryptionError",
      message:
        "Unable to decrypt secure value. The value was encrypted with a different key and must be re-entered.",
      cause:
        "RSA key mismatch — the current private key does not match the public key used to encrypt this value.",
      status: 422,
    });
  }
  const content = decrypted.toString("utf-8");
  // Optionally validate the hash if provided
  if (hash) {
    const isValid = Password.validate(content, hash);
    if (!isValid) {
      // Consistent with the decryption failure above: surface a structured
      // GenericError (was a bare Error) so API callers receive a status.
      // GenericError extends Error, so existing catch-all handlers still work.
      throw new GenericError({
        name: "SecureValueHashMismatchError",
        message: "Secure value hash validation failed",
        cause:
          "Decrypted content does not match the stored integrity hash.",
        status: 422,
      });
    }
  }
  return content;
};
@@ -0,0 +1,186 @@
import { Collection } from "@discordjs/collection";
import { connectWiseApi } from "../../../constants";
import {
CWActivity,
CWActivitySummary,
CWCreateActivity,
CWPatchOperation,
} from "./activity.types";
/**
 * ConnectWise /sales/activities API client. All methods return raw CW
 * payloads (see activity.types) and propagate HTTP errors from the
 * underlying connectWiseApi instance.
 */
export const activityCw = {
  /**
   * Count Activities
   *
   * Returns the total number of activities in ConnectWise.
   * Optionally accepts CW conditions string for filtered counts.
   */
  countItems: async (conditions?: string): Promise<number> => {
    const query = conditions
      ? `/sales/activities/count?conditions=${encodeURIComponent(conditions)}`
      : "/sales/activities/count";
    const response = await connectWiseApi.get(query);
    return response.data.count;
  },
  /**
   * Fetch All Activity Summaries
   *
   * Lightweight fetch returning only id and _info (for lastUpdated comparison).
   * Paginates through all activities.
   */
  fetchAllSummaries: async (): Promise<
    Collection<number, CWActivitySummary>
  > => {
    const allItems = new Collection<number, CWActivitySummary>();
    const pageSize = 1000;
    // Page count is derived from a count taken up front; items created
    // mid-pagination may be missed until the next run.
    const count = await activityCw.countItems();
    const totalPages = Math.ceil(count / pageSize);
    for (let page = 0; page < totalPages; page++) {
      const response = await connectWiseApi.get(
        `/sales/activities?page=${page + 1}&pageSize=${pageSize}&fields=id,_info`,
      );
      const items: CWActivitySummary[] = response.data;
      for (const item of items) {
        allItems.set(item.id, item);
      }
    }
    return allItems;
  },
  /**
   * Fetch All Activities (Full)
   *
   * Fetches all activities with complete data. Paginates through
   * the full list. Optionally accepts CW conditions string for filtering.
   */
  fetchAll: async (
    conditions?: string,
  ): Promise<Collection<number, CWActivity>> => {
    const allItems = new Collection<number, CWActivity>();
    const pageSize = 1000;
    const count = await activityCw.countItems(conditions);
    const totalPages = Math.ceil(count / pageSize);
    // Loop-invariant: build the conditions query fragment once, not per page.
    const conditionsParam = conditions
      ? `&conditions=${encodeURIComponent(conditions)}`
      : "";
    for (let page = 0; page < totalPages; page++) {
      const response = await connectWiseApi.get(
        `/sales/activities?page=${page + 1}&pageSize=${pageSize}${conditionsParam}`,
      );
      const items: CWActivity[] = response.data;
      for (const item of items) {
        allItems.set(item.id, item);
      }
    }
    return allItems;
  },
  /**
   * Fetch Single Activity
   *
   * Fetches a single activity by its ConnectWise ID.
   */
  fetch: async (id: number): Promise<CWActivity> => {
    const response = await connectWiseApi.get(`/sales/activities/${id}`);
    return response.data;
  },
  /**
   * Fetch Activities by Company
   *
   * Fetches all activities associated with a specific ConnectWise company ID.
   */
  fetchByCompany: async (
    cwCompanyId: number,
  ): Promise<Collection<number, CWActivity>> => {
    return activityCw.fetchAll(`company/id=${cwCompanyId}`);
  },
  /**
   * Fetch Activities by Opportunity
   *
   * Fetches all activities associated with a specific opportunity ID.
   */
  fetchByOpportunity: async (
    opportunityId: number,
  ): Promise<Collection<number, CWActivity>> => {
    return activityCw.fetchAll(`opportunity/id=${opportunityId}`);
  },
  /**
   * Fetch Activities by Opportunity (Direct)
   *
   * Lightweight single-call variant that skips the count request.
   * Fetches up to 1000 activities in a single GET — sufficient for
   * virtually all opportunities. Used by the background cache refresh
   * to avoid doubling CW API calls.
   */
  fetchByOpportunityDirect: async (
    opportunityId: number,
  ): Promise<CWActivity[]> => {
    const conditions = encodeURIComponent(`opportunity/id=${opportunityId}`);
    const response = await connectWiseApi.get(
      `/sales/activities?pageSize=1000&conditions=${conditions}`,
    );
    return response.data;
  },
  /**
   * Create Activity
   *
   * Creates a new activity in ConnectWise.
   */
  create: async (activity: CWCreateActivity): Promise<CWActivity> => {
    const response = await connectWiseApi.post("/sales/activities", activity);
    return response.data;
  },
  /**
   * Update Activity (PATCH)
   *
   * Updates an existing activity using JSON Patch operations.
   */
  update: async (
    id: number,
    operations: CWPatchOperation[],
  ): Promise<CWActivity> => {
    const response = await connectWiseApi.patch(
      `/sales/activities/${id}`,
      operations,
    );
    return response.data;
  },
  /**
   * Replace Activity (PUT)
   *
   * Replaces an entire activity record in ConnectWise.
   */
  replace: async (
    id: number,
    activity: CWCreateActivity,
  ): Promise<CWActivity> => {
    const response = await connectWiseApi.put(
      `/sales/activities/${id}`,
      activity,
    );
    return response.data;
  },
  /**
   * Delete Activity
   *
   * Deletes an activity by its ConnectWise ID.
   */
  delete: async (id: number): Promise<void> => {
    await connectWiseApi.delete(`/sales/activities/${id}`);
  },
};
@@ -0,0 +1,123 @@
/** Minimal CW entity reference (id + display name). */
interface CWReference {
  id: number;
  name: string;
  _info?: Record<string, string>;
}
/** CW member reference; `identifier` is the member's login identifier. */
interface CWMemberReference {
  id: number;
  identifier: string;
  name: string;
  _info?: Record<string, string>;
}
/** CW company reference; `identifier` is the company identifier string. */
interface CWCompanyReference {
  id: number;
  identifier: string;
  name: string;
  _info?: Record<string, string>;
}
/** CW contact reference. */
interface CWContactReference {
  id: number;
  name: string;
  _info?: Record<string, string>;
}
/**
 * Full CW activity record as returned by /sales/activities.
 * NOTE(review): all reference fields are typed required here, but CW may
 * omit some (e.g. opportunity, ticket) on real payloads — confirm against
 * live responses before depending on their presence.
 */
export interface CWActivity {
  id: number;
  name: string;
  type: CWReference;
  company: CWCompanyReference;
  contact: CWContactReference;
  phoneNumber: string;
  email: string;
  status: CWReference;
  opportunity: CWReference;
  ticket: CWReference;
  agreement: CWReference;
  campaign: CWReference;
  notes: string;
  dateStart: string;
  dateEnd: string;
  assignTo: CWMemberReference;
  scheduleStatus: CWReference;
  reminder: CWReference;
  where: CWReference;
  notifyFlag: boolean;
  mobileGuid: string;
  currency: CWReference;
  customFields: CWActivityCustomField[];
  _info: CWActivityInfo;
}
/** One entry of a CW activity's custom-field list. */
export interface CWActivityCustomField {
  id: number;
  caption: string;
  type: string;
  entryMethod: string;
  numberOfDecimals: number;
  value: unknown;
}
/** Audit metadata attached to every CW activity (_info). */
export interface CWActivityInfo {
  lastUpdated: string;
  updatedBy: string;
  dateEntered: string;
  enteredBy: string;
}
/** Lightweight id + _info shape used for lastUpdated comparisons. */
export interface CWActivitySummary {
  id: number;
  _info?: Record<string, string>;
}
/** Payload for POST /sales/activities — only `name` is required. */
export interface CWCreateActivity {
  name: string;
  type?: { id: number };
  company?: { id: number };
  contact?: { id: number };
  phoneNumber?: string;
  email?: string;
  status?: { id: number };
  opportunity?: { id: number };
  ticket?: { id: number };
  agreement?: { id: number };
  campaign?: { id: number };
  notes?: string;
  dateStart?: string;
  dateEnd?: string;
  assignTo?: { id: number };
  scheduleStatus?: { id: number };
  reminder?: { id: number };
  where?: { id: number };
  notifyFlag?: boolean;
}
/**
 * Partial-update shape (all fields optional).
 * NOTE(review): not referenced by activityCw, which PATCHes via
 * CWPatchOperation — confirm this type is still needed by other callers.
 */
export interface CWUpdateActivity {
  name?: string;
  type?: { id: number };
  company?: { id: number };
  contact?: { id: number };
  phoneNumber?: string;
  email?: string;
  status?: { id: number };
  opportunity?: { id: number };
  ticket?: { id: number };
  agreement?: { id: number };
  campaign?: { id: number };
  notes?: string;
  dateStart?: string;
  dateEnd?: string;
  assignTo?: { id: number };
  scheduleStatus?: { id: number };
  reminder?: { id: number };
  where?: { id: number };
  notifyFlag?: boolean;
}
/**
 * JSON Patch operation for CW PATCH endpoints.
 * NOTE(review): `value` is typed required even for "remove" ops, which carry
 * no value in JSON Patch — confirm whether it should be optional.
 */
export interface CWPatchOperation {
  op: "replace" | "add" | "remove";
  path: string;
  value: unknown;
}
@@ -0,0 +1,27 @@
import GenericError from "../../../Errors/GenericError";
import { activityCw } from "./activities";
import { CWActivity, CWCreateActivity } from "./activity.types";
/**
 * Create a new activity in ConnectWise.
 *
 * @param activity - The activity data to create
 * @returns The newly created CW activity object
 * @throws GenericError (502) if the creation fails
 */
export const createActivity = async (
  activity: CWCreateActivity,
): Promise<CWActivity> => {
  try {
    return await activityCw.create(activity);
  } catch (error) {
    // Prefer the CW response body when present; fall back to the error itself.
    const response = (error as { response?: { data?: unknown } }).response;
    const errBody = response?.data || error;
    console.error("Error creating activity:", errBody);
    const cause =
      typeof errBody === "string" ? errBody : JSON.stringify(errBody);
    throw new GenericError({
      name: "CreateActivityError",
      message: "Failed to create activity in ConnectWise",
      cause,
      status: 502,
    });
  }
};
@@ -0,0 +1,27 @@
import GenericError from "../../../Errors/GenericError";
import { activityCw } from "./activities";
import { CWActivity } from "./activity.types";
/**
 * Fetch a single activity by its ConnectWise ID.
 *
 * @param cwActivityId - The ConnectWise activity ID
 * @returns The full CW activity object
 * @throws GenericError (502) if the fetch fails
 */
export const fetchActivity = async (
  cwActivityId: number,
): Promise<CWActivity> => {
  try {
    return await activityCw.fetch(cwActivityId);
  } catch (error) {
    // Prefer the CW response body when present; fall back to the error itself.
    const response = (error as { response?: { data?: unknown } }).response;
    const errBody = response?.data || error;
    console.error(`Error fetching activity with ID ${cwActivityId}:`, errBody);
    const cause =
      typeof errBody === "string" ? errBody : JSON.stringify(errBody);
    throw new GenericError({
      name: "FetchActivityError",
      message: `Failed to fetch activity ${cwActivityId}`,
      cause,
      status: 502,
    });
  }
};
@@ -0,0 +1,28 @@
import { Collection } from "@discordjs/collection";
import GenericError from "../../../Errors/GenericError";
import { activityCw } from "./activities";
import { CWActivity } from "./activity.types";
/**
 * Fetch all activities from ConnectWise with optional conditions.
 *
 * @param conditions - Optional CW conditions string for filtering
 * @returns A Collection of CW activities keyed by their ID
 * @throws GenericError (502) if the fetch fails
 */
export const fetchAllActivities = async (
  conditions?: string,
): Promise<Collection<number, CWActivity>> => {
  try {
    return await activityCw.fetchAll(conditions);
  } catch (error) {
    // Prefer the CW response body when present; fall back to the error itself.
    const response = (error as { response?: { data?: unknown } }).response;
    const errBody = response?.data || error;
    console.error("Error fetching all activities:", errBody);
    const cause =
      typeof errBody === "string" ? errBody : JSON.stringify(errBody);
    throw new GenericError({
      name: "FetchAllActivitiesError",
      message: "Failed to fetch activities from ConnectWise",
      cause,
      status: 502,
    });
  }
};
@@ -0,0 +1,15 @@
// Barrel module for the CW activities client: raw client, typed wrappers,
// and the CW payload types.
export { activityCw } from "./activities";
export { fetchActivity } from "./fetchActivity";
export { fetchAllActivities } from "./fetchAllActivities";
export { createActivity } from "./createActivity";
export { updateActivity } from "./updateActivity";
export type {
  CWActivity,
  CWActivitySummary,
  CWActivityCustomField,
  CWActivityInfo,
  CWCreateActivity,
  CWUpdateActivity,
  CWPatchOperation,
} from "./activity.types";
@@ -0,0 +1,29 @@
import GenericError from "../../../Errors/GenericError";
import { activityCw } from "./activities";
import { CWActivity, CWPatchOperation } from "./activity.types";
/**
 * Update an existing activity in ConnectWise using JSON Patch operations.
 *
 * @param cwActivityId - The ConnectWise activity ID to update
 * @param operations - Array of JSON Patch operations to apply
 * @returns The updated CW activity object
 * @throws GenericError (502) if the update fails
 */
export const updateActivity = async (
  cwActivityId: number,
  operations: CWPatchOperation[],
): Promise<CWActivity> => {
  try {
    return await activityCw.update(cwActivityId, operations);
  } catch (error) {
    // Prefer the CW response body when present; fall back to the error itself.
    const response = (error as { response?: { data?: unknown } }).response;
    const errBody = response?.data || error;
    console.error(`Error updating activity with ID ${cwActivityId}:`, errBody);
    const cause =
      typeof errBody === "string" ? errBody : JSON.stringify(errBody);
    throw new GenericError({
      name: "UpdateActivityError",
      message: `Failed to update activity ${cwActivityId}`,
      cause,
      status: 502,
    });
  }
};
@@ -0,0 +1,68 @@
import { Company } from "../../types/ConnectWiseTypes";
import {
CollectorCompanyRecord,
CompanySourceRecord,
NormalizedCompanyRecord,
} from "../../types/CompanySourceTypes";
/**
 * Runtime guard distinguishing collector-sourced company rows from CW API
 * companies: collector rows carry a numeric `companyRecId` plus
 * `companyId`/`companyName` keys.
 */
export const isCollectorCompanyRecord = (
  value: unknown,
): value is CollectorCompanyRecord => {
  if (typeof value !== "object" || value === null) {
    return false;
  }
  const record = value as Partial<CollectorCompanyRecord>;
  if (typeof record.companyRecId !== "number") {
    return false;
  }
  return "companyId" in record && "companyName" in record;
};
// Collector row → normalized shape; null when id or name is missing/empty.
const normalizeFromCollector = (
  company: CollectorCompanyRecord,
): NormalizedCompanyRecord | null => {
  const { companyRecId, companyId, companyName } = company;
  if (!companyId || !companyName) {
    return null;
  }
  return { id: companyRecId, identifier: companyId, name: companyName };
};
// CW API company → normalized shape; null when identifier or name is missing/empty.
const normalizeFromCwApi = (
  company: Company,
): NormalizedCompanyRecord | null => {
  const { id, identifier, name } = company;
  if (!identifier || !name) {
    return null;
  }
  return { id, identifier, name };
};
/**
 * Normalizes a company record from either source (collector or CW API)
 * into the common shape; null when required fields are missing.
 */
export const normalizeCompanyRecord = (
  source: CompanySourceRecord,
): NormalizedCompanyRecord | null =>
  isCollectorCompanyRecord(source)
    ? normalizeFromCollector(source)
    : normalizeFromCwApi(source);
/**
 * Normalizes a batch of company records, dropping any that fail to normalize.
 */
export const normalizeCompanyRecords = (
  sourceRecords: CompanySourceRecord[],
): NormalizedCompanyRecord[] => {
  const normalized: NormalizedCompanyRecord[] = [];
  for (const record of sourceRecords) {
    const company = normalizeCompanyRecord(record);
    if (company !== null) {
      normalized.push(company);
    }
  }
  return normalized;
};
@@ -0,0 +1,30 @@
import { connectWiseApi } from "../../../constants";
import { ConfigurationResponse } from "../../../types/ConnectWiseTypes";
import {
processConfigurationResponse,
ProcessedConfiguration,
} from "./processConfigurationResponse";
import GenericError from "../../../Errors/GenericError";
/**
 * Fetch all configurations for a CW company and normalize them via
 * `processConfigurationResponse`.
 *
 * @param cwCompanyId - ConnectWise company ID to filter configurations by
 * @returns The processed (normalized) configuration list
 * @throws GenericError (502) when the CW API call fails; the raw CW error
 *         body is logged and attached as the cause
 */
export const fetchCompanyConfigurations = async (
  cwCompanyId: number,
): Promise<ProcessedConfiguration> => {
  const url = `/company/configurations?conditions=company/id=${cwCompanyId}`;
  try {
    const { data } = await connectWiseApi.get(url);
    return processConfigurationResponse(data);
  } catch (error) {
    const errBody = (error as any).response?.data || error;
    console.error(
      `Error fetching configurations for company ID ${cwCompanyId}:`,
      errBody,
    );
    const cause =
      typeof errBody === "string" ? errBody : JSON.stringify(errBody);
    throw new GenericError({
      name: "FetchCompanyConfigurationsError",
      message: `Failed to fetch configurations for company ${cwCompanyId}`,
      cause,
      status: 502,
    });
  }
};
@@ -0,0 +1,29 @@
import { ConfigurationResponse } from "../../../types/ConnectWiseTypes";
/** Normalized configuration list shape — derived from the mapper's return type so the two can never drift apart. */
export type ProcessedConfiguration = ReturnType<
  typeof processConfigurationResponse
>;
/**
 * Map a raw CW configuration response to the internal shape: rename
 * `activeFlag` → `active`, keep only id/name of the status object, and
 * flatten question fields. `questions` becomes null when CW omits them.
 */
export const processConfigurationResponse = (c: ConfigurationResponse) =>
  c.map((item) => {
    const questions = item.questions
      ? item.questions.map((q) => ({
          id: q.questionId,
          question: q.question,
          answer: q.answer,
          fieldType: q.fieldType,
        }))
      : null;
    return {
      id: item.id,
      name: item.name,
      active: item.activeFlag,
      serialNumber: item.serialNumber,
      type: item.type,
      notes: item.notes,
      status: { id: item.status.id, name: item.status.name },
      questions,
      info: item._info,
    };
  });
+142
View File
@@ -0,0 +1,142 @@
/**
* @module cwApiLogger
*
* Axios interceptor-based logger that records every ConnectWise API
* request to a JSONL (newline-delimited JSON) file for post-hoc analysis.
*
* Each line in the log file is a self-contained JSON object with:
* - timestamp (ISO-8601)
* - method, url, baseURL
* - status (HTTP status or null on network error)
* - durationMs (wall-clock time from request start → response/error)
* - error (error code / message, if any)
* - timeout (configured timeout in ms)
*
* Logging is **opt-in** — set the `LOG_CW_API` environment variable to
* any truthy value to enable it. When enabled, each process start creates
* a new timestamped file inside the `cw-api-logs/` directory:
*
* LOG_CW_API=1 bun run dev # uses cw-api-logs/<timestamp>.jsonl
* bun run dev:log # shorthand (sets LOG_CW_API=1)
*
* Appends are non-blocking (fire-and-forget) to avoid slowing down
* the actual API flow.
*
* Usage:
* import { attachCwApiLogger } from "./modules/cw-utils/cwApiLogger";
* attachCwApiLogger(connectWiseApi);
*/
import { appendFile, mkdir } from "fs/promises";
import path from "path";
import type { AxiosInstance, InternalAxiosRequestConfig } from "axios";
/** Directory (under the process CWD) that holds all CW API log files. */
const LOG_DIR = path.resolve(process.cwd(), "cw-api-logs");
/** Build a timestamped filename like `2026-03-02T14-30-05.123Z.jsonl` (colons → dashes for filesystem safety). */
function buildLogPath(): string {
  const stamp = new Date().toISOString().split(":").join("-");
  return path.join(LOG_DIR, `${stamp}.jsonl`);
}
// Path of the active log file; stays null until logging is enabled in
// attachCwApiLogger (i.e. when the LOG_CW_API env var is set).
let LOG_PATH: string | null = null;
// Symbol used to stash the start time on the request config
// (a symbol key cannot collide with axios' own config fields)
const START_TIME = Symbol("cwLogStartTime");
/** Axios request config augmented with our high-resolution start timestamp. */
interface TimedConfig extends InternalAxiosRequestConfig {
  [START_TIME]?: number;
}
/** One JSONL line in the CW API log file. */
export interface CwApiLogEntry {
  /** ISO-8601 time the entry was written */
  timestamp: string;
  /** Upper-cased HTTP method (defaults to GET when axios omits it) */
  method: string;
  url: string;
  baseURL: string;
  /** HTTP status, or null when the request never got a response */
  status: number | null;
  /** Wall-clock request duration, rounded to whole milliseconds */
  durationMs: number;
  /** `code: message` for failures, null on success */
  error: string | null;
  /** Configured axios timeout in ms, if any */
  timeout: number | undefined;
}
/** Append one entry to the active log file (fire-and-forget; no-op when logging is disabled). */
function writeEntry(entry: CwApiLogEntry): void {
  if (LOG_PATH === null) return;
  const line = `${JSON.stringify(entry)}\n`;
  appendFile(LOG_PATH, line).catch((err) => {
    // Swallow write errors — logging should never crash the app
    console.error("[cw-logger] failed to write log entry:", err.message);
  });
}
/**
* Attach request/response interceptors to an Axios instance to log
* every CW API call with timing information.
*/
export function attachCwApiLogger(api: AxiosInstance): void {
if (!process.env.LOG_CW_API) {
return;
}
// Create the log directory and build a unique file path for this run
LOG_PATH = buildLogPath();
mkdir(LOG_DIR, { recursive: true }).catch((err) => {
console.error("[cw-logger] failed to create log directory:", err.message);
});
// ---- Request interceptor: record start time --------------------------
api.interceptors.request.use((config: TimedConfig) => {
config[START_TIME] = performance.now();
return config;
});
// ---- Response interceptor: log successful calls ----------------------
api.interceptors.response.use(
(response) => {
const config = response.config as TimedConfig;
const start = config[START_TIME] ?? performance.now();
const durationMs = Math.round(performance.now() - start);
writeEntry({
timestamp: new Date().toISOString(),
method: (config.method ?? "GET").toUpperCase(),
url: config.url ?? "",
baseURL: config.baseURL ?? "",
status: response.status,
durationMs,
error: null,
timeout: config.timeout,
});
return response;
},
// ---- Error interceptor: log failed calls -----------------------------
(err) => {
const config = (err.config ?? {}) as TimedConfig;
const start = config[START_TIME] ?? performance.now();
const durationMs = Math.round(performance.now() - start);
writeEntry({
timestamp: new Date().toISOString(),
method: (config.method ?? "GET").toUpperCase(),
url: config.url ?? "",
baseURL: config.baseURL ?? "",
status: err.response?.status ?? null,
durationMs,
error: err.code
? `${err.code}: ${err.message}`
: (err.message ?? "unknown"),
timeout: config.timeout,
});
return Promise.reject(err);
},
);
console.log(`[cw-logger] logging CW API calls to ${LOG_PATH}`);
}
/** Current log file path, or null while LOG_CW_API logging is disabled. */
export function getCwLogPath(): string | null {
  return LOG_PATH;
}
@@ -0,0 +1,79 @@
/**
* CW API Concurrency Limiter
*
* Limits the number of simultaneous in-flight requests to the ConnectWise
* API. CW responds significantly slower under high concurrency (observed
* ~3× slower at 9 concurrent vs 56 concurrent), so bounding the
* parallelism actually reduces total wall-clock time.
*
* Implemented as an Axios request interceptor that gates on a simple
* counting semaphore. When the limit is reached, new requests queue and
* resolve in FIFO order as earlier requests complete.
*/
import type { AxiosInstance, InternalAxiosRequestConfig } from "axios";
// ---------------------------------------------------------------------------
// Semaphore
// ---------------------------------------------------------------------------
/**
 * Minimal counting semaphore. `acquire()` resolves immediately while
 * fewer than `limit` slots are held; otherwise the caller waits in FIFO
 * order until `release()` hands it a freed slot.
 */
class Semaphore {
  private held = 0;
  private waiters: (() => void)[] = [];

  constructor(private readonly limit: number) {}

  /** Acquire a slot — resolves immediately if under the limit, else waits. */
  acquire(): Promise<void> {
    if (this.held >= this.limit) {
      return new Promise<void>((wake) => this.waiters.push(wake));
    }
    this.held += 1;
    return Promise.resolve();
  }

  /** Release a slot — wakes the next queued caller, if any. */
  release(): void {
    const wake = this.waiters.shift();
    if (wake === undefined) {
      this.held -= 1;
    } else {
      // Transfer the slot directly to the waiter — held count is unchanged.
      wake();
    }
  }
}
// ---------------------------------------------------------------------------
// Interceptor attachment
// ---------------------------------------------------------------------------
/**
 * Attach a concurrency-limiting interceptor to an Axios instance.
 *
 * Outgoing requests wait on a counting semaphore before they fire; the
 * slot is returned when the response (or error) comes back, waking queued
 * callers in FIFO order.
 *
 * @param api - The Axios instance to limit.
 * @param max - Maximum concurrent in-flight requests (default: 6).
 */
export function attachCwConcurrencyLimiter(api: AxiosInstance, max = 6): void {
  const gate = new Semaphore(max);
  // Gate each outgoing request on a free slot.
  api.interceptors.request.use(async (config: InternalAxiosRequestConfig) => {
    await gate.acquire();
    return config;
  });
  // Free the slot whether the call succeeded or failed.
  api.interceptors.response.use(
    (response) => {
      gate.release();
      return response;
    },
    (error) => {
      gate.release();
      return Promise.reject(error);
    },
  );
}
@@ -0,0 +1,64 @@
import { Collection } from "@discordjs/collection";
import { connectWiseApi } from "../../constants";
import { runCollector } from "../collector-client/runCollector";
import {
CollectorCompanyRecord,
NormalizedCompanyRecord,
} from "../../types/CompanySourceTypes";
import { normalizeCompanyRecords } from "./companyTranslation";
/** Key a list of normalized companies into a Collection by company ID. */
const toCompanyCollection = (
  companies: NormalizedCompanyRecord[],
): Collection<number, NormalizedCompanyRecord> =>
  new Collection<number, NormalizedCompanyRecord>(
    companies.map((company) => [company.id, company]),
  );
/**
 * Fetch every CW company, preferring the collector service and falling
 * back to paginated CW REST API calls when the collector fails or returns
 * a malformed payload.
 *
 * @returns Collection of normalized companies keyed by CW company ID
 */
export const fetchAllCwCompanies = async (): Promise<
  Collection<number, NormalizedCompanyRecord>
> => {
  // Fast path: bulk data from the collector service.
  try {
    console.log("[fetchAllCwCompanies] Attempting to fetch via collector...");
    const collectorCompanies =
      await runCollector<CollectorCompanyRecord[]>("fetchCompanies");
    if (!Array.isArray(collectorCompanies)) {
      throw new Error("Collector payload was not an array");
    }
    console.log(
      `[fetchAllCwCompanies] ✓ Successfully used collector data (${collectorCompanies.length} companies)`,
    );
    return toCompanyCollection(normalizeCompanyRecords(collectorCompanies));
  } catch (err) {
    console.error(
      `[fetchAllCwCompanies] ✗ Collector fetchCompanies failed, falling back to CW API:`,
      err instanceof Error ? { message: err.message, stack: err.stack } : err,
    );
  }
  // Fallback: page through the CW REST API. Count first so we know how
  // many pages to request (CW pages are 1-based).
  const pageCount = 1000;
  const count = (await connectWiseApi.get("/company/companies/count")).data
    .count;
  const totalPages = Math.ceil(count / pageCount);
  const companies: NormalizedCompanyRecord[] = [];
  for (let page = 1; page <= totalPages; page++) {
    const response = await connectWiseApi.get(
      `/company/companies?page=${page}&pageSize=${pageCount}`,
    );
    companies.push(...normalizeCompanyRecords(response.data));
  }
  // Reuse the shared helper instead of hand-building the Collection.
  return toCompanyCollection(companies);
};
+25
View File
@@ -0,0 +1,25 @@
import { connectWiseApi } from "../../constants";
import { Company } from "../../types/ConnectWiseTypes";
import { withCwRetry } from "./withCwRetry";
/**
 * Fetch a single CW company by ID with retry (3 attempts, 1.5s base
 * backoff). Returns null — rather than throwing — when all attempts fail,
 * logging the error body for context.
 */
export const fetchCwCompanyById = async (
  companyId: number,
): Promise<Company | null> => {
  const request = () => connectWiseApi.get(`/company/companies/${companyId}`);
  try {
    const response = await withCwRetry(request, {
      label: `fetchCompany#${companyId}`,
      maxAttempts: 3,
      baseDelayMs: 1_500,
    });
    return response.data;
  } catch (error) {
    const errBody = (error as any).response?.data || error;
    console.error(`Error fetching company with ID ${companyId}:`, errBody);
    return null;
  }
};
@@ -0,0 +1,148 @@
import { Collection } from "@discordjs/collection";
import { connectWiseApi } from "../../../constants";
import { runCollector } from "../../collector-client/runCollector";
/** Shape of a member as returned by the CW REST API. */
export interface CWMember {
  id: number;
  identifier: string;
  firstName: string;
  lastName: string;
  officeEmail: string;
  inactiveFlag: boolean;
  _info: Record<string, string>;
}

/** Shape of a member row as emitted by the collector service. */
interface CollectorMemberRecord {
  memberRecId: number;
  memberId: string;
  firstName: string | null;
  lastName: string | null;
  emailAddress: string | null;
  deleteFlag: boolean;
  lastUpdateUtc?: string | null;
  lastUpdate?: string | null;
  _info?: Record<string, string>;
}

/**
 * Type guard: collector member rows must carry a numeric `memberRecId`
 * and a string `memberId`; everything else is optional/nullable.
 */
const isCollectorMemberRecord = (
  value: unknown,
): value is CollectorMemberRecord => {
  if (typeof value !== "object" || value === null) return false;
  const record = value as Partial<CollectorMemberRecord>;
  return (
    typeof record.memberRecId === "number" &&
    typeof record.memberId === "string"
  );
};

/**
 * Convert a collector member row to the CW REST `CWMember` shape, filling
 * nullable fields with empty strings and mapping `deleteFlag` to
 * `inactiveFlag`. When the row carries no `_info`, synthesize one with the
 * best-available lastUpdated timestamp (UTC preferred).
 */
const normalizeCollectorMember = (
  member: CollectorMemberRecord,
): CWMember => {
  const lastUpdated = member.lastUpdateUtc ?? member.lastUpdate ?? "";
  return {
    id: member.memberRecId,
    identifier: member.memberId,
    firstName: member.firstName ?? "",
    lastName: member.lastName ?? "",
    officeEmail: member.emailAddress ?? "",
    inactiveFlag: Boolean(member.deleteFlag),
    _info: member._info ?? { lastUpdated },
  };
};
/**
* Fetch All CW Members
*
* Fetches every member from ConnectWise using pagination and returns them
* in a Collection keyed by their identifier (e.g. "jroberts").
*
* @param opts.conditions - Optional CW conditions string to filter members
* @returns {Promise<Collection<string, CWMember>>} Collection of CW members keyed by identifier
*/
export const fetchAllCwMembers = async (opts?: {
conditions?: string;
}): Promise<Collection<string, CWMember>> => {
if (!opts?.conditions) {
try {
const collectorMembers = await runCollector<unknown[]>("fetchMembers");
if (!Array.isArray(collectorMembers)) {
throw new Error("Collector payload was not an array");
}
const members = new Collection<string, CWMember>();
for (const member of collectorMembers) {
if (!isCollectorMemberRecord(member)) {
continue;
}
const normalized = normalizeCollectorMember(member);
members.set(normalized.identifier, normalized);
}
if (members.size > 0) {
console.log(
`[fetchAllCwMembers] Using collector data from fetchMembers (${members.size} members)`,
);
return members;
}
throw new Error("Collector payload did not contain valid member records");
} catch (err) {
console.warn(
`[fetchAllCwMembers] Collector fetchMembers failed, falling back to CW API: ${err instanceof Error ? err.message : String(err)}`,
);
}
}
const members = new Collection<string, CWMember>();
const pageSize = 1000;
const conditionsParam = opts?.conditions
? `&conditions=${encodeURIComponent(opts.conditions)}`
: "";
const { data: countData } = await connectWiseApi.get(
`/system/members/count${conditionsParam ? `?${conditionsParam.slice(1)}` : ""}`,
);
const totalPages = Math.ceil(countData.count / pageSize);
for (let page = 0; page < totalPages; page++) {
const { data } = await connectWiseApi.get<CWMember[]>(
`/system/members?page=${page + 1}&pageSize=${pageSize}${conditionsParam}`,
);
for (const member of data) {
members.set(member.identifier, member);
}
}
return members;
};
/**
 * Find CW Member Identifier by Email
 *
 * Case-insensitively matches `email` against each member's `officeEmail`
 * and returns the matching member's CW identifier (e.g. "jroberts"),
 * or null when nobody matches.
 *
 * @param email - The email address to search for
 * @param members - Optional pre-fetched member collection to search against (avoids extra API call)
 * @returns {Promise<string | null>} The CW identifier or null
 */
export const findCwIdentifierByEmail = async (
  email: string,
  members?: Collection<string, CWMember>,
): Promise<string | null> => {
  const pool = members ?? (await fetchAllCwMembers());
  const wanted = email.toLowerCase();
  for (const member of pool.values()) {
    if (member.officeEmail?.toLowerCase() === wanted) {
      return member.identifier;
    }
  }
  return null;
};
@@ -0,0 +1,141 @@
import { Collection } from "@discordjs/collection";
import { prisma } from "../../../constants";
import { redis } from "../../../constants";
import { CWMember } from "./fetchAllMembers";
// Redis key under which the member-list snapshot is persisted
const REDIS_KEY = "cw:members";
export interface ResolvedMember {
  /** Local database user ID (null if no matching local user) */
  id: string | null;
  /** CW member identifier (e.g. "jroberts") */
  identifier: string;
  /** Full name resolved from CW member cache, or raw identifier as fallback */
  name: string;
  /** ConnectWise member ID */
  cwMemberId: number | null;
}
/**
 * CW Member Cache
 *
 * Dual-layer cache (in-memory + Redis) of ConnectWise members keyed by
 * their identifier (e.g. "jroberts"). Populated by `refreshCwIdentifiers`
 * on startup and every 30 minutes thereafter.
 */
// In-memory layer; empty until the first setMemberCache/getMemberCache call
let memberCache = new Collection<string, CWMember>();
/**
 * Replace the member cache contents.
 *
 * Updates the in-memory Collection synchronously, then persists a JSON
 * snapshot of the member list to Redis.
 *
 * @param members - Collection of CW members keyed by identifier
 */
export const setMemberCache = async (members: Collection<string, CWMember>) => {
  memberCache = members;
  const snapshot = JSON.stringify([...members.values()]);
  await redis.set(REDIS_KEY, snapshot);
};
/**
 * Get the current member cache.
 *
 * Serves from memory when warm; otherwise tries to hydrate from the Redis
 * snapshot. May return an empty Collection when both layers are cold.
 */
export const getMemberCache = async (): Promise<
  Collection<string, CWMember>
> => {
  if (memberCache.size === 0) {
    const stored = await redis.get(REDIS_KEY);
    if (stored) {
      const parsed: CWMember[] = JSON.parse(stored);
      memberCache = new Collection(parsed.map((m) => [m.identifier, m]));
    }
  }
  return memberCache;
};
/**
 * Resolve CW Identifier to Full Name
 *
 * Returns "First Last" from the in-memory cache, falling back to the raw
 * identifier when the member is unknown or has an all-blank name.
 *
 * @param identifier - The CW member identifier (e.g. "jroberts")
 * @returns The member's full name (e.g. "John Roberts") or the raw identifier
 */
export const resolveMemberName = (identifier: string): string => {
  const member = memberCache.get(identifier);
  if (member === undefined) return identifier;
  const fullName = `${member.firstName} ${member.lastName}`.trim();
  return fullName || identifier;
};
/**
 * Resolve CW Identifier to Full Member Info
 *
 * Combines the in-memory CW member cache with the local user table to
 * produce a complete member reference: local user ID, CW identifier,
 * display name, and CW member ID. Missing pieces resolve to null / the
 * raw identifier.
 *
 * @param identifier - The CW member identifier (e.g. "jroberts")
 * @returns {Promise<ResolvedMember>} Resolved member info
 */
export const resolveMember = async (
  identifier: string,
): Promise<ResolvedMember> => {
  const cwMember = memberCache.get(identifier);
  let name = identifier;
  if (cwMember) {
    name = `${cwMember.firstName} ${cwMember.lastName}`.trim() || identifier;
  }
  const localUser = await prisma.user.findFirst({
    where: { cwIdentifier: identifier },
    select: { id: true },
  });
  return {
    id: localUser?.id ?? null,
    identifier,
    name,
    cwMemberId: cwMember?.id ?? null,
  };
};
/**
* Resolve Multiple CW Identifiers in a Single Batch
*
* Same as `resolveMember` but batches the DB query so that N identifiers
* require only **one** `findMany` instead of N `findFirst` calls.
*
* @param identifiers - Array of CW member identifiers
* @returns Map of identifier → ResolvedMember
*/
export const resolveMembers = async (
identifiers: string[],
): Promise<Map<string, ResolvedMember>> => {
const unique = [...new Set(identifiers)];
// Single batched DB query for all identifiers
const localUsers = await prisma.user.findMany({
where: { cwIdentifier: { in: unique } },
select: { id: true, cwIdentifier: true },
});
const userMap = new Map(localUsers.map((u) => [u.cwIdentifier, u.id]));
const result = new Map<string, ResolvedMember>();
for (const identifier of unique) {
const cwMember = memberCache.get(identifier);
const name = cwMember
? `${cwMember.firstName} ${cwMember.lastName}`.trim() || identifier
: identifier;
result.set(identifier, {
id: userMap.get(identifier) ?? null,
identifier,
name,
cwMemberId: cwMember?.id ?? null,
});
}
return result;
};
@@ -0,0 +1,46 @@
import { connectWiseApi, prisma } from "../../../constants";
import { events } from "../../globalEvents";
import { fetchAllCwMembers, findCwIdentifierByEmail } from "./fetchAllMembers";
import { setMemberCache } from "./memberCache";
/**
 * Refresh CW Identifiers
 *
 * Re-syncs every local user's `cwIdentifier` by matching user email to a
 * CW member's `officeEmail`. Only users whose identifier actually changed
 * (including from/to null) are written back, avoiding unnecessary writes.
 * Also refreshes the in-memory member cache used for name resolution, and
 * emits start/completion events.
 */
export const refreshCwIdentifiers = async () => {
  events.emit("cw:members:refresh:started");
  const allMembers = await fetchAllCwMembers();
  await setMemberCache(allMembers);
  const allUsers = await prisma.user.findMany({
    select: { id: true, email: true, cwIdentifier: true },
  });
  let updatedCount = 0;
  // One sync task per user; all run concurrently below.
  const syncUser = async (user: (typeof allUsers)[number]) => {
    const identifier = await findCwIdentifierByEmail(user.email, allMembers);
    if (identifier === user.cwIdentifier) return;
    await prisma.user.update({
      where: { id: user.id },
      data: { cwIdentifier: identifier },
    });
    updatedCount++;
  };
  await Promise.all(allUsers.map(syncUser));
  events.emit("cw:members:refresh:completed", {
    totalMembers: allMembers.size,
    totalUsers: allUsers.length,
    usersUpdated: updatedCount,
  });
};
@@ -0,0 +1,106 @@
import { prisma } from "../../../constants";
import { events } from "../../globalEvents";
import { fetchAllCwMembers, type CWMember } from "./fetchAllMembers";
import { setMemberCache } from "./memberCache";
import { CwMemberController } from "../../../controllers/CwMemberController";
/**
 * Is Regular User
 *
 * True when the CW member looks like a real, active person: not flagged
 * inactive, and has both a non-blank last name and a non-blank email.
 * Service accounts (e.g. "labtech", "Admin") fail these checks.
 */
const isRegularUser = (member: CWMember): boolean => {
  if (member.inactiveFlag) return false;
  const hasLastName = Boolean(member.lastName?.trim());
  const hasEmail = Boolean(member.officeEmail?.trim());
  return hasLastName && hasEmail;
};
/**
 * Refresh CW Members
 *
 * Syncs local CwMember records with ConnectWise using a stale-check
 * pattern:
 *   1. Fetch all members from CW (also refreshes the in-memory cache)
 *   2. Filter to regular users (active, non-service accounts)
 *   3. Load local cwMemberId → cwLastUpdated timestamps
 *   4. Diff CW lastUpdated against local timestamps to find stale/new IDs
 *   5. Upsert only the stale/new records
 *
 * Emits progress events at each phase (check / skipped / started /
 * completed) so callers can observe the sync without polling.
 */
export const refreshCwMembers = async () => {
  events.emit("cw:members:db:refresh:check");
  // 1. Fetch all members from CW
  const allCwMembers = await fetchAllCwMembers();
  // Also refresh the in-memory cache with ALL members (used for name resolution)
  await setMemberCache(allCwMembers);
  // 2. Filter to regular users only (active, has last name + email)
  const cwMembers = allCwMembers.filter(isRegularUser);
  // 3. Fetch all DB records with their identifier and cwLastUpdated
  const dbItems = await prisma.cwMember.findMany({
    select: { cwMemberId: true, cwLastUpdated: true },
  });
  const dbMap = new Map(
    dbItems.map((item) => [item.cwMemberId, item.cwLastUpdated]),
  );
  // 4. Determine stale / new IDs — stale when CW's lastUpdated is newer
  // than ours, new when there is no local row at all
  const staleIds: number[] = [];
  for (const [, member] of cwMembers) {
    const cwLastUpdated = member._info?.lastUpdated
      ? new Date(member._info.lastUpdated)
      : null;
    const dbLastUpdated = dbMap.get(member.id) ?? null;
    if (!dbLastUpdated || (cwLastUpdated && cwLastUpdated > dbLastUpdated)) {
      staleIds.push(member.id);
    }
  }
  if (staleIds.length === 0) {
    // Nothing stale — report and bail without any writes
    events.emit("cw:members:db:refresh:skipped", {
      totalCw: cwMembers.size,
      totalDb: dbItems.length,
      staleCount: 0,
    });
    return;
  }
  events.emit("cw:members:db:refresh:started", {
    totalCw: cwMembers.size,
    totalDb: dbItems.length,
    staleCount: staleIds.length,
  });
  // 5. Upsert stale/new items concurrently (upsert covers both cases)
  const staleIdSet = new Set(staleIds);
  const updatedCount = (
    await Promise.all(
      [...cwMembers.values()]
        .filter((m) => staleIdSet.has(m.id))
        .map(async (member) => {
          const mapped = CwMemberController.mapCwToDb(member);
          return prisma.cwMember.upsert({
            where: { cwMemberId: member.id },
            create: {
              cwMemberId: member.id,
              ...mapped,
            },
            update: mapped,
          });
        }),
    )
  ).filter(Boolean).length;
  events.emit("cw:members:db:refresh:completed", {
    totalCw: cwMembers.size,
    totalDb: dbItems.length,
    staleCount: staleIds.length,
    itemsUpdated: updatedCount,
  });
};
@@ -0,0 +1,28 @@
import { Collection } from "@discordjs/collection";
import GenericError from "../../../Errors/GenericError";
import { opportunityCw } from "./opportunities";
import { CWOpportunity } from "./opportunity.types";
/**
 * Fetch all opportunities from ConnectWise with optional conditions.
 *
 * @param conditions - Optional CW conditions string for filtering
 * @returns A Collection of CW opportunities keyed by their ID
 * @throws GenericError if the fetch fails
 */
export const fetchAllOpportunities = async (
  conditions?: string,
): Promise<Collection<number, CWOpportunity>> => {
  try {
    return await opportunityCw.fetchAll(conditions);
  } catch (error) {
    const errBody = (error as any).response?.data || error;
    console.error("Error fetching all opportunities:", errBody);
    const cause =
      typeof errBody === "string" ? errBody : JSON.stringify(errBody);
    throw new GenericError({
      name: "FetchAllOpportunitiesError",
      message: "Failed to fetch opportunities from ConnectWise",
      cause,
      status: 502,
    });
  }
};
@@ -0,0 +1,31 @@
import { Collection } from "@discordjs/collection";
import GenericError from "../../../Errors/GenericError";
import { opportunityCw } from "./opportunities";
import { CWOpportunity } from "./opportunity.types";
/**
 * Fetch all opportunities for a specific company from ConnectWise.
 *
 * @param cwCompanyId - The ConnectWise company ID
 * @returns A Collection of CW opportunities for the company keyed by their ID
 * @throws GenericError if the fetch fails
 */
export const fetchCompanyOpportunities = async (
  cwCompanyId: number,
): Promise<Collection<number, CWOpportunity>> => {
  try {
    return await opportunityCw.fetchByCompany(cwCompanyId);
  } catch (error) {
    const errBody = (error as any).response?.data || error;
    console.error(
      `Error fetching opportunities for company ${cwCompanyId}:`,
      errBody,
    );
    const cause =
      typeof errBody === "string" ? errBody : JSON.stringify(errBody);
    throw new GenericError({
      name: "FetchCompanyOpportunitiesError",
      message: `Failed to fetch opportunities for company ${cwCompanyId}`,
      cause,
      status: 502,
    });
  }
};
@@ -0,0 +1,30 @@
import GenericError from "../../../Errors/GenericError";
import { opportunityCw } from "./opportunities";
import { CWOpportunity } from "./opportunity.types";
/**
 * Fetch a single opportunity by its ConnectWise ID.
 *
 * @param cwOpportunityId - The ConnectWise opportunity ID
 * @returns The full CW opportunity object
 * @throws GenericError if the fetch fails
 */
export const fetchOpportunity = async (
  cwOpportunityId: number,
): Promise<CWOpportunity> => {
  try {
    return await opportunityCw.fetch(cwOpportunityId);
  } catch (error) {
    const errBody = (error as any).response?.data || error;
    console.error(
      `Error fetching opportunity with ID ${cwOpportunityId}:`,
      errBody,
    );
    const cause =
      typeof errBody === "string" ? errBody : JSON.stringify(errBody);
    throw new GenericError({
      name: "FetchOpportunityError",
      message: `Failed to fetch opportunity ${cwOpportunityId}`,
      cause,
      status: 502,
    });
  }
};
@@ -0,0 +1,558 @@
import { Collection } from "@discordjs/collection";
import { connectWiseApi } from "../../../constants";
import { runCollector } from "../../collector-client/runCollector";
import {
CWOpportunity,
CWOpportunityCreate,
CWOpportunitySummary,
CWForecast,
CWForecastItem,
CWForecastItemCreate,
CWProcurementProduct,
CWProcurementProductCreate,
CWOpportunityNote,
CWOpportunityNoteCreate,
CWOpportunityNoteUpdate,
CWOpportunityContact,
CWOpportunityUpdate,
} from "./opportunity.types";
export const opportunityCw = {
/**
* Count Opportunities
*
* Returns the total number of opportunities in ConnectWise.
* Optionally accepts CW conditions string for filtered counts.
*/
countItems: async (conditions?: string): Promise<number> => {
const query = conditions
? `/sales/opportunities/count?conditions=${encodeURIComponent(conditions)}`
: "/sales/opportunities/count";
const response = await connectWiseApi.get(query);
return response.data.count;
},
/**
* Fetch All Opportunity Summaries
*
* Lightweight fetch returning only id and _info (for lastUpdated comparison).
* Paginates through all opportunities.
*/
fetchAllSummaries: async (): Promise<
Collection<number, CWOpportunitySummary>
> => {
const allItems = new Collection<number, CWOpportunitySummary>();
const pageSize = 1000;
const count = await opportunityCw.countItems();
const totalPages = Math.ceil(count / pageSize);
for (let page = 0; page < totalPages; page++) {
const response = await connectWiseApi.get(
`/sales/opportunities?page=${page + 1}&pageSize=${pageSize}&fields=id,_info`,
);
const items: CWOpportunitySummary[] = response.data;
for (const item of items) {
allItems.set(item.id, item);
}
}
return allItems;
},
/**
* Fetch All Opportunities (Full)
*
* Fetches all opportunities with complete data. Paginates through
* the full list.
*/
fetchAll: async (
conditions?: string,
): Promise<Collection<number, CWOpportunity>> => {
const allItems = new Collection<number, CWOpportunity>();
const pageSize = 1000;
const count = await opportunityCw.countItems(conditions);
const totalPages = Math.ceil(count / pageSize);
for (let page = 0; page < totalPages; page++) {
const conditionsParam = conditions
? `&conditions=${encodeURIComponent(conditions)}`
: "";
const response = await connectWiseApi.get(
`/sales/opportunities?page=${page + 1}&pageSize=${pageSize}${conditionsParam}`,
);
const items: CWOpportunity[] = response.data;
for (const item of items) {
allItems.set(item.id, item);
}
}
return allItems;
},
/**
* Fetch Single Opportunity
*
* Fetches a single opportunity by its ConnectWise ID.
*/
fetch: async (id: number): Promise<CWOpportunity> => {
const response = await connectWiseApi.get(`/sales/opportunities/${id}`);
return response.data;
},
/**
* Create Opportunity
*
* Creates a new opportunity in ConnectWise via POST.
* Strips null/undefined values from the payload — CW rejects
* null reference objects on create; omitting them lets CW apply
* its own defaults.
*/
create: async (data: CWOpportunityCreate): Promise<CWOpportunity> => {
const cleaned = Object.fromEntries(
Object.entries(data).filter(([, v]) => v != null),
);
const response = await connectWiseApi.post("/sales/opportunities", cleaned);
return response.data;
},
/**
* Update Opportunity
*
* Applies a JSON Patch update to an opportunity record in ConnectWise.
* Each key in `data` produces a replace operation.
*/
update: async (
opportunityId: number,
data: CWOpportunityUpdate,
): Promise<CWOpportunity> => {
const operations = Object.entries(data).map(([key, value]) => ({
op: "replace" as const,
path: key,
value,
}));
const response = await connectWiseApi.patch(
`/sales/opportunities/${opportunityId}`,
operations,
);
return response.data;
},
/**
* Fetch Opportunities by Company
*
* Fetches all opportunities associated with a specific ConnectWise company ID.
*/
fetchByCompany: async (
cwCompanyId: number,
): Promise<Collection<number, CWOpportunity>> => {
return opportunityCw.fetchAll(`company/id=${cwCompanyId}`);
},
/**
* Fetch Opportunity Products
*
* Fetches the full forecast object (products, revenue summaries, totals)
* for a given opportunity.
*/
fetchProducts: async (opportunityId: number): Promise<CWForecast> => {
const response = await connectWiseApi.get(
`/sales/opportunities/${opportunityId}/forecast`,
);
return response.data;
},
/**
* Create Forecast Items
*
* Adds one or more forecast items (products) to an opportunity using
* POST. The CW forecast endpoint expects a Forecast object with a
* `forecastItems` array — we wrap just the new items inside that
* structure so existing items are never sent or touched.
*/
createProducts: async (
opportunityId: number,
data: CWForecastItemCreate | CWForecastItemCreate[],
): Promise<CWForecastItem[]> => {
const items_to_add = Array.isArray(data) ? data : [data];
const url = `/sales/opportunities/${opportunityId}/forecast`;
// 1. Fetch existing forecast to derive defaults & diff IDs later
const existing = await opportunityCw.fetchProducts(opportunityId);
const existingIds = new Set(
(existing.forecastItems ?? []).map((fi) => fi.id),
);
// Derive sensible defaults from an existing item when available
const templateItem = (existing.forecastItems ?? [])[0];
const defaultStatus = templateItem?.status
? { id: templateItem.status.id }
: { id: 1 };
const defaultForecastType = templateItem?.forecastType ?? "Product";
// 2. Build forecast items with required CW fields filled in
const forecastItems = items_to_add.map((newItem) => ({
opportunity: { id: opportunityId },
status: defaultStatus,
forecastType: defaultForecastType,
...(newItem as Record<string, unknown>),
}));
// 3. POST a Forecast wrapper containing only the new items
const response = await connectWiseApi.post(url, { forecastItems });
const updatedForecast: CWForecast = response.data;
// 4. Find newly-created item(s) by diffing IDs
const newItems = (updatedForecast.forecastItems ?? []).filter(
(fi) => !existingIds.has(fi.id),
);
// Fall back to the last N items if ID diffing finds nothing
return newItems.length > 0
? newItems
: (updatedForecast.forecastItems ?? []).slice(-items_to_add.length);
},
/**
* Update Forecast Item
*
* PATCHes a single forecast item on the parent `/forecast` endpoint.
* CW supports JSON Patch with paths like `/forecastItems/{index}/field`.
* This preserves item IDs (unlike PUT which always regenerates them)
* and does NOT recalculate revenue/cost from linked catalog items.
*
* NOTE: Not all fields are patchable — `sequenceNumber` and `quantity`
* are read-only on forecast items. Product ordering is managed locally
* via `OpportunityController.resequenceProducts()` and stored in the
* database `productSequence` field.
*/
updateProduct: async (
opportunityId: number,
forecastItemId: number,
data: Record<string, unknown>,
): Promise<CWForecastItem> => {
const forecast = await opportunityCw.fetchProducts(opportunityId);
const items = forecast.forecastItems ?? [];
const idx = items.findIndex((fi) => fi.id === forecastItemId);
if (idx === -1) {
throw new Error(
`Forecast item ${forecastItemId} not found on opportunity ${opportunityId}`,
);
}
const operations = Object.entries(data).map(([key, value]) => ({
op: "replace" as const,
path: `/forecastItems/${idx}/${key}`,
value,
}));
const url = `/sales/opportunities/${opportunityId}/forecast`;
const response = await connectWiseApi.patch(url, operations);
const updated: CWForecast = response.data;
return (updated.forecastItems ?? [])[idx]!;
},
/**
* Bulk-update Forecast Items
*
* PATCHes multiple forecast items in a single request via the parent
* `/forecast` endpoint. All patch operations are sent in one array.
*/
bulkUpdateProducts: async (
opportunityId: number,
updates: Map<number, Record<string, unknown>>,
): Promise<CWForecastItem[]> => {
const forecast = await opportunityCw.fetchProducts(opportunityId);
const items = forecast.forecastItems ?? [];
const operations: { op: "replace"; path: string; value: unknown }[] = [];
const touchedIndices: number[] = [];
for (const [itemId, changes] of updates) {
const idx = items.findIndex((fi) => fi.id === itemId);
if (idx === -1) {
throw new Error(
`Forecast item ${itemId} not found on opportunity ${opportunityId}`,
);
}
touchedIndices.push(idx);
for (const [key, value] of Object.entries(changes)) {
operations.push({
op: "replace",
path: `/forecastItems/${idx}/${key}`,
value,
});
}
}
const url = `/sales/opportunities/${opportunityId}/forecast`;
const response = await connectWiseApi.patch(url, operations);
const updated: CWForecast = response.data;
return touchedIndices.map((i) => (updated.forecastItems ?? [])[i]!);
},
/**
* Delete Forecast Item
*
* Removes a forecast item from an opportunity by PUTting the forecast
* without the target item. CW's forecast endpoint replaces the entire
* forecast items list on PUT.
*/
deleteProduct: async (
opportunityId: number,
forecastItemId: number,
): Promise<void> => {
const forecast = await opportunityCw.fetchProducts(opportunityId);
const items = forecast.forecastItems ?? [];
const filtered = items.filter((fi) => fi.id !== forecastItemId);
if (filtered.length === items.length) {
throw new Error(
`Forecast item ${forecastItemId} not found on opportunity ${opportunityId}`,
);
}
const url = `/sales/opportunities/${opportunityId}/forecast`;
await connectWiseApi.put(url, { ...forecast, forecastItems: filtered });
},
/**
* Fetch Opportunity Notes
*
* Fetches notes associated with a given opportunity.
*/
fetchNotes: async (opportunityId: number): Promise<CWOpportunityNote[]> => {
const response = await connectWiseApi.get(
`/sales/opportunities/${opportunityId}/notes`,
);
return response.data;
},
/**
* Fetch Single Note
*
* Fetches a single note by its ID on the given opportunity.
*/
fetchNote: async (
opportunityId: number,
noteId: number,
): Promise<CWOpportunityNote> => {
const response = await connectWiseApi.get(
`/sales/opportunities/${opportunityId}/notes/${noteId}`,
);
return response.data;
},
/**
* Create Note
*
* Creates a new note on the given opportunity.
*/
createNote: async (
opportunityId: number,
data: CWOpportunityNoteCreate,
): Promise<CWOpportunityNote> => {
const response = await connectWiseApi.post(
`/sales/opportunities/${opportunityId}/notes`,
data,
);
return response.data;
},
/**
* Update Note
*
* Updates an existing note on the given opportunity.
*/
updateNote: async (
opportunityId: number,
noteId: number,
data: CWOpportunityNoteUpdate,
): Promise<CWOpportunityNote> => {
const response = await connectWiseApi.patch(
`/sales/opportunities/${opportunityId}/notes/${noteId}`,
Object.entries(data).map(([key, value]) => ({
op: "replace",
path: key,
value,
})),
);
return response.data;
},
/**
* Delete Note
*
* Deletes a note from the given opportunity.
*/
deleteNote: async (opportunityId: number, noteId: number): Promise<void> => {
await connectWiseApi.delete(
`/sales/opportunities/${opportunityId}/notes/${noteId}`,
);
},
/**
* Fetch Opportunity Contacts
*
* Fetches contacts associated with a given opportunity.
*/
fetchContacts: async (
opportunityId: number,
): Promise<CWOpportunityContact[]> => {
const response = await connectWiseApi.get(
`/sales/opportunities/${opportunityId}/contacts`,
);
return response.data;
},
/**
* Fetch Procurement Products
*
* Fetches procurement product records linked to an opportunity.
* These contain cancellation data (cancelledFlag, cancelledReason, etc.)
* that the forecast endpoint does not provide.
*/
fetchProcurementProducts: async (
opportunityId: number,
): Promise<Record<string, unknown>[]> => {
const response = await connectWiseApi.get(
`/procurement/products?conditions=${encodeURIComponent(`opportunity/id=${opportunityId}`)}&fields=id,forecastDetailId,cancelledFlag,quantityCancelled,cancelledReason,cancelledBy,cancelledDate,customFields`,
);
return response.data;
},
/**
* Create Procurement Products
*
* Creates one or more procurement products linked to an opportunity.
* This endpoint supports procurement customFields (unlike forecast items).
*/
createProcurementProducts: async (
data: CWProcurementProductCreate | CWProcurementProductCreate[],
): Promise<CWProcurementProduct[]> => {
const productsToCreate = Array.isArray(data) ? data : [data];
const created: CWProcurementProduct[] = [];
for (const product of productsToCreate) {
const response = await connectWiseApi.post(
`/procurement/products`,
product,
);
created.push(response.data as CWProcurementProduct);
}
return created;
},
/**
* Fetch Procurement Product by Forecast Detail
*
* Finds the procurement product linked to a given forecast item ID
* on an opportunity.
*/
fetchProcurementProductByForecastDetail: async (
opportunityId: number,
forecastDetailId: number,
): Promise<CWProcurementProduct | null> => {
const conditions = `opportunity/id=${opportunityId} and forecastDetailId=${forecastDetailId}`;
const response = await connectWiseApi.get(
`/procurement/products?conditions=${encodeURIComponent(conditions)}&fields=id,forecastDetailId,description,customerDescription,quantity,price,cost,taxableFlag,specialOrderFlag,customFields`,
);
const items = (response.data ?? []) as CWProcurementProduct[];
return items[0] ?? null;
},
/**
* Update Procurement Product
*
* Applies a JSON Patch update to a procurement product record.
*/
updateProcurementProduct: async (
procurementProductId: number,
data: Record<string, unknown>,
): Promise<CWProcurementProduct> => {
const operations = Object.entries(data).map(([key, value]) => ({
op: "replace" as const,
path: key,
value,
}));
const response = await connectWiseApi.patch(
`/procurement/products/${procurementProductId}`,
operations,
);
return response.data as CWProcurementProduct;
},
/**
* Delete Opportunity
*
* Deletes an opportunity from ConnectWise by its CW opportunity ID.
*/
delete: async (opportunityId: number): Promise<void> => {
await connectWiseApi.delete(`/sales/opportunities/${opportunityId}`);
},
/**
* Fetch All Opportunities from Collector
*
* Fetches opportunities from the dalpuri collector service with full
* opportunity data (relationships, metadata, etc.).
*
* Includes: pipeline, status, type, urgency, interest, owner,
* company, contact, addresses, marketing campaign, and sale dates.
*
* @returns {Promise<unknown[]>} — Raw collector payload of opportunities
*/
fetchAllOpportunitiesFromCollector: async (): Promise<unknown[]> => {
const startedAt = Date.now();
console.info("[opportunities] Collector fetchOpportunities started");
const payload = await runCollector<unknown[]>("fetchOpportunities", {
include: [
"soPipeline",
"soOppStatus",
"soType",
"soUrgency",
"soInterest",
"ownerLevel",
"company",
"contact",
"companyAddress",
"billingTerms",
"taxCode",
"currency",
"billingUnit",
"contractType",
"pmProject",
"marketingCampaign",
"agrType",
"srService",
"approvedByMember",
"rejectedByMember",
"activities",
"opportunityNotes",
"forecastItems",
"contacts",
],
});
console.info(
`[opportunities] Collector fetchOpportunities received payload in ${Date.now() - startedAt}ms`,
);
if (!Array.isArray(payload)) {
throw new Error("Collector fetchOpportunities payload was not an array");
}
console.info(`[opportunities] Collector payload rows: ${payload.length}`);
return payload;
},
};
@@ -0,0 +1,310 @@
/** Minimal id + name reference embedded in many CW payload fields. */
interface CWReference {
  id: number;
  name: string;
  _info?: Record<string, string>;
}
/** Reference to a CW member (user); carries a string identifier alongside id/name. */
interface CWMemberReference {
  id: number;
  identifier: string;
  name: string;
  _info?: Record<string, string>;
}
/** Reference to a CW company; carries a string identifier alongside id/name. */
interface CWCompanyReference {
  id: number;
  identifier: string;
  name: string;
  _info?: Record<string, string>;
}
/** Reference to a CW contact. */
interface CWContactReference {
  id: number;
  name: string;
  _info?: Record<string, string>;
}
/** Reference to a CW company site/address. */
interface CWSiteReference {
  id: number;
  name: string;
  _info?: Record<string, string>;
}
/**
 * A CW user-defined custom field as it appears on opportunity and
 * procurement payloads. `value` is `unknown` because its runtime type
 * depends on the field's configured `type`/`entryMethod`.
 */
export interface CWCustomField {
  id: number;
  caption: string;
  type: string;
  entryMethod: string;
  numberOfDecimals: number;
  value: unknown;
  connectWiseId: string;
  rowNum: number;
  userDefinedFieldRecId: number;
  podId: string;
}
/**
 * Full opportunity record as returned by the CW `/sales/opportunities`
 * API. Reference-typed fields are declared required to mirror the CW
 * schema, but consumers in this codebase still null-check them before
 * use (see processOpportunityResponse) — presumably they can be absent
 * at runtime; confirm against the CW API docs.
 */
export interface CWOpportunity {
  id: number;
  name: string;
  expectedCloseDate: string;
  // Classification references
  type: CWReference;
  stage: CWReference;
  status: CWReference;
  priority: CWReference;
  notes: string;
  source: string;
  rating: CWReference;
  campaign: CWReference;
  // Sales reps
  primarySalesRep: CWMemberReference;
  secondarySalesRep: CWMemberReference;
  locationId: number;
  businessUnitId: number;
  // Primary company / contact / site
  company: CWCompanyReference;
  contact: CWContactReference;
  site: CWSiteReference;
  customerPO: string;
  // Lifecycle dates (ISO strings from CW)
  pipelineChangeDate: string;
  dateBecameLead: string;
  closedDate: string;
  closedBy: CWMemberReference;
  totalSalesTax: number;
  probability: CWReference;
  // Ship-to / bill-to references
  shipToCompany: CWCompanyReference;
  shipToContact: CWContactReference;
  shipToSite: CWSiteReference;
  billToCompany: CWCompanyReference;
  billToContact: CWContactReference;
  billToSite: CWSiteReference;
  // Financial / admin references
  billingTerms: CWReference;
  taxCode: CWReference;
  currency: CWReference;
  companyLocationId: number;
  location: CWReference;
  department: CWReference;
  closedFlag: boolean;
  mobileGuid: string;
  customFields: CWCustomField[];
  _info: CWOpportunityInfo;
}
/** CW metadata block: audit timestamps plus hrefs to related resources. */
export interface CWOpportunityInfo {
  lastUpdated: string;
  updatedBy: string;
  dateEntered: string;
  enteredBy: string;
  forecasts_href: string;
  notes_href: string;
  products_href: string;
  contacts_href: string;
  configurations_href: string;
  team_href: string;
  documents_href: string;
  activities_href: string;
}
/**
 * A single line item (product) within an opportunity's forecast, as
 * returned by the `/sales/opportunities/{id}/forecast` endpoint.
 */
export interface CWForecastItem {
  id: number;
  forecastDescription: string;
  opportunity: CWReference;
  quantity: number;
  status: CWReference;
  catalogItem?: {
    id: number;
    identifier: string;
    _info?: Record<string, string>;
  };
  productDescription: string;
  customerDescription?: string;
  productClass: string;
  // Financials for this line
  revenue: number;
  cost: number;
  margin: number;
  percentage: number;
  includeFlag: boolean;
  quoteWerksQuantity: number;
  forecastType: string;
  linkFlag: boolean;
  // Recurring-revenue fields
  recurringRevenue: number;
  recurringCost: number;
  cycles: number;
  recurringFlag: boolean;
  sequenceNumber: number;
  subNumber: number;
  taxableFlag: boolean;
  customFields?: CWCustomField[];
  _info?: Record<string, string>;
}
/** Revenue/cost/margin rollup block used throughout CWForecast. */
export interface CWForecastRevenueSummary {
  id: number;
  revenue: number;
  cost: number;
  margin: number;
  percentage: number;
  _info?: Record<string, string>;
}
/**
 * Full forecast payload for an opportunity: the item list plus revenue
 * rollups by category and win/loss state, and grand totals.
 */
export interface CWForecast {
  id: number;
  forecastItems: CWForecastItem[];
  // Rollups by item category
  productRevenue: CWForecastRevenueSummary;
  serviceRevenue: CWForecastRevenueSummary;
  agreementRevenue: CWForecastRevenueSummary;
  timeRevenue: CWForecastRevenueSummary;
  expenseRevenue: CWForecastRevenueSummary;
  forecastRevenueTotals: CWForecastRevenueSummary;
  inclusiveRevenueTotals: CWForecastRevenueSummary;
  recurringTotal: number;
  // Rollups by outcome state
  wonRevenue: CWForecastRevenueSummary;
  lostRevenue: CWForecastRevenueSummary;
  openRevenue: CWForecastRevenueSummary;
  otherRevenue1: CWForecastRevenueSummary;
  otherRevenue2: CWForecastRevenueSummary;
  salesTaxRevenue: number;
  forecastTotalWithTaxes: number;
  expectedProbability: number;
  taxCode: CWReference;
  billingTerms: CWReference;
  currency: {
    id: number;
    symbol: string;
    currencyCode: string;
    name: string;
    _info?: Record<string, string>;
  };
  _info?: Record<string, string>;
}
/** A note attached to an opportunity, as returned by the CW notes endpoint. */
export interface CWOpportunityNote {
  id: number;
  opportunity: CWReference;
  text: string;
  type: CWReference;
  flagged: boolean;
  enteredBy: string;
  mobileGuid: string;
  _info?: Record<string, string>;
}
/** Payload for creating a note; only `text` is required. */
export interface CWOpportunityNoteCreate {
  text: string;
  type?: { id: number };
  flagged?: boolean;
}
/** Partial payload for updating a note (translated to JSON Patch by updateNote). */
export interface CWOpportunityNoteUpdate {
  text?: string;
  type?: { id: number };
  flagged?: boolean;
}
/** A contact linked to an opportunity, including its role on the deal. */
export interface CWOpportunityContact {
  id: number;
  opportunity: CWReference;
  contact: CWContactReference;
  company: CWCompanyReference;
  role: CWReference;
  notes: string;
  referralFlag: boolean;
  _info?: Record<string, string>;
}
/**
 * Payload for creating a forecast item. All fields are optional; missing
 * required CW fields are defaulted by `createProducts`.
 */
export interface CWForecastItemCreate {
  catalogItem?: { id: number };
  forecastDescription?: string;
  productDescription?: string;
  quantity?: number;
  status?: { id: number };
  productClass?: string;
  forecastType?: string;
  revenue?: number;
  cost?: number;
  includeFlag?: boolean;
  linkFlag?: boolean;
  recurringFlag?: boolean;
  taxableFlag?: boolean;
  recurringRevenue?: number;
  recurringCost?: number;
  cycles?: number;
  sequenceNumber?: number;
  // CW-managed custom-field bookkeeping properties are excluded on create.
  customFields?: Array<
    Partial<Omit<CWCustomField, "connectWiseId" | "rowNum" | "podId">>
  >;
}
/** Payload for creating a procurement product (supports customFields, unlike forecast items). */
export interface CWProcurementProductCreate {
  opportunity?: { id: number };
  catalogItem: { id: number };
  description: string;
  customerDescription?: string;
  quantity?: number;
  price?: number;
  cost?: number;
  taxableFlag?: boolean;
  dropshipFlag?: boolean;
  billableOption?: string;
  customFields?: Array<
    Partial<Omit<CWCustomField, "connectWiseId" | "rowNum" | "podId">>
  >;
}
/** Procurement product record; `forecastDetailId` links it back to a forecast item. */
export interface CWProcurementProduct {
  id: number;
  forecastDetailId?: number;
  description?: string;
  customerDescription?: string;
  quantity?: number;
  price?: number;
  cost?: number;
  taxableFlag?: boolean;
  specialOrderFlag?: boolean;
  customFields?: CWCustomField[];
  _info?: Record<string, string>;
}
/**
 * Partial payload for updating an opportunity. `null` on nullable
 * reference fields clears the association.
 */
export interface CWOpportunityUpdate {
  name?: string;
  notes?: string;
  rating?: { id: number };
  type?: { id: number };
  stage?: { id: number };
  status?: { id: number };
  priority?: { id: number };
  campaign?: { id: number };
  primarySalesRep?: { id: number };
  secondarySalesRep?: { id: number } | null;
  company?: { id: number };
  contact?: { id: number } | null;
  site?: { id: number } | null;
  expectedCloseDate?: string;
  customerPO?: string | null;
  source?: string | null;
  locationId?: number;
  businessUnitId?: number;
}
/**
 * Payload for creating an opportunity. Name, close date, primary rep,
 * company, and contact are required; everything else is optional.
 */
export interface CWOpportunityCreate {
  name: string;
  expectedCloseDate: string;
  primarySalesRep: { id: number };
  company: { id: number };
  contact: { id: number };
  type?: { id: number };
  stage?: { id: number };
  status?: { id: number };
  priority?: { id: number };
  campaign?: { id: number };
  secondarySalesRep?: { id: number } | null;
  site?: { id: number } | null;
  notes?: string;
  rating?: { id: number };
  source?: string | null;
  customerPO?: string | null;
  locationId?: number;
  businessUnitId?: number;
}
/** Lightweight summary row (id + metadata) used for stale-checking. */
export interface CWOpportunitySummary {
  id: number;
  _info?: Record<string, string>;
}
@@ -0,0 +1,333 @@
/**
* Opportunity Collector Translation
*
* Maps opportunities from the collector (dalpuri) fetchOpportunities schema
* to the internal database/CW API schema for normalization and storage.
*/
/**
* Raw collector opportunity shape from fetchOpportunities.
* This matches the MSSQL source structure with snake_case fields.
*/
export interface CollectorOpportunity {
  // Core IDs and identifiers — several candidate fields exist because
  // source rows vary; the first usable one wins (see getOpportunityId).
  soRecId?: number | string;
  recId?: number | string;
  id?: number | string;
  opportunityRecId?: number | string;
  soNumber?: string;
  // Main fields
  soDescription?: string; // opportunity name
  notes?: string;
  customerPO?: string;
  source?: string | null;
  // Status and type
  soPipeline?: { recId?: number; description?: string } | null;
  soOppStatus?: { recId?: number; description?: string } | null;
  soType?: { recId?: number; description?: string } | null;
  soUrgency?: { recId?: number; description?: string } | null;
  soInterest?: { recId?: number; description?: string } | null;
  // Relationships
  company?: {
    recId?: number;
    companyIdentifier?: string;
    companyName?: string;
  } | null;
  contact?: { recId?: number; contactName?: string } | null;
  companyAddress?: { addressRecId?: number; addressName?: string } | null;
  // Sales refs
  ownerLevel?: {
    recId?: number;
    memberName?: string;
    memberIdentifier?: string;
  } | null;
  // Financial / admin
  billingTerms?: { recId?: number; description?: string } | null;
  taxCode?: { recId?: number; description?: string } | null;
  currency?: { recId?: number; currencyCode?: string } | null;
  // Dates (raw strings; parsed to Date during normalization)
  expectedCloseDate?: string | null;
  dateBecameLead?: string | null;
  closedDate?: string | null;
  pipelineChangeDate?: string | null;
  // Probability / closed
  closedFlag?: boolean;
  approvedByMember?: { recId?: number; memberName?: string } | null;
  rejectedByMember?: { recId?: number; memberName?: string } | null;
  // Other includes (less commonly mapped to Opportunity table)
  billingUnit?: { recId?: number; description?: string } | null;
  contractType?: { recId?: number; description?: string } | null;
  pmProject?: { recId?: number; projectName?: string } | null;
  marketingCampaign?: { recId?: number; description?: string } | null;
  agrType?: { recId?: number; description?: string } | null;
  srService?: { recId?: number; description?: string } | null;
  // Untyped passthrough relations — not consumed by the normalizer here.
  shipToCompany?: unknown | null;
  shipToContact?: unknown | null;
  shipToCompanyAddress?: unknown | null;
  billToCompany?: unknown | null;
  billToContact?: unknown | null;
  billToCompanyAddress?: unknown | null;
  techContact?: unknown | null;
  activities?: unknown[];
  opportunityNotes?: unknown[];
  forecastItems?: unknown[];
  opportunityContacts?: unknown[];
  contacts?: unknown[];
  // Metadata
  _info?: { lastUpdated?: string; dateEntered?: string };
}
/** Loosely-typed numeric input — the collector may deliver numbers as strings. */
type NumberLike = number | string | null | undefined;
/**
* Normalized opportunity shape that maps to the DB Opportunity table.
* Output from normalizeCollectorOpportunity().
*/
export interface NormalizedCollectorOpportunity {
  // Core identifiers (required for upsert).
  // Guaranteed > 0 by normalizeCollectorOpportunity, which throws otherwise.
  cwOpportunityId: number;
  // Basic info
  name: string;
  notes: string | null;
  customerPO: string | null;
  source: string | null;
  // Status / type references (names default to "" via getName; IDs null when absent)
  typeName: string | null;
  typeCwId: number | null;
  stageName: string | null;
  stageCwId: number | null;
  statusName: string | null;
  statusCwId: number | null;
  priorityName: string | null;
  priorityCwId: number | null;
  ratingName: string | null;
  ratingCwId: number | null;
  // Sales rep references
  primarySalesRepName: string | null;
  primarySalesRepIdentifier: string | null;
  primarySalesRepCwId: number | null;
  // Secondary rep is not provided by the collector; always null here.
  secondarySalesRepName: string | null;
  secondarySalesRepIdentifier: string | null;
  secondarySalesRepCwId: number | null;
  // Company / contact / site references
  companyCwId: number | null;
  companyName: string | null;
  contactCwId: number | null;
  contactName: string | null;
  siteCwId: number | null;
  siteName: string | null;
  // Campaign / location references (location/department not collector-provided)
  campaignName: string | null;
  campaignCwId: number | null;
  locationName: string | null;
  locationCwId: number | null;
  departmentName: string | null;
  departmentCwId: number | null;
  // Dates
  expectedCloseDate: Date | null;
  pipelineChangeDate: Date | null;
  dateBecameLead: Date | null;
  closedDate: Date | null;
  closedFlag: boolean;
  closedByName: string | null;
  closedByCwId: number | null;
  // Financial (not provided by the collector; defaulted by the normalizer)
  totalSalesTax: number;
  probability: number | null;
  // Metadata
  cwLastUpdated: Date | null;
  cwDateEntered: Date | null;
}
/**
* Helper: parse a date string to Date or return null.
*/
/**
 * Parse a date string into a `Date`, or return `null`.
 *
 * Returns `null` for empty/missing input and for strings the `Date`
 * constructor cannot parse (detected via a `NaN` timestamp). The
 * previous try/catch was dead code — `new Date(string)` never throws;
 * it produces an invalid Date instead, which the NaN check covers.
 *
 * @param dateString - Date string from the collector, or null/undefined.
 * @returns A valid `Date`, or `null` when absent or unparseable.
 */
const parseDate = (dateString: string | null | undefined): Date | null => {
  if (!dateString) return null;
  const parsed = new Date(dateString);
  return Number.isNaN(parsed.getTime()) ? null : parsed;
};
/** Coerce a number or numeric string into a finite number, else `null`. */
const parseNumber = (value: NumberLike): number | null => {
  if (typeof value === "number") {
    return Number.isFinite(value) ? value : null;
  }
  if (typeof value !== "string" || value.trim().length === 0) {
    return null;
  }
  const asNumber = Number(value);
  return Number.isFinite(asNumber) ? asNumber : null;
};
/**
* Helper: extract ID from nested object.
*/
/** Extract a positive `recId` from a nested collector object, else `null`. */
const getId = (obj: unknown): number | null => {
  if (obj === null || typeof obj !== "object") return null;
  const rawRecId = (obj as Record<string, unknown>).recId;
  const recId = parseNumber(rawRecId as NumberLike);
  return recId !== null && recId > 0 ? recId : null;
};
/**
 * Resolve the opportunity's CW ID from the first parseable candidate
 * field; returns 0 when nothing usable (positive) is found.
 */
const getOpportunityId = (item: CollectorOpportunity): number => {
  let candidate: number | null = null;
  for (const raw of [item.soRecId, item.recId, item.id, item.opportunityRecId]) {
    candidate = parseNumber(raw);
    if (candidate !== null) break;
  }
  const resolved = candidate ?? 0;
  return resolved > 0 ? resolved : 0;
};
/**
* Helper: extract description/name from nested object.
*/
/**
 * Read a display name from a nested collector object, preferring
 * `description` over `memberName`; returns the fallback when the object
 * is absent, non-object, or yields an empty string.
 */
const getName = (obj: unknown, fallback = ""): string => {
  if (!obj || typeof obj !== "object") return fallback;
  const record = obj as Record<string, unknown>;
  let label = fallback;
  if (typeof record.description === "string") {
    label = record.description;
  } else if (typeof record.memberName === "string") {
    label = record.memberName;
  }
  return label === "" ? fallback : label;
};
/**
* Normalize a collector opportunity into the internal DB schema.
*
* Handles field mapping, type conversions, and null coercion.
*/
/**
 * Normalize a collector opportunity into the internal DB schema.
 *
 * Handles field mapping, type conversions, and null coercion. Nested
 * fields are read with typed optional chaining — the previous `as any`
 * casts were unnecessary since every field accessed below is declared
 * on CollectorOpportunity.
 *
 * @param item - Raw collector row.
 * @returns Normalized record shaped for the Opportunity table.
 * @throws Error when no usable (positive) ID field is present.
 */
export const normalizeCollectorOpportunity = (
  item: CollectorOpportunity,
): NormalizedCollectorOpportunity => {
  const cwOpportunityId = getOpportunityId(item);
  if (cwOpportunityId <= 0) {
    throw new Error("Collector opportunity missing ID field (expected soRecId, recId, id, or opportunityRecId)");
  }
  // Build normalized object mapping collector fields to DB schema
  return {
    cwOpportunityId,
    // Basic info
    name: item.soDescription ?? "",
    notes: item.notes ?? null,
    customerPO: item.customerPO ?? null,
    source: item.source ?? null,
    // Status / type (collector uses soPipeline/soOppStatus as CW stage/status)
    typeName: getName(item.soType),
    typeCwId: getId(item.soType),
    // Map soPipeline to stageName/stageCwId
    stageName: getName(item.soPipeline),
    stageCwId: getId(item.soPipeline),
    // Map soOppStatus to statusName/statusCwId
    statusName: getName(item.soOppStatus),
    statusCwId: getId(item.soOppStatus),
    // Priority / rating: soUrgency and soInterest can map here if needed
    priorityName: getName(item.soUrgency),
    priorityCwId: getId(item.soUrgency),
    ratingName: getName(item.soInterest),
    ratingCwId: getId(item.soInterest),
    // Sales rep references (ownerLevel is primary in collector)
    primarySalesRepName: item.ownerLevel?.memberName ?? null,
    primarySalesRepIdentifier: item.ownerLevel?.memberIdentifier ?? null,
    primarySalesRepCwId: getId(item.ownerLevel),
    // Secondary rep is not provided by the collector
    secondarySalesRepName: null,
    secondarySalesRepIdentifier: null,
    secondarySalesRepCwId: null,
    // Company / contact / site references
    companyCwId: getId(item.company),
    companyName: item.company?.companyName ?? null,
    contactCwId: getId(item.contact),
    contactName: item.contact?.contactName ?? null,
    siteCwId: getId(item.companyAddress),
    siteName: item.companyAddress?.addressName ?? null,
    // Campaign / location (from includeRelations)
    campaignName: getName(item.marketingCampaign),
    campaignCwId: getId(item.marketingCampaign),
    locationName: null,
    locationCwId: null,
    departmentName: null,
    departmentCwId: null,
    // Dates
    expectedCloseDate: parseDate(item.expectedCloseDate),
    pipelineChangeDate: parseDate(item.pipelineChangeDate),
    dateBecameLead: parseDate(item.dateBecameLead),
    closedDate: parseDate(item.closedDate),
    closedFlag: item.closedFlag ?? false,
    // approvedByMember wins when present, even if its name is missing
    // (matches original short-circuit behavior)
    closedByName: item.approvedByMember
      ? (item.approvedByMember.memberName ?? null)
      : item.rejectedByMember
        ? (item.rejectedByMember.memberName ?? null)
        : null,
    closedByCwId: getId(item.approvedByMember) ?? getId(item.rejectedByMember),
    // Financial
    totalSalesTax: 0, // Not provided by collector; will be 0
    probability: null, // Not directly provided; would need to compute from status/stage
    // Metadata
    cwLastUpdated: parseDate(item._info?.lastUpdated),
    cwDateEntered: parseDate(item._info?.dateEntered),
  };
};
/**
* Normalize a collection of opportunities from the collector.
*/
/**
 * Normalize a collection of opportunities from the collector.
 * Rows that fail to normalize are logged and skipped so one bad row
 * does not abort the whole batch.
 */
export const normalizeCollectorOpportunities = (
  items: CollectorOpportunity[],
): Map<number, NormalizedCollectorOpportunity> => {
  const byCwId = new Map<number, NormalizedCollectorOpportunity>();
  items.forEach((item) => {
    try {
      const row = normalizeCollectorOpportunity(item);
      byCwId.set(row.cwOpportunityId, row);
    } catch (err) {
      console.warn(
        `[opportunityCollectorTranslation] Failed to normalize item: ${err instanceof Error ? err.message : String(err)}`,
      );
    }
  });
  return byCwId;
};
@@ -0,0 +1,90 @@
import { CWOpportunity } from "./opportunity.types";
import { normalizeProbabilityPercent } from "../../sales-utils/normalizeProbability";
export type ProcessedOpportunity = ReturnType<
typeof processOpportunityResponse
>;
/**
* Processes raw CW opportunity data into a cleaner, normalized shape
* suitable for API responses and internal consumption.
*/
/** Collapse a CW id/name reference to a plain pair, or null when absent. */
const toIdName = (ref: { id: number; name: string } | null | undefined) =>
  ref ? { id: ref.id, name: ref.name } : null;
/** Collapse a CW member/company reference (id/identifier/name), or null when absent. */
const toMember = (
  ref: { id: number; identifier: string; name: string } | null | undefined,
) => (ref ? { id: ref.id, identifier: ref.identifier, name: ref.name } : null);
/**
 * Flatten a raw CW opportunity into the normalized response shape.
 * Every reference field is reduced to its id/name (plus identifier for
 * member/company references) or null when absent.
 */
export const processOpportunityResponse = (opportunity: CWOpportunity) => ({
  id: opportunity.id,
  name: opportunity.name,
  expectedCloseDate: opportunity.expectedCloseDate,
  closedDate: opportunity.closedDate,
  closedFlag: opportunity.closedFlag,
  probability: normalizeProbabilityPercent(opportunity.probability?.name),
  type: toIdName(opportunity.type),
  stage: toIdName(opportunity.stage),
  status: toIdName(opportunity.status),
  priority: toIdName(opportunity.priority),
  rating: toIdName(opportunity.rating),
  source: opportunity.source,
  notes: opportunity.notes,
  customerPO: opportunity.customerPO,
  company: toMember(opportunity.company),
  contact: toIdName(opportunity.contact),
  site: toIdName(opportunity.site),
  primarySalesRep: toMember(opportunity.primarySalesRep),
  secondarySalesRep: toMember(opportunity.secondarySalesRep),
  closedBy: toMember(opportunity.closedBy),
  campaign: toIdName(opportunity.campaign),
  totalSalesTax: opportunity.totalSalesTax,
  location: toIdName(opportunity.location),
  department: toIdName(opportunity.department),
  pipelineChangeDate: opportunity.pipelineChangeDate,
  dateBecameLead: opportunity.dateBecameLead,
  info: opportunity._info,
});
/**
* Processes an array of raw CW opportunities.
*/
/** Process an array of raw CW opportunities into normalized shapes. */
export const processOpportunitiesResponse = (opportunities: CWOpportunity[]) =>
  opportunities.map((opportunity) => processOpportunityResponse(opportunity));
@@ -0,0 +1,232 @@
import { prisma } from "../../../constants";
import { events } from "../../globalEvents";
import { opportunities } from "../../../managers/opportunities";
import { opportunityCw } from "./opportunities";
import { OpportunityController } from "../../../controllers/OpportunityController";
import { invalidateAllOpportunityCaches } from "../../cache/opportunityCache";
/**
* Refresh Opportunities
*
* **Data-source strategy:**
* 1. Try to fetch from the collector (dalpuri) first
* 2. Fall back to ConnectWise API if collector fails or is unavailable
* 3. Normalize the result and upsert into the database
* 4. Reconcile orphaned items (records in DB but not in CW)
*
* Uses the same stale-check pattern as refreshCatalog:
* 1. Fetch lightweight summaries (id + _info.lastUpdated)
* 2. Compare against local cwLastUpdated timestamps
* 3. Full-fetch only stale/new records
* 4. Upsert stale items, optionally linking to internal Company
*/
export const refreshOpportunities = async (opts?: {
collectorFetch?: () => Promise<unknown[]>;
}) => {
events.emit("cw:opportunities:refresh:check");
// ── Step 1: Try collector first, then fall back to CW ──────────────
let cwSummaries: Map<number, any> = new Map();
let allCwItems: Map<number, any> = new Map();
let useCollector = false;
if (opts?.collectorFetch) {
try {
console.log("[refreshOpportunities] Attempting collector fetch");
const result = await opportunities.refreshOpportunitiesFromCollector({
collectorFetch: opts.collectorFetch,
});
if (result.fromCollector && result.upserted > 0) {
useCollector = true;
console.log(
`[refreshOpportunities] Collector provided ${result.upserted} opportunities`,
);
events.emit("cw:opportunities:refresh:completed", {
totalCw: result.upserted,
totalDb: result.upserted,
staleCount: result.upserted,
itemsUpdated: result.upserted,
orphanedCount: 0,
});
} else if (result.errors?.length) {
console.warn(
`[refreshOpportunities] Collector errors: ${result.errors.join("; ")}`,
);
console.log("[refreshOpportunities] Falling back to ConnectWise API");
}
} catch (err) {
console.warn(
`[refreshOpportunities] Collector fetch exception, falling back to CW: ${err instanceof Error ? err.message : String(err)}`,
);
}
}
// If collector didn't work, use traditional CW fetch
if (!useCollector) {
console.log(
"[refreshOpportunities] Fetching opportunities from ConnectWise",
);
// 1. Fetch lightweight summaries from CW
cwSummaries = await opportunityCw.fetchAllSummaries();
// 4. Full-fetch all opportunities for upserting
allCwItems = await opportunityCw.fetchAll();
}
// ── Step 2: Reconcile orphaned items ─────────────────────────────────
// 2. Fetch all DB items with their cwOpportunityId and cwLastUpdated
const dbItems = await prisma.opportunity.findMany({
select: {
id: true,
cwOpportunityId: true,
cwLastUpdated: true,
cwDateEntered: true,
},
});
const dbMap = new Map(dbItems.map((item) => [item.cwOpportunityId, item]));
if (!useCollector) {
// 3. Determine stale / new IDs (only if we fetched from CW)
const staleIds: number[] = [];
for (const [cwId, summary] of cwSummaries) {
const cwLastUpdated = summary._info?.lastUpdated
? new Date(summary._info.lastUpdated)
: null;
const dbItem = dbMap.get(cwId) ?? null;
const dbLastUpdated = dbItem?.cwLastUpdated ?? null;
// Treat as stale if never synced, CW has newer data, or cwDateEntered is missing (backfill)
if (
!dbLastUpdated ||
(cwLastUpdated && cwLastUpdated > dbLastUpdated) ||
!dbItem?.cwDateEntered
) {
staleIds.push(cwId);
}
}
// 3b. Reconcile — find local records that no longer exist in CW
const orphanedItems = dbItems.filter(
(item) => !cwSummaries.has(item.cwOpportunityId),
);
if (orphanedItems.length > 0) {
console.log(
`[refreshOpportunities] Reconciling ${orphanedItems.length} orphaned local record(s) not found in CW`,
);
await Promise.all(
orphanedItems.map(async (item) => {
await prisma.opportunity.delete({ where: { id: item.id } });
await invalidateAllOpportunityCaches(item.cwOpportunityId);
}),
);
events.emit("cw:opportunities:refresh:reconciled", {
orphanedCount: orphanedItems.length,
removedCwIds: orphanedItems.map((i) => i.cwOpportunityId),
});
}
if (staleIds.length === 0) {
events.emit("cw:opportunities:refresh:skipped", {
totalCw: cwSummaries.size,
totalDb: dbItems.length,
staleCount: 0,
orphanedCount: orphanedItems.length,
});
return;
}
events.emit("cw:opportunities:refresh:started", {
totalCw: cwSummaries.size,
totalDb: dbItems.length,
staleCount: staleIds.length,
});
// 5. Build a company CW ID → internal ID lookup for linking
const companies = await prisma.company.findMany({
select: { id: true, cw_CompanyId: true },
});
const companyMap = new Map(companies.map((c) => [c.cw_CompanyId, c.id]));
// 6. Upsert stale/new items (only if we fetched from CW)
const updatedCount = (
await Promise.all(
staleIds.map(async (cwId) => {
const item = allCwItems.get(cwId);
if (!item) return null;
const mapped = OpportunityController.mapCwToDb(item);
const companyId = item.company?.id
? (companyMap.get(item.company.id) ?? null)
: null;
return prisma.opportunity.upsert({
where: { cwOpportunityId: cwId },
create: {
cwOpportunityId: cwId,
...mapped,
companyId,
},
update: {
...mapped,
companyId,
},
});
}),
)
).filter(Boolean).length;
events.emit("cw:opportunities:refresh:completed", {
totalCw: cwSummaries.size,
totalDb: dbItems.length,
staleCount: staleIds.length,
itemsUpdated: updatedCount,
orphanedCount: orphanedItems.length,
});
} else {
// Collector-based refresh: still reconcile orphaned items but skip stale-check
console.log(
"[refreshOpportunities] Collector-based refresh: skipping stale-check, performing orphan reconciliation",
);
// Fetch list of CW opp IDs from cache or a quick count
// For now, reconcile only items older than a threshold
const twentyFourHoursAgo = new Date(Date.now() - 24 * 60 * 60 * 1000);
const orphanedItems = dbItems.filter((item) => {
const lastUpdated = item.cwLastUpdated ?? item.cwDateEntered;
// Only reconcile items that were last updated more than a day ago
return lastUpdated && lastUpdated < twentyFourHoursAgo;
});
if (orphanedItems.length > 0) {
console.log(
`[refreshOpportunities] Collector reconciling ${orphanedItems.length} stale orphan record(s)`,
);
await Promise.all(
orphanedItems.map(async (item) => {
await prisma.opportunity.delete({ where: { id: item.id } });
await invalidateAllOpportunityCaches(item.cwOpportunityId);
}),
);
events.emit("cw:opportunities:refresh:reconciled", {
orphanedCount: orphanedItems.length,
removedCwIds: orphanedItems.map((i) => i.cwOpportunityId),
});
}
events.emit("cw:opportunities:refresh:completed", {
totalCw: dbItems.length,
totalDb: dbItems.length,
staleCount: 0,
itemsUpdated: 0,
orphanedCount: orphanedItems.length,
});
}
};
@@ -0,0 +1,132 @@
import { Collection } from "@discordjs/collection";
import { connectWiseApi } from "../../../constants";
import { runCollector } from "../../collector-client/runCollector";
import { CatalogItem } from "./catalog.types.ts";
import {
normalizeCollectorProducts,
NormalizedCatalogCollectorItem,
} from "./catalogCollectorTranslation";
/**
 * Lightweight catalog row returned by the summary fetch
 * (`fields=id,_info`) — enough to compare staleness without
 * downloading the full item payload.
 */
export interface CatalogSummary {
  id: number;
  /** CW metadata map; values are strings (e.g. a lastUpdated timestamp). */
  _info?: Record<string, string>;
}
/** One inventory-location row from `/procurement/catalog/:id/inventory`. */
export interface InventoryEntry {
  id: number;
  /** Quantity on hand at this location; summed across entries by callers. */
  onHand: number;
}
/** Page size used for all paged CW catalog requests. */
const CATALOG_PAGE_SIZE = 1000;

/**
 * Page through a CW catalog listing and index the rows by `id`.
 *
 * Total pages are derived from `countItems()` up front, so rows added or
 * removed while paging can be missed — the same trade-off the previous
 * duplicated inline loops had.
 *
 * @param buildUrl builds the request URL for a 1-based page number
 */
const fetchAllPaged = async <T extends { id: number }>(
  buildUrl: (page: number, pageSize: number) => string,
): Promise<Collection<number, T>> => {
  const allItems = new Collection<number, T>();
  const count = await catalogCw.countItems();
  const totalPages = Math.ceil(count / CATALOG_PAGE_SIZE);
  for (let page = 1; page <= totalPages; page++) {
    const response = await connectWiseApi.get(
      buildUrl(page, CATALOG_PAGE_SIZE),
    );
    const items: T[] = response.data;
    for (const item of items) {
      allItems.set(item.id, item);
    }
  }
  return allItems;
};

export const catalogCw = {
  /**
   * Bulk-fetch all products via the collector service and normalize them.
   *
   * @throws when the payload is not an array or yields no valid rows —
   *   callers treat the throw as a signal to fall back to the CW REST API.
   */
  fetchAllProductsFromCollector: async (): Promise<
    Collection<number, NormalizedCatalogCollectorItem>
  > => {
    const startedAt = Date.now();
    console.info("[catalog-refresh] Collector fetchProducts started");
    const payload = await runCollector<unknown[]>("fetchProducts", {
      include: ["subcategory", "manufacturer", "inventory", "itemVendors"],
    });
    console.info(
      `[catalog-refresh] Collector fetchProducts received payload in ${Date.now() - startedAt}ms`,
    );
    if (!Array.isArray(payload)) {
      throw new Error("Collector payload was not an array");
    }
    console.info(`[catalog-refresh] Collector payload rows: ${payload.length}`);
    const normalizeStartedAt = Date.now();
    const normalized = normalizeCollectorProducts(payload);
    console.info(
      `[catalog-refresh] Collector normalization completed in ${Date.now() - normalizeStartedAt}ms (${normalized.size} valid rows)`,
    );
    if (normalized.size === 0) {
      throw new Error(
        "Collector payload did not contain valid product records",
      );
    }
    return new Collection<number, NormalizedCatalogCollectorItem>(normalized);
  },
  /** Total number of catalog items reported by CW. */
  countItems: async (): Promise<number> => {
    const response = await connectWiseApi.get("/procurement/catalog/count");
    return response.data.count;
  },
  /** All catalog rows as id+_info summaries (used for staleness checks). */
  fetchAllCatalogSummary: async (): Promise<
    Collection<number, CatalogSummary>
  > =>
    fetchAllPaged<CatalogSummary>(
      (page, pageSize) =>
        `/procurement/catalog?page=${page}&pageSize=${pageSize}&fields=id,_info`,
    ),
  /** Sum of `onHand` across all inventory locations for one item. */
  fetchInventoryOnHand: async (cwCatalogId: number): Promise<number> => {
    const response = await connectWiseApi.get(
      `/procurement/catalog/${cwCatalogId}/inventory?fields=onHand`,
    );
    const entries: InventoryEntry[] = response.data;
    return entries.reduce((sum, e) => sum + (e.onHand || 0), 0);
  },
  /** All full catalog items, keyed by CW catalog id. */
  fetchAllItemsFromCw: async (): Promise<Collection<number, CatalogItem>> =>
    fetchAllPaged<CatalogItem>(
      (page, pageSize) =>
        `/procurement/catalog?page=${page}&pageSize=${pageSize}`,
    ),
  /**
   * Fetch one item by CW catalog id, falling back to the
   * `/procurement/catalog/items/:id` route when the primary route fails.
   */
  fetchByCatalogId: async (cwCatalogId: number): Promise<CatalogItem> => {
    try {
      const response = await connectWiseApi.get(
        `/procurement/catalog/${cwCatalogId}`,
      );
      return response.data;
    } catch {
      const fallback = await connectWiseApi.get(
        `/procurement/catalog/items/${cwCatalogId}`,
      );
      return fallback.data;
    }
  },
  /**
   * Fetch by string id: numeric ids route through fetchByCatalogId,
   * non-numeric ids hit the items route directly.
   */
  fetch: async (id: string): Promise<CatalogItem> => {
    const numericId = Number(id);
    if (!Number.isFinite(numericId)) {
      const response = await connectWiseApi.get(
        `/procurement/catalog/items/${id}`,
      );
      return response.data;
    }
    return catalogCw.fetchByCatalogId(numericId);
  },
};
@@ -0,0 +1,75 @@
/** Generic ConnectWise lookup reference: numeric id plus display name. */
interface CWReference {
  id: number;
  name: string;
  _info?: Record<string, string>;
}
/** Vendor reference; carries the CW `identifier` string in addition to id/name. */
interface CWVendorReference {
  id: number;
  identifier: string;
  name: string;
  _info?: Record<string, string>;
}
/**
 * ConnectWise user-defined (custom) field attached to a catalog item.
 * `value` is deliberately `unknown` — narrow before use.
 */
interface CWCustomField {
  id: number;
  caption: string;
  type: string;
  entryMethod: string;
  numberOfDecimals: number;
  value: unknown;
  connectWiseId: string;
  rowNum: number;
  userDefinedFieldRecId: number;
  podId: string;
}
/**
 * Full ConnectWise procurement catalog item as returned by the CW REST API
 * (used by catalogCw's fetch functions).
 */
export interface CatalogItem {
  id: number;
  identifier: string;
  description: string;
  inactiveFlag: boolean;
  subcategory: CWReference;
  type: CWReference;
  productClass: string;
  serializedFlag: boolean;
  serializedCostFlag: boolean;
  phaseProductFlag: boolean;
  unitOfMeasure: CWReference;
  minStockLevel: number;
  // Pricing / tax
  price: number;
  cost: number;
  priceAttribute: string;
  taxableFlag: boolean;
  dropShipFlag: boolean;
  specialOrderFlag: boolean;
  customerDescription: string;
  // Sourcing
  manufacturer: CWReference;
  manufacturerPartNumber: string;
  vendor: CWVendorReference;
  vendorSku: string;
  notes: string;
  integrationXRef: string;
  sla: CWReference;
  entityType: CWReference;
  // Recurring-billing attributes
  recurringFlag: boolean;
  recurringRevenue: number;
  recurringCost: number;
  recurringOneTimeFlag: boolean;
  recurringBillCycle: CWReference;
  recurringCycleType: string;
  calculatedPriceFlag: boolean;
  calculatedCostFlag: boolean;
  category: CWReference;
  calculatedPrice: number;
  calculatedCost: number;
  billableOption: string;
  connectWiseID: string;
  agreementType: CWReference;
  markupPercentage: number;
  markupFlag: boolean;
  autoUpdateUnitCostFlag: boolean;
  autoUpdateUnitPriceFlag: boolean;
  // String-valued metadata; `lastUpdated` is read by the sync code.
  _info?: Record<string, string>;
  customFields?: CWCustomField[];
}
@@ -0,0 +1,281 @@
/**
* Catalog Collector Translation
*
* Maps products from the collector (dalpuri) fetchProducts schema
* to the internal database schema for normalization and storage.
*/
/** Value that may arrive as a number, a numeric string, or be absent. */
type NumberLike = number | string | null | undefined;
/** Nested reference from the collector; display text may be in either field. */
interface CollectorReference {
  recId?: number;
  description?: string;
  name?: string;
}
/** Vendor-flavored reference; also carries an `identifier` string. */
interface CollectorVendorReference {
  recId?: number;
  identifier?: string;
  description?: string;
  name?: string;
}
/** One inventory row; only the on-hand quantity is consumed here. */
interface CollectorInventory {
  onHand?: NumberLike;
}
/** Entry in a product's `itemVendors` list. */
interface CollectorItemVendor {
  vendor?: CollectorVendorReference | null;
}
/**
 * Raw collector product shape from fetchProducts.
 * This matches the MSSQL source structure with appropriate field names.
 *
 * Normalization prefers the current fields and falls back to the legacy
 * p*-prefixed fields (see normalizeCollectorProduct).
 */
export interface CollectorProduct {
  // Current collector fields
  catalogRecId?: number;
  itemId?: string;
  description?: string;
  longDescription?: string | null;
  notes?: string | null;
  categoryRecId?: NumberLike;
  category?: CollectorReference | null;
  subcategoryRecId?: NumberLike;
  subcategory?: CollectorReference | null;
  manufacturerRecId?: NumberLike;
  manufacturerPartNum?: string | null;
  manufacturer?: CollectorReference | null;
  vendorRecId?: NumberLike;
  vendorSku?: string | null;
  itemVendors?: CollectorItemVendor[] | null;
  listPrice?: NumberLike;
  currentCost?: NumberLike;
  inventory?: CollectorInventory[] | null;
  inactiveFlag?: boolean;
  taxableFlag?: boolean;
  lastUpdatedUtc?: string | null;
  lastUpdate?: string | null;
  dateEnteredUtc?: string | null;
  // Legacy collector fields retained for backward compatibility
  pId?: number;
  pIdentifier?: string;
  pNumber?: string;
  pName?: string;
  pDescription?: string;
  pCustomerDescription?: string;
  pInternalNotes?: string;
  pCategory?: CollectorReference | null;
  pSubcategory?: CollectorReference | null;
  pManufacturer?: CollectorReference | null;
  pManufacturerPartNumber?: string;
  pVendor?: CollectorVendorReference | null;
  pVendorSku?: string;
  pPrice?: NumberLike;
  pCost?: NumberLike;
  pOnHand?: NumberLike;
  pInactive?: boolean;
  pSalesTaxable?: boolean;
  _info?: { lastUpdated?: string; dateEntered?: string };
}
/**
 * Normalized product shape that maps to the DB CatalogItem table.
 * Output from normalizeCollectorProduct().
 *
 * NOTE: category/subcategory/manufacturer are typed nullable, but
 * normalizeCollectorProduct currently emits "" (the getName fallback)
 * when no display name resolves.
 */
export interface NormalizedCatalogCollectorItem {
  // Core identifiers (required for upsert)
  cwCatalogId: number;
  // Basic info
  identifier: string;
  name: string;
  description: string | null;
  customerDescription: string | null;
  internalNotes: string | null;
  // Categorization
  category: string | null;
  categoryCwId: number | null;
  subcategory: string | null;
  subcategoryCwId: number | null;
  // Vendor / Manufacturer
  manufacturer: string | null;
  manufactureCwId: number | null;
  partNumber: string | null;
  vendorName: string | null;
  vendorSku: string | null;
  vendorCwId: number | null;
  // Pricing & Inventory
  price: number;
  cost: number;
  onHand: number;
  inactive: boolean;
  salesTaxable: boolean;
  // Metadata
  cwLastUpdated: Date | null;
  cwDateEntered: Date | null;
}
/**
 * Parse a date string into a Date.
 * Returns null for empty input or anything Date cannot interpret.
 */
const parseDate = (dateString: string | null | undefined): Date | null => {
  if (!dateString) return null;
  const parsed = new Date(dateString);
  if (Number.isNaN(parsed.getTime())) return null;
  return parsed;
};
/** Coerce a number-or-numeric-string to a finite number, else null. */
const parseNumber = (
  value: number | string | null | undefined,
): number | null => {
  if (typeof value === "number") {
    return Number.isFinite(value) ? value : null;
  }
  if (typeof value !== "string" || value.trim().length === 0) {
    return null;
  }
  const parsed = Number(value);
  return Number.isFinite(parsed) ? parsed : null;
};
/**
 * Helper: extract a positive `recId` from a nested reference object.
 * Returns null for non-objects and for missing, zero, or negative ids.
 */
const getId = (obj: unknown): number | null => {
  if (typeof obj !== "object" || obj === null) return null;
  const raw = (obj as Record<string, unknown>).recId;
  const recId = parseNumber(raw as number | string | null | undefined);
  return recId !== null && recId > 0 ? recId : null;
};
/**
 * Helper: read a display name from a nested reference object, preferring
 * `description` over `name`; non-string or empty values yield `fallback`.
 */
const getName = (obj: unknown, fallback = ""): string => {
  if (typeof obj !== "object" || obj === null) return fallback;
  const { description, name } = obj as Record<string, unknown>;
  const candidate = typeof description === "string" ? description : name;
  if (typeof candidate === "string" && candidate) return candidate;
  return fallback;
};
/** Vendor display name: legacy pVendor first, then the first itemVendors entry. */
const getVendorName = (item: CollectorProduct): string | null => {
  const legacy = getName(item.pVendor ?? null);
  if (legacy) return legacy;
  const nested = getName(item.itemVendors?.[0]?.vendor ?? null);
  return nested || null;
};
/** Sum on-hand quantities across inventory rows, else legacy pOnHand (or 0). */
const getOnHand = (item: CollectorProduct): number => {
  const { inventory } = item;
  if (!Array.isArray(inventory) || inventory.length === 0) {
    return parseNumber(item.pOnHand) ?? 0;
  }
  let total = 0;
  for (const entry of inventory) {
    total += parseNumber(entry?.onHand) ?? 0;
  }
  return total;
};
/**
 * Normalize a collector product into the internal DB schema.
 *
 * Prefers current collector fields, falling back to the legacy
 * p*-prefixed fields. Throws when no positive catalog id resolves.
 */
export const normalizeCollectorProduct = (
  item: CollectorProduct,
): NormalizedCatalogCollectorItem => {
  const cwCatalogId =
    parseNumber(item.catalogRecId) ?? parseNumber(item.pId) ?? 0;
  if (cwCatalogId <= 0) {
    throw new Error("Collector product missing catalogRecId");
  }
  // Resolve each nested reference once; current shape wins over legacy.
  const categoryRef = item.category ?? item.pCategory;
  const subcategoryRef = item.subcategory ?? item.pSubcategory;
  const manufacturerRef = item.manufacturer ?? item.pManufacturer;
  const vendorRef = item.itemVendors?.[0]?.vendor ?? item.pVendor;
  return {
    cwCatalogId,
    // Basic info
    identifier:
      item.itemId ??
      item.pIdentifier ??
      item.pNumber ??
      `product_${cwCatalogId}`,
    name: item.description ?? item.pName ?? "",
    description: item.longDescription ?? item.pDescription ?? null,
    customerDescription: item.pCustomerDescription ?? null,
    internalNotes: item.notes ?? item.pInternalNotes ?? null,
    // Categorization
    category: getName(categoryRef),
    categoryCwId: parseNumber(item.categoryRecId) ?? getId(categoryRef),
    subcategory: getName(subcategoryRef),
    subcategoryCwId:
      parseNumber(item.subcategoryRecId) ?? getId(subcategoryRef),
    // Vendor / Manufacturer
    manufacturer: getName(manufacturerRef),
    manufactureCwId:
      parseNumber(item.manufacturerRecId) ?? getId(manufacturerRef),
    partNumber:
      item.manufacturerPartNum ?? item.pManufacturerPartNumber ?? null,
    vendorName: getVendorName(item),
    vendorSku: item.vendorSku ?? item.pVendorSku ?? null,
    vendorCwId: parseNumber(item.vendorRecId) ?? getId(vendorRef),
    // Pricing & Inventory
    price: parseNumber(item.listPrice) ?? parseNumber(item.pPrice) ?? 0,
    cost: parseNumber(item.currentCost) ?? parseNumber(item.pCost) ?? 0,
    onHand: getOnHand(item),
    inactive: item.inactiveFlag ?? item.pInactive ?? false,
    salesTaxable: item.taxableFlag ?? item.pSalesTaxable ?? false,
    // Metadata
    cwLastUpdated: parseDate(
      item.lastUpdatedUtc ?? item.lastUpdate ?? item._info?.lastUpdated,
    ),
    cwDateEntered: parseDate(item.dateEnteredUtc ?? item._info?.dateEntered),
  };
};
/**
 * Normalize a batch of raw collector rows, keyed by cwCatalogId.
 * Invalid rows are logged and skipped rather than failing the whole batch.
 */
export const normalizeCollectorProducts = (
  items: unknown[],
): Map<number, NormalizedCatalogCollectorItem> => {
  const byId = new Map<number, NormalizedCatalogCollectorItem>();
  for (const raw of items) {
    try {
      const product = normalizeCollectorProduct(raw as CollectorProduct);
      byId.set(product.cwCatalogId, product);
    } catch (err) {
      const reason = err instanceof Error ? err.message : String(err);
      console.warn(
        `[catalogCollectorTranslation] Failed to normalize item: ${reason}`,
      );
    }
  }
  return byId;
};
@@ -0,0 +1,469 @@
import { prisma, redis, connectWiseApi } from "../../../constants";
import { withCwRetry } from "../withCwRetry";
import { catalogCw } from "./catalog";
import { CatalogItem } from "./catalog.types";
type JsonObject = Record<string, unknown>;
/**
 * One catalog-affecting line extracted from a CW procurement adjustment,
 * flattened to display strings for comparison across polls.
 */
type TrackedProduct = {
  cwCatalogId: number;
  product: string;
  onHand: string;
  inventory: string;
  // Composite identity: `${cwCatalogId}|${product}|${onHand}|${inventory}`.
  key: string;
};
/** Per-adjustment snapshot used to diff successive polls. */
type AdjustmentSnapshot = {
  key: string;
  trackedRows: TrackedProduct[];
  // stableStringify(trackedRows) — cheap equality signature.
  signature: string;
};
const ADJUSTMENTS_ENDPOINT = "/procurement/adjustments?pageSize=1000";
const CATALOG_ITEM_CACHE_PREFIX = "catalog:item:cw:";
// Redis TTL for the per-item sync cache (20 minutes).
const CATALOG_ITEM_CACHE_TTL_SECONDS = 20 * 60;
// Cap on catalog items re-synced per polling cycle (env-overridable).
const MAX_SYNC_PER_CYCLE = Number(
  process.env.CW_ADJUSTMENT_SYNC_MAX_PER_CYCLE ?? "50",
);
// Minimum time between re-syncs of the same item (default 10 minutes).
const SYNC_COOLDOWN_MS = Number(
  process.env.CW_ADJUSTMENT_SYNC_COOLDOWN_MS ?? `${10 * 60 * 1000}`,
);
// Module-level poll state: previous snapshots, previous per-product
// signatures, last sync times, and a re-entrancy guard.
let previous = new Map<string, AdjustmentSnapshot>();
let previousProductState = new Map<number, string>();
const lastSyncedAt = new Map<number, number>();
let inFlight = false;
/** True when `value` is a plain object (not null, not an array). */
const isObject = (value: unknown): value is Record<string, unknown> =>
  typeof value === "object" && value !== null && !Array.isArray(value);
/** Coerce any value to a plain object; non-objects become `{}`. */
const toObject = (value: unknown): Record<string, unknown> =>
  isObject(value) ? value : {};
/**
 * Deterministic JSON-ish serialization:
 * - object keys are sorted
 * - array ELEMENTS are serialized then sorted, so element order is ignored
 * Used to build order-insensitive change signatures.
 */
const stableStringify = (value: unknown): string => {
  if (Array.isArray(value)) {
    return `[${value.map((entry) => stableStringify(entry)).sort().join(",")}]`;
  }
  if (!isObject(value)) {
    return JSON.stringify(value);
  }
  const body = Object.keys(value)
    .sort()
    .map((key) => `${JSON.stringify(key)}:${stableStringify(value[key])}`)
    .join(",");
  return `{${body}}`;
};
/** Walk a dotted path ("a.b.c") through nested objects; null when blocked. */
const readPathValue = (obj: Record<string, unknown>, path: string): unknown => {
  let cursor: unknown = obj;
  for (const segment of path.split(".")) {
    if (!isObject(cursor)) return null;
    cursor = cursor[segment];
  }
  return cursor;
};
/** First value among `paths` that is not null/undefined/"" — else null. */
const firstValue = (obj: Record<string, unknown>, paths: string[]): unknown => {
  for (const path of paths) {
    const value = readPathValue(obj, path);
    if (value !== null && value !== undefined && value !== "") return value;
  }
  return null;
};
/**
 * Coerce a number-or-numeric-string to a finite number, else null.
 *
 * Strings are trimmed and must be non-empty after trimming: previously a
 * whitespace-only string slipped through as `Number(" ") === 0` and was
 * read as a real quantity of zero. The trim also matches the behavior of
 * parseNumber in the collector-translation module.
 */
const asNumber = (value: unknown): number | null => {
  if (typeof value === "number" && Number.isFinite(value)) return value;
  if (typeof value === "string" && value.trim().length > 0) {
    const parsed = Number(value);
    if (Number.isFinite(parsed)) return parsed;
  }
  return null;
};
/**
 * Render any value as a short display string for signatures:
 * null/undefined/"" -> "-", primitives via String(), arrays element-wise,
 * objects via a preferred identity field, stable JSON as a last resort.
 */
const asText = (value: unknown): string => {
  if (value === null || value === undefined || value === "") return "-";
  if (
    typeof value === "string" ||
    typeof value === "number" ||
    typeof value === "boolean"
  ) {
    return String(value);
  }
  if (Array.isArray(value)) {
    const rendered = value.map((entry) => asText(entry));
    return `[${rendered.join(",")}]`;
  }
  if (!isObject(value)) return String(value);
  // Prefer a human-meaningful identity field before dumping the object.
  for (const field of ["name", "identifier", "id", "code", "value"]) {
    const candidate = readPathValue(value, field);
    if (candidate === null || candidate === undefined || candidate === "") {
      continue;
    }
    if (typeof candidate === "object") continue;
    return String(candidate);
  }
  return stableStringify(value);
};
const adjustmentKey = (adjustment: JsonObject): string => {
const keyPaths = [
"id",
"adjustmentId",
"procurementAdjustmentId",
"recordId",
"recId",
"_info.id",
"_info.href",
];
for (const path of keyPaths) {
const key = firstValue(adjustment, [path]);
const keyText = asText(key);
if (keyText !== "-") return keyText;
}
return `anon:${stableStringify(adjustment)}`;
};
/**
 * Extract the catalog-relevant portion of one adjustment detail line.
 * Returns null when the line has no catalog id, or when it carries
 * neither an on-hand nor an inventory quantity.
 */
const trackedRow = (detail: JsonObject): TrackedProduct | null => {
  const cwCatalogId = asNumber(
    firstValue(detail, [
      "catalogItem.id",
      "catalogItemId",
      "catalog.id",
      "catalogId",
      "item.id",
      "itemId",
      "product.id",
      "productId",
      "id",
    ]),
  );
  if (!cwCatalogId) return null;
  const onHand = asText(
    firstValue(detail, [
      "onHand",
      "onHandQty",
      "onHandQuantity",
      "qtyOnHand",
      "quantityOnHand",
      "quantity.onHand",
    ]),
  );
  const inventory = asText(
    firstValue(detail, [
      "inventory",
      "inventoryQty",
      "inventoryLevel",
      "quantity",
      "qty",
    ]),
  );
  // A line with no quantity data carries nothing we track.
  if (onHand === "-" && inventory === "-") return null;
  const product = asText(
    firstValue(detail, [
      "product.name",
      "product.identifier",
      "item.name",
      "item.identifier",
      "catalogItem.name",
      "catalogItem.identifier",
      "productName",
      "productIdentifier",
      "sku",
      "identifier",
    ]),
  );
  return {
    cwCatalogId,
    product,
    onHand,
    inventory,
    key: [cwCatalogId, product, onHand, inventory].join("|"),
  };
};
/**
 * Collect tracked rows from an adjustment: first detail array (among the
 * known container fields) that yields rows wins; otherwise try the
 * adjustment object itself as a single row.
 */
const trackedRows = (adjustment: JsonObject): TrackedProduct[] => {
  for (const field of ["adjustmentDetails", "details", "lineItems"]) {
    const candidate = readPathValue(adjustment, field);
    if (!Array.isArray(candidate)) continue;
    const rows: TrackedProduct[] = [];
    for (const entry of candidate) {
      const row = trackedRow(toObject(entry));
      if (row !== null) rows.push(row);
    }
    if (rows.length > 0) {
      rows.sort((a, b) => a.key.localeCompare(b.key));
      return rows;
    }
  }
  const selfRow = trackedRow(adjustment);
  return selfRow ? [selfRow] : [];
};
/** Build per-adjustment snapshots (keyed by adjustment identity) from raw rows. */
const snapshot = (rows: unknown[]): Map<string, AdjustmentSnapshot> => {
  const result = new Map<string, AdjustmentSnapshot>();
  for (const raw of rows) {
    const adjustment = toObject(raw);
    const key = adjustmentKey(adjustment);
    const tracked = trackedRows(adjustment);
    result.set(key, {
      key,
      trackedRows: tracked,
      signature: stableStringify(tracked),
    });
  }
  return result;
};
/**
 * Ids present in `after` whose signature is new or differs from `before`.
 * Ids that only existed in `before` (deleted) are not reported.
 */
const changedCatalogIds = (
  before: Map<number, string>,
  after: Map<number, string>,
): Set<number> => {
  const changed = new Set<number>();
  for (const [cwCatalogId, nextSignature] of after) {
    const prevSignature = before.get(cwCatalogId);
    if (!prevSignature || prevSignature !== nextSignature) {
      changed.add(cwCatalogId);
    }
  }
  return changed;
};
/**
 * Collapse adjustment snapshots into one signature per catalog item:
 * the sorted, stable serialization of every row key mentioning that item.
 */
const productState = (
  adjustments: Map<string, AdjustmentSnapshot>,
): Map<number, string> => {
  const rowKeysByProduct = new Map<number, Set<string>>();
  for (const adjustmentSnapshot of adjustments.values()) {
    for (const row of adjustmentSnapshot.trackedRows) {
      const keys = rowKeysByProduct.get(row.cwCatalogId);
      if (keys) {
        keys.add(row.key);
      } else {
        rowKeysByProduct.set(row.cwCatalogId, new Set([row.key]));
      }
    }
  }
  const state = new Map<number, string>();
  for (const [cwCatalogId, keys] of rowKeysByProduct) {
    state.set(cwCatalogId, stableStringify([...keys].sort()));
  }
  return state;
};
/**
 * Filter ids still inside their sync cooldown window, then cap the result
 * at MAX_SYNC_PER_CYCLE.
 */
const applySyncGuards = (ids: number[]): number[] => {
  const now = Date.now();
  const eligible: number[] = [];
  for (const cwCatalogId of ids) {
    const last = lastSyncedAt.get(cwCatalogId);
    if (!last || now - last >= SYNC_COOLDOWN_MS) {
      eligible.push(cwCatalogId);
    }
  }
  return eligible.length > MAX_SYNC_PER_CYCLE
    ? eligible.slice(0, MAX_SYNC_PER_CYCLE)
    : eligible;
};
/**
 * Fetch the current adjustments page from CW (with retry).
 * Accepts either a bare array payload or a `{ data: [...] }` envelope;
 * anything else yields an empty list.
 */
const fetchAdjustments = async (): Promise<unknown[]> => {
  const response = await withCwRetry(
    () => connectWiseApi.get(ADJUSTMENTS_ENDPOINT),
    { label: "inventory-adjustments", maxAttempts: 3 },
  );
  const body = response.data;
  if (Array.isArray(body)) {
    return body;
  }
  const nested = isObject(body) ? body.data : null;
  return Array.isArray(nested) ? nested : [];
};
/** Redis key for a synced catalog item, namespaced by CW catalog id. */
const cacheKey = (cwCatalogId: number) =>
  CATALOG_ITEM_CACHE_PREFIX + String(cwCatalogId);
/**
 * Best-effort timestamp for a CW item: its `_info.lastUpdated` when present
 * and parseable, otherwise "now".
 */
const cwLastUpdated = (item: CatalogItem): Date => {
  const raw = item._info?.lastUpdated;
  if (raw) {
    const parsed = new Date(raw);
    if (!Number.isNaN(parsed.getTime())) return parsed;
  }
  return new Date();
};
/**
 * Re-sync one catalog item from CW into the DB and Redis cache.
 *
 * Fetches the full item and its summed on-hand quantity (both with retry),
 * upserts the DB row, then caches the combined result. Returns false
 * (after logging) on any failure instead of throwing.
 */
const syncCatalogItem = async (cwCatalogId: number): Promise<boolean> => {
  try {
    const item = await withCwRetry(
      () => catalogCw.fetchByCatalogId(cwCatalogId),
      { label: `catalog-item:${cwCatalogId}`, maxAttempts: 3 },
    );
    const onHand = await withCwRetry(
      () => catalogCw.fetchInventoryOnHand(cwCatalogId),
      { label: `catalog-onhand:${cwCatalogId}`, maxAttempts: 3 },
    );
    // One field map shared by the create and update branches of the upsert.
    const fields = {
      identifier: item.identifier,
      name: item.description,
      description: item.description,
      customerDescription: item.customerDescription,
      internalNotes: item.notes,
      category: item.category?.name,
      categoryCwId: item.category?.id,
      subcategory: item.subcategory?.name,
      subcategoryCwId: item.subcategory?.id,
      manufacturer: item.manufacturer?.name,
      manufactureCwId: item.manufacturer?.id,
      partNumber: item.manufacturerPartNumber,
      vendorName: item.vendor?.name,
      vendorSku: item.vendorSku,
      vendorCwId: item.vendor?.id,
      price: item.price,
      cost: item.cost,
      inactive: item.inactiveFlag,
      salesTaxable: item.taxableFlag,
      onHand,
      cwLastUpdated: cwLastUpdated(item),
    };
    const persisted = await prisma.catalogItem.upsert({
      where: { cwCatalogId },
      create: { cwCatalogId, ...fields },
      update: fields,
    });
    await redis.set(
      cacheKey(cwCatalogId),
      JSON.stringify({
        cwCatalogId,
        onHand,
        cwItem: item,
        dbItem: persisted,
        syncedAt: new Date().toISOString(),
      }),
      "EX",
      CATALOG_ITEM_CACHE_TTL_SECONDS,
    );
    return true;
  } catch (err) {
    console.error(
      `[inventory-adjustments] failed to sync catalog item ${cwCatalogId}`,
      err,
    );
    return false;
  }
};
/**
 * Poll CW procurement adjustments and re-sync catalog items whose tracked
 * inventory rows changed since the previous poll.
 *
 * The first successful poll only captures a baseline (no syncs). Later
 * polls diff per-product signatures, apply the cooldown/cap guards, then
 * sync the remaining items one at a time (each sync makes multiple CW
 * calls). `inFlight` prevents overlapping runs.
 */
export const listenInventoryAdjustments = async (): Promise<void> => {
  if (inFlight) return;
  inFlight = true;
  try {
    const rows = await fetchAdjustments();
    const current = snapshot(rows);
    const currentProductState = productState(current);
    // First run: record the baseline and exit without syncing anything.
    if (previous.size === 0) {
      previous = current;
      previousProductState = currentProductState;
      console.log(
        `[inventory-adjustments] baseline captured (${current.size} adjustments, ${currentProductState.size} products)`,
      );
      return;
    }
    const changedIds = [
      ...changedCatalogIds(previousProductState, currentProductState),
    ].sort((a, b) => a - b);
    const guardedIds = applySyncGuards(changedIds);
    // Baseline advances before syncing, so items that fail to sync are not
    // re-queued by the next diff unless their inventory changes again.
    previous = current;
    previousProductState = currentProductState;
    if (guardedIds.length === 0) return;
    let successCount = 0;
    for (const cwCatalogId of guardedIds) {
      const ok = await syncCatalogItem(cwCatalogId);
      if (!ok) continue;
      lastSyncedAt.set(cwCatalogId, Date.now());
      successCount += 1;
    }
    const skippedByCooldown = changedIds.length - guardedIds.length;
    console.log(
      `[inventory-adjustments] inventory changed for ${changedIds.length} products, queued ${guardedIds.length}, synced ${successCount}, cooldown/cap skipped ${skippedByCooldown}`,
    );
  } catch (err) {
    console.error("[inventory-adjustments] listener failed", err);
  } finally {
    inFlight = false;
  }
};
@@ -0,0 +1,429 @@
import { prisma } from "../../../constants";
import { events } from "../../globalEvents";
import { catalogCw } from "./catalog";
import { NormalizedCatalogCollectorItem } from "./catalogCollectorTranslation";
// Max concurrent CW requests during the fallback (REST) refresh path.
const CONCURRENCY = 6;
// Pause between fallback request batches.
const BATCH_DELAY_MS = 250;
// Number of DB upserts settled per batch.
const UPSERT_BATCH_SIZE = 50;
// Cap on row-level error log lines per refresh cycle.
const MAX_ROW_ERROR_LOGS = 10;
/** Resolve after `ms` milliseconds (used to pace batched work). */
const sleep = (ms: number): Promise<void> =>
  new Promise<void>((resolve) => {
    setTimeout(resolve, ms);
  });
/** Clamp a string to `max` characters, replacing the cut tail with "...". */
const truncate = (value: string, max = 400): string => {
  if (value.length <= max) return value;
  return value.slice(0, max - 3) + "...";
};
/**
 * Render an unknown (typically Prisma) error as one log-friendly line:
 * "Prisma <code>: <message> | model=... | target=... | cause=... | clientVersion=...".
 * Unknown shapes degrade gracefully to String(err).
 */
const summarizePrismaError = (err: unknown): string => {
  const source = err as Record<string, unknown>;
  const readString = (value: unknown): string | null =>
    typeof value === "string" && value.length > 0 ? value : null;
  const code = readString(source?.code);
  const message = readString(source?.message) ?? String(err);
  const clientVersion =
    typeof source?.clientVersion === "string" ? source.clientVersion : null;
  const meta =
    source?.meta && typeof source.meta === "object"
      ? (source.meta as Record<string, unknown>)
      : null;
  const modelName = typeof meta?.modelName === "string" ? meta.modelName : null;
  const targetRaw = meta?.target;
  const target = Array.isArray(targetRaw)
    ? targetRaw.join(",")
    : typeof targetRaw === "string"
      ? targetRaw
      : null;
  const cause = typeof meta?.cause === "string" ? meta.cause : null;
  const parts: string[] = [
    code ? `Prisma ${code}: ${truncate(message, 600)}` : truncate(message, 600),
  ];
  if (modelName) parts.push(`model=${modelName}`);
  if (target) parts.push(`target=${target}`);
  if (cause) parts.push(`cause=${truncate(cause, 400)}`);
  if (clientVersion) parts.push(`clientVersion=${clientVersion}`);
  return parts.join(" | ");
};
/**
 * Build a row-level error logger capped at MAX_ROW_ERROR_LOGS emissions
 * per refresh cycle, so mass failures do not flood the log.
 */
const createCatalogErrorLogger = () => {
  let emitted = 0;
  return (context: string, err: unknown, detail?: Record<string, unknown>) => {
    if (emitted >= MAX_ROW_ERROR_LOGS) return;
    const detailJson = detail ? JSON.stringify(detail) : "";
    const suffix = detailJson ? ` | detail=${truncate(detailJson, 120)}` : "";
    console.error(
      `[catalog-refresh] ${context}: ${summarizePrismaError(err)}${suffix}`,
    );
    emitted += 1;
    if (emitted === MAX_ROW_ERROR_LOGS) {
      console.error(
        `[catalog-refresh] Reached ${MAX_ROW_ERROR_LOGS} row-level error logs; suppressing additional row errors for this refresh cycle`,
      );
    }
  };
};
/**
 * Run tasks in batches of CONCURRENCY, pausing BATCH_DELAY_MS between
 * batches (but not after the last). Returns the number of rejections.
 */
const runSlowParallel = async (
  tasks: Array<() => Promise<void>>,
): Promise<number> => {
  let failures = 0;
  for (let start = 0; start < tasks.length; start += CONCURRENCY) {
    const settled = await Promise.allSettled(
      tasks.slice(start, start + CONCURRENCY).map((task) => task()),
    );
    failures += settled.filter((r) => r.status === "rejected").length;
    const hasMore = start + CONCURRENCY < tasks.length;
    if (hasMore) await sleep(BATCH_DELAY_MS);
  }
  return failures;
};
/**
 * Run tasks in batches of UPSERT_BATCH_SIZE with no inter-batch delay.
 * Returns the number of rejected tasks.
 */
const runBatchUpserts = async (
  tasks: Array<() => Promise<void>>,
): Promise<number> => {
  let failures = 0;
  for (let start = 0; start < tasks.length; start += UPSERT_BATCH_SIZE) {
    const settled = await Promise.allSettled(
      tasks.slice(start, start + UPSERT_BATCH_SIZE).map((task) => task()),
    );
    failures += settled.filter((r) => r.status === "rejected").length;
  }
  return failures;
};
/**
 * Full catalog refresh cycle.
 *
 * Strategy: try the collector (bulk) feed first; if that throws at any
 * point, fall back to the ConnectWise REST API path. Progress and
 * outcomes are reported via cw:catalog:refresh:* events.
 */
export const refreshCatalog = async () => {
  const refreshStartedAt = Date.now();
  const logCatalogError = createCatalogErrorLogger();
  events.emit("cw:catalog:refresh:check");
  console.info("[catalog-refresh] Refresh cycle started");
  try {
    // --- Collector-first path ---
    console.info(
      "[catalog-refresh] Attempting collector-first catalog refresh",
    );
    const collectorItems = await catalogCw.fetchAllProductsFromCollector();
    console.info(
      `[catalog-refresh] Collector returned ${collectorItems.size} products`,
    );
    // totalDb unknown at this point; every collector row is treated stale.
    events.emit("cw:catalog:refresh:started", {
      totalCw: collectorItems.size,
      totalDb: null,
      staleCount: collectorItems.size,
    });
    const upsertStartedAt = Date.now();
    const updatedCount = await upsertCollectorItems(
      collectorItems,
      logCatalogError,
    );
    console.info(
      `[catalog-refresh] Collector upserts completed in ${Date.now() - upsertStartedAt}ms (${updatedCount} rows updated)`,
    );
    events.emit("cw:catalog:refresh:completed", {
      totalCw: collectorItems.size,
      totalDb: collectorItems.size,
      staleCount: collectorItems.size,
      itemsUpdated: updatedCount,
    });
    console.info(
      `[catalog-refresh] Refresh cycle completed via collector in ${Date.now() - refreshStartedAt}ms`,
    );
    return;
  } catch (err) {
    // Any collector failure (fetch, normalize, upsert) drops to the CW path.
    console.warn(
      `[catalog-refresh] Collector fetchProducts failed, falling back to CW API: ${err instanceof Error ? err.message : String(err)}`,
    );
  }
  // --- CW REST fallback path ---
  const fallbackStartedAt = Date.now();
  console.info("[catalog-refresh] Starting CW fallback catalog refresh");
  try {
    await refreshCatalogFromCw(logCatalogError);
  } catch (err) {
    console.error(
      `[catalog-refresh] CW fallback failed: ${summarizePrismaError(err)}`,
    );
    throw err;
  }
  console.info(
    `[catalog-refresh] CW fallback refresh completed in ${Date.now() - fallbackStartedAt}ms (${Date.now() - refreshStartedAt}ms total cycle)`,
  );
};
const upsertCollectorItems = async (
  collectorItems: Map<number, NormalizedCatalogCollectorItem>,
  logCatalogError: (
    context: string,
    err: unknown,
    detail?: Record<string, unknown>,
  ) => void,
): Promise<number> => {
  // Rows successfully written; this is also the return value.
  let successCount = 0;
  // Running tally used purely for progress logging below.
  let progressCount = 0;
  const itemTotal = collectorItems.size;
  const tasks: Array<() => Promise<void>> = [...collectorItems.values()].map(
    (item) => async () => {
      // Fields shared by both branches of the upsert; `create` additionally
      // carries the primary lookup key.
      const fields = {
        identifier: item.identifier,
        name: item.name,
        description: item.description,
        customerDescription: item.customerDescription,
        internalNotes: item.internalNotes,
        category: item.category,
        categoryCwId: item.categoryCwId,
        subcategory: item.subcategory,
        subcategoryCwId: item.subcategoryCwId,
        manufacturer: item.manufacturer,
        manufactureCwId: item.manufactureCwId,
        partNumber: item.partNumber,
        vendorName: item.vendorName,
        vendorSku: item.vendorSku,
        vendorCwId: item.vendorCwId,
        price: item.price,
        cost: item.cost,
        inactive: item.inactive,
        salesTaxable: item.salesTaxable,
        onHand: item.onHand,
        cwLastUpdated: item.cwLastUpdated,
      };
      try {
        await prisma.catalogItem.upsert({
          where: { cwCatalogId: item.cwCatalogId },
          create: { cwCatalogId: item.cwCatalogId, ...fields },
          update: fields,
        });
      } catch (err) {
        // Surface the failure with the item id, then rethrow so the batch
        // runner counts this task as failed.
        logCatalogError("collector upsert failed", err, {
          id: item.cwCatalogId,
        });
        throw err;
      }
      // Only successful upserts advance the counters (errors rethrew above).
      successCount += 1;
      progressCount += 1;
      // Log the first few items, every 250th, and the final one.
      const logProgress =
        progressCount <= 5 ||
        progressCount % 250 === 0 ||
        progressCount === itemTotal;
      if (logProgress) {
        console.info(
          `[catalog-refresh] Collector upsert progress: ${progressCount}/${itemTotal}`,
        );
      }
    },
  );
  const failedTaskCount = await runBatchUpserts(tasks);
  if (failedTaskCount > 0) {
    console.warn(
      `[catalog-refresh] ${failedTaskCount} collector upsert task(s) failed; remaining items will retry next cycle`,
    );
  }
  return successCount;
};
/**
 * Fallback catalog refresh path (used when the collector fetch fails).
 *
 * Diffs CW's lightweight catalog summaries against the local DB and only
 * re-fetches items that are new or whose CW `lastUpdated` is newer than the
 * stored `cwLastUpdated`. Emits skipped/started/completed events around the
 * work so dashboards can track the cycle.
 *
 * @param logCatalogError - Sink for per-item failures; failed items are
 *   skipped this cycle and retried on the next run.
 */
const refreshCatalogFromCw = async (
  logCatalogError: (
    context: string,
    err: unknown,
    detail?: Record<string, unknown>,
  ) => void,
) => {
  // 1. Fetch lightweight summaries from CW (id + _info with lastUpdated)
  const cwSummaries = await catalogCw.fetchAllCatalogSummary();
  // 2. Fetch all DB items with their cwCatalogId and cwLastUpdated
  const dbItems = await prisma.catalogItem.findMany({
    select: { cwCatalogId: true, cwLastUpdated: true },
  });
  // Index DB rows by CW id for O(1) staleness lookups below.
  const dbMap = new Map(
    dbItems.map((item) => [item.cwCatalogId, item.cwLastUpdated]),
  );
  // 3. Compare CW lastUpdated vs DB cwLastUpdated — collect IDs that are stale or new
  const staleIds: number[] = [];
  for (const [cwId, summary] of cwSummaries) {
    const cwLastUpdated = summary._info?.lastUpdated
      ? new Date(summary._info.lastUpdated)
      : null;
    const dbLastUpdated = dbMap.get(cwId) ?? null;
    // New item (not in DB) or CW has a newer timestamp
    if (!dbLastUpdated || (cwLastUpdated && cwLastUpdated > dbLastUpdated)) {
      staleIds.push(cwId);
    }
  }
  // Nothing stale: emit "skipped" so observers still see a heartbeat.
  if (staleIds.length === 0) {
    events.emit("cw:catalog:refresh:skipped", {
      totalCw: cwSummaries.size,
      totalDb: dbItems.length,
      staleCount: 0,
    });
    return;
  }
  events.emit("cw:catalog:refresh:started", {
    totalCw: cwSummaries.size,
    totalDb: dbItems.length,
    staleCount: staleIds.length,
  });
  // 4. Fetch full CW item data for stale IDs using slow, bounded concurrency
  // NOTE(review): payloads are untyped (`any`) here — presumably the full CW
  // product schema; consider introducing a typed interface.
  const cwItemMap = new Map<number, any>();
  const itemFetchTasks: Array<() => Promise<void>> = staleIds.map(
    (cwId) => async () => {
      const item = await catalogCw.fetchByCatalogId(cwId);
      cwItemMap.set(cwId, item);
    },
  );
  const itemFetchFailures = await runSlowParallel(itemFetchTasks);
  // 5. Fetch inventory onHand for stale IDs using the same slow parallel strategy
  const onHandMap = new Map<number, number>();
  const inventoryTasks: Array<() => Promise<void>> = staleIds.map(
    (cwId) => async () => {
      try {
        const onHand = await catalogCw.fetchInventoryOnHand(cwId);
        onHandMap.set(cwId, onHand);
      } catch {
        // Inventory lookups are best-effort: default to 0 rather than
        // failing the whole item.
        onHandMap.set(cwId, 0);
      }
    },
  );
  const inventoryFailures = await runSlowParallel(inventoryTasks);
  // 6. Upsert stale/new items with bounded slow parallel execution
  let updatedCount = 0;
  const upsertTasks: Array<() => Promise<void>> = staleIds.map(
    (cwId) => async () => {
      // Item fetch may have failed above; skip silently — it will be
      // retried on the next cycle because the DB timestamp stays stale.
      const item = cwItemMap.get(cwId);
      if (!item) return;
      const cwLastUpdated = item._info?.lastUpdated
        ? new Date(item._info.lastUpdated)
        : new Date();
      const onHand = onHandMap.get(cwId) ?? 0;
      try {
        await prisma.catalogItem.upsert({
          where: { cwCatalogId: cwId },
          create: {
            cwCatalogId: cwId,
            identifier: item.identifier,
            // NOTE(review): `name` is sourced from CW `description` here,
            // while the collector path uses a distinct name field — confirm
            // this mapping is intentional.
            name: item.description,
            description: item.description,
            customerDescription: item.customerDescription,
            internalNotes: item.notes,
            category: item.category?.name,
            categoryCwId: item.category?.id,
            subcategory: item.subcategory?.name,
            subcategoryCwId: item.subcategory?.id,
            manufacturer: item.manufacturer?.name,
            manufactureCwId: item.manufacturer?.id,
            partNumber: item.manufacturerPartNumber,
            vendorName: item.vendor?.name,
            vendorSku: item.vendorSku,
            vendorCwId: item.vendor?.id,
            price: item.price,
            cost: item.cost,
            inactive: item.inactiveFlag,
            salesTaxable: item.taxableFlag,
            onHand,
            cwLastUpdated,
          },
          update: {
            name: item.description,
            identifier: item.identifier,
            description: item.description,
            customerDescription: item.customerDescription,
            internalNotes: item.notes,
            category: item.category?.name,
            categoryCwId: item.category?.id,
            subcategory: item.subcategory?.name,
            subcategoryCwId: item.subcategory?.id,
            manufacturer: item.manufacturer?.name,
            manufactureCwId: item.manufacturer?.id,
            partNumber: item.manufacturerPartNumber,
            vendorName: item.vendor?.name,
            vendorSku: item.vendorSku,
            vendorCwId: item.vendor?.id,
            price: item.price,
            cost: item.cost,
            inactive: item.inactiveFlag,
            salesTaxable: item.taxableFlag,
            onHand,
            cwLastUpdated,
          },
        });
      } catch (err) {
        logCatalogError("CW fallback upsert failed", err, {
          id: cwId,
        });
        throw err;
      }
      updatedCount += 1;
    },
  );
  const upsertFailures = await runBatchUpserts(upsertTasks);
  // Aggregate failures across all three phases for a single warning line.
  const failedTasks = itemFetchFailures + inventoryFailures + upsertFailures;
  if (failedTasks > 0) {
    console.warn(
      `[catalog-refresh] ${failedTasks} slow-parallel task(s) failed; remaining items will retry next cycle`,
    );
  }
  events.emit("cw:catalog:refresh:completed", {
    totalCw: cwSummaries.size,
    totalDb: dbItems.length,
    staleCount: staleIds.length,
    itemsUpdated: updatedCount,
  });
};
@@ -0,0 +1,92 @@
import { prisma } from "../../../constants";
import { events } from "../../globalEvents";
import { catalogCw } from "./catalog";
/** Maximum number of inventory lookups in flight at once. */
const CONCURRENCY = 6;
/** Pause between fetch batches so the CW API is not hammered. */
const BATCH_DELAY_MS = 250;
/** Resolve after `ms` milliseconds; used to pace batches. */
const sleep = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms));
/**
 * Refresh on-hand inventory counts for all active catalog items.
 *
 * Fetches current onHand values from ConnectWise in small rate-limited
 * batches, then writes back only the rows whose count actually changed.
 * A failed fetch falls back to the value already stored in the DB (so the
 * diff below treats it as unchanged) and is retried on the next sweep.
 */
export const refreshInventory = async () => {
  events.emit("cw:inventory:refresh:check");
  // 1. Get all active catalog items from DB
  const dbItems = await prisma.catalogItem.findMany({
    where: { inactive: false },
    select: { cwCatalogId: true, onHand: true },
  });
  if (dbItems.length === 0) {
    events.emit("cw:inventory:refresh:skipped", {
      totalItems: 0,
      updatedCount: 0,
    });
    return;
  }
  events.emit("cw:inventory:refresh:started", {
    totalItems: dbItems.length,
  });
  // 2. Slow-parallel fetch inventory onHand for all items
  const onHandMap = new Map<number, number>();
  // Count of fetches that failed and fell back to the stored value.
  // Bug fix: this was previously derived from Promise.allSettled rejections,
  // but the inner catch swallows every error, so the counter could never
  // increment and the warning below was dead code.
  let failedCount = 0;
  for (let i = 0; i < dbItems.length; i += CONCURRENCY) {
    const batch = dbItems.slice(i, i + CONCURRENCY);
    await Promise.all(
      batch.map(async (item) => {
        try {
          const onHand = await catalogCw.fetchInventoryOnHand(item.cwCatalogId);
          onHandMap.set(item.cwCatalogId, onHand);
        } catch {
          failedCount += 1;
          // Fall back to the last known value so the diff below is a no-op.
          onHandMap.set(item.cwCatalogId, item.onHand);
        }
      }),
    );
    // Pace the API: pause between batches, but not after the last one.
    if (i + CONCURRENCY < dbItems.length) await sleep(BATCH_DELAY_MS);
  }
  // 3. Only update items where onHand has changed
  const updates = dbItems.filter((item) => {
    const newOnHand = onHandMap.get(item.cwCatalogId) ?? item.onHand;
    return newOnHand !== item.onHand;
  });
  if (updates.length === 0) {
    events.emit("cw:inventory:refresh:skipped", {
      totalItems: dbItems.length,
      updatedCount: 0,
    });
    return;
  }
  await Promise.all(
    updates.map((item) =>
      prisma.catalogItem.update({
        where: { cwCatalogId: item.cwCatalogId },
        data: { onHand: onHandMap.get(item.cwCatalogId) ?? item.onHand },
      }),
    ),
  );
  // Every update either succeeded or Promise.all threw, so this is exact
  // (the previous `.length` of the Promise.all result was the same value,
  // just computed indirectly).
  const updatedCount = updates.length;
  events.emit("cw:inventory:refresh:completed", {
    totalItems: dbItems.length,
    updatedCount,
  });
  if (failedCount > 0) {
    console.warn(
      `[inventory-refresh] ${failedCount} task(s) failed; fallback values were used and will retry next sweep`,
    );
  }
};
@@ -0,0 +1,46 @@
import { connectWiseApi, prisma } from "../../constants";
import { events } from "../globalEvents";
import { fetchAllCwCompanies } from "./fetchAllCompanies";
/**
 * Sync the local company table with ConnectWise.
 *
 * Pulls every company from CW and upserts them in batches of 50 so the
 * database connection pool is never saturated, then emits a completion
 * event carrying internal/external counts and the number of rows written.
 */
export const refreshCompanies = async () => {
  events.emit("cw:companies:refresh:check");
  const internalCompanyCount = await prisma.company.count();
  events.emit("cw:companies:refresh:started");
  const allCompanies = await fetchAllCwCompanies();
  const externalCompanyCount = allCompanies.size;
  // Batch upserts to avoid exhausting the database connection pool
  const BATCH_SIZE = 50;
  const pending = [...allCompanies.values()];
  let updatedCount = 0;
  while (pending.length > 0) {
    const batch = pending.splice(0, BATCH_SIZE);
    const written = await Promise.all(
      batch.map((company) =>
        prisma.company.upsert({
          where: { cw_CompanyId: company.id },
          create: {
            cw_CompanyId: company.id,
            cw_Identifier: company.identifier,
            name: company.name,
          },
          update: {
            name: company.name,
          },
        }),
      ),
    );
    updatedCount += written.length;
  }
  events.emit("cw:companies:refresh:completed", {
    internalCompaniesCount: internalCompanyCount,
    externalCompaniesCount: externalCompanyCount,
    companiesUpdated: updatedCount,
  });
};
@@ -0,0 +1,79 @@
import { connectWiseApi } from "../../../constants";
/** Shape of a company site record as returned by the ConnectWise REST API. */
export interface CWCompanySite {
  id: number;
  name: string;
  addressLine1: string;
  addressLine2?: string;
  city: string;
  /** State lookup reference; null when CW has no state on file. */
  stateReference: { id: number; identifier: string; name: string } | null;
  zip: string;
  country: { id: number; name: string } | null;
  phoneNumber: string;
  faxNumber: string;
  taxCodeId: number | null;
  expenseReimbursement: number;
  /** Which roles this site plays for the company (primary/shipping/billing/mailing). */
  primaryAddressFlag: boolean;
  defaultShippingFlag: boolean;
  defaultBillingFlag: boolean;
  defaultMailingFlag: boolean;
  mobileGuid: string;
  calendar: { id: number; name: string } | null;
  timeZone: { id: number; name: string } | null;
  /** Back-reference to the owning company. */
  company: { id: number; identifier: string; name: string };
  // CW metadata block — presumably lastUpdated/updatedBy plus links; keys
  // vary by endpoint, so it is kept loosely typed.
  _info: Record<string, string>;
}
/**
 * Fetch all sites for a ConnectWise company.
 *
 * @param cwCompanyId - The ConnectWise company ID
 * @returns Array of CW company sites
 */
export const fetchCompanySites = async (
  cwCompanyId: number,
): Promise<CWCompanySite[]> => {
  // Single page request; 1000 is well above any realistic site count.
  const url = `/company/companies/${cwCompanyId}/sites?pageSize=1000`;
  const { data } = await connectWiseApi.get(url);
  return data;
};
/**
 * Fetch a single site by CW site ID for a given company.
 *
 * @param cwCompanyId - The ConnectWise company ID
 * @param cwSiteId - The ConnectWise site ID
 * @returns The CW company site
 */
export const fetchCompanySite = async (
  cwCompanyId: number,
  cwSiteId: number,
): Promise<CWCompanySite> => {
  const url = `/company/companies/${cwCompanyId}/sites/${cwSiteId}`;
  const { data } = await connectWiseApi.get(url);
  return data;
};
/**
 * Serialize a CW site into a clean API-friendly object.
 *
 * Nullable lookups collapse to null (country defaults to "United States");
 * empty phone/fax strings are normalized to null.
 */
export const serializeCwSite = (site: CWCompanySite) => {
  const { stateReference, country } = site;
  return {
    id: site.id,
    name: site.name,
    address: {
      line1: site.addressLine1,
      line2: site.addressLine2 ?? null,
      city: site.city,
      state: stateReference ? stateReference.name : null,
      zip: site.zip,
      country: country ? country.name : "United States",
    },
    // Empty strings become null so consumers get a clean "absent" signal.
    phoneNumber: site.phoneNumber || null,
    faxNumber: site.faxNumber || null,
    primaryAddressFlag: site.primaryAddressFlag,
    defaultShippingFlag: site.defaultShippingFlag,
    defaultBillingFlag: site.defaultBillingFlag,
    defaultMailingFlag: site.defaultMailingFlag,
  };
};
+24
View File
@@ -0,0 +1,24 @@
import { Company } from "../../../generated/prisma/client";
import { prisma } from "../../constants";
import { fetchCwCompanyById } from "./fetchCompany";
/**
 * Pull a single company from ConnectWise and mirror it into the local DB.
 *
 * @param companyId - The ConnectWise company ID to refresh
 * @returns The upserted local Company row, or null when CW has no match
 */
export const updateCwInternalCompany = async (
  companyId: number,
): Promise<Company | null> => {
  const cwCompany = await fetchCwCompanyById(companyId);
  if (!cwCompany) return null;
  // Insert on first sight, otherwise just refresh the display name.
  return prisma.company.upsert({
    where: { cw_CompanyId: cwCompany.id },
    create: {
      cw_CompanyId: cwCompany.id,
      cw_Identifier: cwCompany.identifier,
      name: cwCompany.name,
    },
    update: {
      name: cwCompany.name,
    },
  });
};
@@ -0,0 +1,6 @@
// Barrel file for the ConnectWise User Defined Fields module.
export { userDefinedFieldsCw } from "./userDefinedFields";
export type {
  CWUserDefinedField,
  CWUserDefinedFieldOption,
  CWUserDefinedFieldInfo,
} from "./udf.types";
@@ -0,0 +1,34 @@
/** One selectable option of a list-type User Defined Field. */
export interface CWUserDefinedFieldOption {
  id: number;
  optionValue: string;
  defaultFlag: boolean;
  inactiveFlag: boolean;
  sortOrder: number;
}
/** CW audit metadata attached to a UDF record. */
export interface CWUserDefinedFieldInfo {
  lastUpdated: string;
  updatedBy: string;
}
/** A ConnectWise User Defined Field definition as returned by the REST API. */
export interface CWUserDefinedField {
  id: number;
  podId: number;
  /** Display label shown on the CW screen. */
  caption: string;
  sequenceNumber: number;
  /** Which CW screen this field appears on. */
  screenId: string;
  helpText?: string;
  fieldTypeIdentifier: string;
  numberDecimals: number;
  entryTypeIdentifier: string;
  requiredFlag: boolean;
  displayOnScreenFlag: boolean;
  readOnlyFlag: boolean;
  listViewFlag: boolean;
  /** Present only for list/option field types. */
  options?: CWUserDefinedFieldOption[];
  businessUnitIds: number[];
  locationIds: number[];
  connectWiseID: string;
  dateCreated: string;
  _info: CWUserDefinedFieldInfo;
}
@@ -0,0 +1,119 @@
import { Collection } from "@discordjs/collection";
import { connectWiseApi, redis } from "../../../constants";
import { events } from "../../globalEvents";
import { CWUserDefinedField } from "./udf.types";
/** Redis key under which the full UDF snapshot is persisted as JSON. */
const REDIS_KEY = "cw:userDefinedFields";
/** In-memory cache of all CW User Defined Fields, keyed by UDF id */
let cache: Collection<number, CWUserDefinedField> = new Collection();
/**
 * Cached accessor for ConnectWise User Defined Fields.
 *
 * Lookup order is: in-memory Collection -> Redis snapshot -> live CW API.
 * `refresh()` repopulates both cache layers; `invalidate()` clears both.
 */
export const userDefinedFieldsCw = {
  /**
   * Get Cache
   *
   * Returns the current in-memory Collection of all User Defined Fields.
   * If the cache is empty, it will attempt to hydrate from Redis first,
   * then fall back to a live API fetch.
   */
  get: async (): Promise<Collection<number, CWUserDefinedField>> => {
    if (cache.size > 0) return cache;
    // Try hydrating from Redis
    const stored = await redis.get(REDIS_KEY);
    if (stored) {
      const parsed: CWUserDefinedField[] = JSON.parse(stored);
      cache = new Collection(parsed.map((udf) => [udf.id, udf]));
      return cache;
    }
    // Nothing cached anywhere — do a live fetch
    return userDefinedFieldsCw.refresh();
  },
  /**
   * Fetch All User Defined Fields
   *
   * Fetches all UDFs from the ConnectWise API.
   * Does NOT update the cache — use `refresh()` for that.
   */
  fetchAll: async (): Promise<Collection<number, CWUserDefinedField>> => {
    const allItems = new Collection<number, CWUserDefinedField>();
    const pageSize = 1000;
    // NOTE(review): only the first page is requested — if a tenant ever has
    // more than `pageSize` UDFs the overflow is silently dropped. Confirm
    // the count stays under 1000 or add pagination.
    const response = await connectWiseApi.get(
      `/system/userDefinedFields?pageSize=${pageSize}`,
    );
    const items: CWUserDefinedField[] = response.data;
    for (const item of items) {
      allItems.set(item.id, item);
    }
    return allItems;
  },
  /**
   * Refresh
   *
   * Fetches all UDFs from ConnectWise, replaces the in-memory cache
   * and persists the snapshot to Redis.
   */
  refresh: async (): Promise<Collection<number, CWUserDefinedField>> => {
    events.emit("cw:udf:refresh:started");
    const allItems = await userDefinedFieldsCw.fetchAll();
    cache = allItems;
    // Persist to Redis
    await redis.set(REDIS_KEY, JSON.stringify([...allItems.values()]));
    events.emit("cw:udf:refresh:completed", { count: allItems.size });
    return cache;
  },
  /**
   * Find by ID
   *
   * Returns a single UDF by its ConnectWise ID from the cache.
   */
  findById: async (id: number): Promise<CWUserDefinedField | undefined> => {
    const items = await userDefinedFieldsCw.get();
    return items.get(id);
  },
  /**
   * Find by Caption
   *
   * Returns the first UDF matching the given caption (case-insensitive).
   */
  findByCaption: async (
    caption: string,
  ): Promise<CWUserDefinedField | undefined> => {
    const items = await userDefinedFieldsCw.get();
    const lowerCaption = caption.toLowerCase();
    return items.find((udf) => udf.caption.toLowerCase() === lowerCaption);
  },
  /**
   * Find by Screen ID
   *
   * Returns all UDFs associated with a given screenId.
   */
  findByScreenId: async (
    screenId: string,
  ): Promise<Collection<number, CWUserDefinedField>> => {
    const items = await userDefinedFieldsCw.get();
    return items.filter((udf) => udf.screenId === screenId);
  },
  /**
   * Invalidate
   *
   * Clears the in-memory cache and removes the Redis key.
   */
  invalidate: async (): Promise<void> => {
    cache = new Collection();
    await redis.del(REDIS_KEY);
  },
};
+90
View File
@@ -0,0 +1,90 @@
/**
* Generic retry wrapper for ConnectWise API calls.
*
* Retries on transient errors (ECONNABORTED, ECONNRESET, ETIMEDOUT,
* ECONNREFUSED, ERR_NETWORK) with exponential backoff. Non-transient
* errors (e.g. 404, 400) are re-thrown immediately.
*/
/** Axios/network error codes treated as transient and therefore retryable. */
const TRANSIENT_CODES = new Set([
  "ECONNABORTED",
  "ECONNRESET",
  "ETIMEDOUT",
  "ECONNREFUSED",
  "ERR_NETWORK",
  "ENETUNREACH",
]);
/** Tuning knobs for `withCwRetry`. */
export interface CwRetryOptions {
  /** Maximum number of attempts (including the first). Default: 3 */
  maxAttempts?: number;
  /** Base delay in ms before the first retry. Doubles each retry. Default: 1000 */
  baseDelayMs?: number;
  /** Optional label for log messages. */
  label?: string;
}
/**
 * Execute `fn` and retry up to `maxAttempts - 1` times on transient
 * Axios / network errors. Non-transient errors (e.g. 400/404) and the
 * final failed attempt are re-thrown to the caller unchanged.
 */
export async function withCwRetry<T>(
  fn: () => Promise<T>,
  opts: CwRetryOptions = {},
): Promise<T> {
  const { maxAttempts = 3, baseDelayMs = 1_000, label } = opts;
  const tag = label ? `[${label}] ` : "";
  let lastError: unknown;
  let attempt = 0;
  while (attempt < maxAttempts) {
    attempt += 1;
    try {
      return await fn();
    } catch (err) {
      lastError = err;
      const outOfAttempts = attempt === maxAttempts;
      if (!isRetryable(err) || outOfAttempts) throw err;
      // Exponential backoff: 1s, 2s, 4s, ...
      const delay = baseDelayMs * 2 ** (attempt - 1);
      console.warn(
        `${tag}CW transient error (attempt ${attempt}/${maxAttempts}), retrying in ${delay}ms — ${describeErr(err)}`,
      );
      await sleep(delay);
    }
  }
  // Unreachable: the loop either returns or throws, but TS needs a tail.
  throw lastError;
}
// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------
/**
 * Decide whether an error is worth retrying: Axios errors carrying a known
 * transient network code, or any 5xx response from CW. Anything that is not
 * an Axios error is never retried.
 */
function isRetryable(err: unknown): boolean {
  if (typeof err !== "object" || err === null) return false;
  const candidate = err as Record<string, any>;
  if (!candidate.isAxiosError) return false;
  const httpStatus = candidate.response?.status;
  const isServerError = typeof httpStatus === "number" && httpStatus >= 500;
  // Transient network codes and CW 5xx responses are both retryable.
  return TRANSIENT_CODES.has(candidate.code) || isServerError;
}
/**
 * Produce a short human-readable description of an error for retry logs.
 *
 * Axios errors render as "METHOD url — CODE" (falling back to the HTTP
 * status when no code is present); anything else falls back to its
 * `.message` or `String()` form.
 */
function describeErr(err: unknown): string {
  if (typeof err !== "object" || err === null) return String(err);
  const e = err as Record<string, any>;
  if (e.isAxiosError) {
    const method = (e.config?.method ?? "?").toUpperCase();
    const url = e.config?.url ?? "unknown";
    // Bug fix: the code/status was previously concatenated directly onto
    // the URL ("GET /pathECONNRESET"); insert a separator so logs stay
    // readable.
    return `${method} ${url} — ${e.code ?? `HTTP ${e.response?.status}`}`;
  }
  return (e as Error).message ?? String(err);
}
const sleep = (ms: number) => new Promise((r) => setTimeout(r, ms));
+33
View File
@@ -0,0 +1,33 @@
import { MicrosoftGraphUser } from "../types/MSAuthTypes";
/**
 * Fetch Microsoft User
 *
 * Retrieves the signed-in user's profile from the Microsoft Graph `/me`
 * endpoint using the supplied bearer token. Throws on any non-2xx
 * response, including the status line in the error message.
 *
 * @param accessToken - This is the access token provided by Microsoft.
 * @returns - Raw API Data from Microsoft.
 */
export const fetchMicrosoftUser = async (
  accessToken: string,
): Promise<MicrosoftGraphUser> => {
  const response = await fetch("https://graph.microsoft.com/v1.0/me", {
    method: "GET",
    headers: { Authorization: `Bearer ${accessToken}` },
  });
  if (!response.ok) {
    throw new Error(
      `Graph request failed: ${response.status} ${response.statusText}`,
    );
  }
  return (await response.json()) as MicrosoftGraphUser;
};
+275
View File
@@ -0,0 +1,275 @@
import { Eventra } from "@duxcore/eventra";
import UserController from "../controllers/UserController";
import {
SessionController,
SessionTokensObject,
} from "../controllers/SessionController";
import { RoleController } from "../controllers/RoleController";
import { CompanyController } from "../controllers/CompanyController";
import { JsonWebTokenError } from "jsonwebtoken";
import { User, Company } from "../../generated/prisma/client";
import { DefaultEventsMap, Socket } from "socket.io";
import { WorkerQueue } from "./workers/queues";
export type JobLogLevel = "INFO" | "WARN" | "ERROR" | "DEBUG";
/**
 * Map of every typed event the application's global bus can emit, keyed by
 * event name. Each value is the listener signature for that event. Adding
 * an event here makes it available (and type-checked) on `events.emit` /
 * `events.on` throughout the codebase.
 */
export interface EventTypes {
  // API Lifecycle
  "api:started": () => void;
  // User Events
  "user:created": (user: UserController) => void;
  "user:updated": (data: {
    user: UserController;
    updatedValues: Partial<User>;
  }) => void;
  "user:deleted": (data: { id: string }) => void;
  "user:authenticated": (data: {
    user: UserController;
    tokens: SessionTokensObject;
  }) => void;
  "user:role:assigned": (data: {
    user: UserController;
    role: RoleController;
  }) => void;
  "user:role:removed": (data: {
    user: UserController;
    role: RoleController;
  }) => void;
  // Session Events
  "session:created": (data: {
    user: UserController;
    session: SessionController;
  }) => void;
  "session:tokens_generated": (data: {
    session: SessionController;
    tokens: SessionTokensObject;
  }) => void;
  "session:token_refresh": (data: {
    session: SessionController;
    tokens: SessionTokensObject;
  }) => void;
  "session:invalidated": (session: SessionController) => void;
  "session:terminated": (session: SessionController) => void;
  // Role Events
  "role:created": (role: RoleController) => void;
  "role:deleted": (role: RoleController) => void;
  "role:updated": (data: {
    role: RoleController;
    updateData: Parameters<typeof RoleController.prototype.update>["0"];
  }) => void;
  "role:permissions:set": (data: {
    previous: string[];
    previousSigned: string;
    current: string[];
    currentSigned: string;
    role: RoleController;
  }) => void;
  "role:permissions:added": (data: {
    previous: string[];
    previousSigned: string;
    added: string[];
    currentSigned: string;
    role: RoleController;
  }) => void;
  "role:permissions:removed": (data: {
    previous: string[];
    previousSigned: string;
    removed: string[];
    currentSigned: string;
    role: RoleController;
  }) => void;
  "role:permissions:verification_error": (data: {
    currentSigned: string;
    attemptedVerification: string;
    err: Error;
    role: RoleController;
  }) => void;
  // Company Events
  "company:fetched": (company: CompanyController) => void;
  "company:refreshed_from_cw": (company: CompanyController) => void;
  "company:configurations_fetched": (data: {
    company: CompanyController;
    configurationCount: number;
  }) => void;
  // ConnectWise Integration Events
  "cw:companies:refresh:check": () => void;
  "cw:companies:refresh:started": () => void;
  "cw:companies:refresh:completed": (data: {
    internalCompaniesCount: number;
    externalCompaniesCount: number;
    companiesUpdated: number;
  }) => void;
  "cw:companies:refresh:skipped": (data: {
    internalCompaniesCount: number;
    externalCompaniesCount: number;
  }) => void;
  "cw:company:data:updated": (data: {
    company: CompanyController;
    updatedFields: Partial<Company>;
  }) => void;
  // ConnectWise Catalog Events
  "cw:catalog:refresh:check": () => void;
  "cw:catalog:refresh:started": (data: {
    totalCw: number;
    totalDb: number;
    staleCount: number;
  }) => void;
  "cw:catalog:refresh:completed": (data: {
    totalCw: number;
    totalDb: number;
    staleCount: number;
    itemsUpdated: number;
  }) => void;
  "cw:catalog:refresh:skipped": (data: {
    totalCw: number;
    totalDb: number;
    staleCount: number;
  }) => void;
  // UniFi Events
  "unifi:login:ok": (data: {
    type: "unifi-os" | "legacy";
    status: number;
  }) => void;
  "unifi:login:fallback": () => void;
  "unifi:reauth": () => void;
  "unifi:sites:sync:started": () => void;
  "unifi:sites:sync:completed": (data: {
    total: number;
    created: number;
    updated: number;
  }) => void;
  "unifi:wlan:fetched": (data: { path: string }) => void;
  "unifi:wlan:fetch_failed": (data: {
    path: string;
    status: number | unknown;
  }) => void;
  // ConnectWise Inventory Events
  "cw:inventory:refresh:check": () => void;
  "cw:inventory:refresh:started": (data: { totalItems: number }) => void;
  "cw:inventory:refresh:completed": (data: {
    totalItems: number;
    updatedCount: number;
  }) => void;
  "cw:inventory:refresh:skipped": (data: {
    totalItems: number;
    updatedCount: number;
  }) => void;
  // ConnectWise Opportunities Events
  "cw:opportunities:refresh:check": () => void;
  "cw:opportunities:refresh:started": (data: {
    totalCw: number;
    totalDb: number;
    staleCount: number;
  }) => void;
  "cw:opportunities:refresh:completed": (data: {
    totalCw: number;
    totalDb: number;
    staleCount: number;
    itemsUpdated: number;
    orphanedCount: number;
  }) => void;
  "cw:opportunities:refresh:reconciled": (data: {
    orphanedCount: number;
    removedCwIds: number[];
  }) => void;
  "cw:opportunities:refresh:skipped": (data: {
    totalCw: number;
    totalDb: number;
    staleCount: number;
    orphanedCount: number;
  }) => void;
  // Cache Events
  "cache:opportunities:refresh:started": (data: {
    totalOpportunities: number;
  }) => void;
  "cache:opportunities:refresh:completed": (data: {
    totalOpportunities: number;
    activitiesRefreshed: number;
    companiesRefreshed: number;
    notesRefreshed: number;
    contactsRefreshed: number;
    productsRefreshed: number;
    oppCwDataRefreshed: number;
    skipped: number;
  }) => void;
  "cache:opportunities:refresh:error": (data: { error: unknown }) => void;
  // Sales Metrics Cache Events
  "cache:salesMetrics:refresh:started": (data: {
    activeMemberCount: number;
    opportunityCount: number;
  }) => void;
  "cache:salesMetrics:refresh:completed": (data: {
    activeMemberCount: number;
    opportunityCount: number;
    memberMetricsWritten: number;
    cacheHitCount: number;
    cacheMissCount: number;
    durationMs: number;
  }) => void;
  "cache:salesMetrics:refresh:error": (data: {
    error: unknown;
    durationMs: number;
  }) => void;
  // ConnectWise User Defined Fields Events
  "cw:udf:refresh:started": () => void;
  "cw:udf:refresh:completed": (data: { count: number }) => void;
  // ConnectWise Members Events
  "cw:members:refresh:started": () => void;
  "cw:members:refresh:completed": (data: {
    totalMembers: number;
    totalUsers: number;
    usersUpdated: number;
  }) => void;
  // ConnectWise Members DB Sync Events
  "cw:members:db:refresh:check": () => void;
  "cw:members:db:refresh:started": (data: {
    totalCw: number;
    totalDb: number;
    staleCount: number;
  }) => void;
  "cw:members:db:refresh:completed": (data: {
    totalCw: number;
    totalDb: number;
    staleCount: number;
    itemsUpdated: number;
  }) => void;
  "cw:members:db:refresh:skipped": (data: {
    totalCw: number;
    totalDb: number;
    staleCount: number;
  }) => void;
  // Worker / Job Lifecycle Events
  "worker:error": (data: { error: unknown; context?: string }) => void;
  "worker:io:connection": (data: { socket: Socket }) => void;
  "worker:io:disconnect": (data: { socket: Socket }) => void;
  "job:started": (data: { workerId: string; queueType: WorkerQueue }) => void;
  "job:log": (data: {
    message: string;
    level: JobLogLevel;
    timestamp: string;
    workerId: string;
    queueType: WorkerQueue;
  }) => void;
  "job:error": (data: {
    error: unknown;
    context?: string;
    workerId: string;
    queueType: WorkerQueue;
  }) => void;
  "job:finished": (data: { workerId: string; queueType: WorkerQueue }) => void;
}
export const events = new Eventra<EventTypes>();
+390
View File
@@ -0,0 +1,390 @@
# Event Debugger
Real-time interactive terminal dashboard for monitoring application events, job flows, and background workers.
## Overview
The Event Debugger provides a full-screen, scrollable terminal interface that displays:
- **Active flows** — ongoing jobs and operations with live updates
- **Completed flows** — recently finished operations with final status
- **Nested events** — global events automatically nested under their parent job
- **Error details** — full stack traces for failed operations
- **Job logging** — log-level-aware output from background workers
## Features
### Full-Screen Dashboard
- **Alternate screen buffer** — takes over the terminal like `vim` or `htop`
- **No scrollback pollution** — exits cleanly, restoring your original terminal
- **Hidden cursor** — clean display without cursor flicker
- **Auto-refresh** — updates every 80ms with smooth spinners
### Console Capture
- **Intercepts all console output** — `console.log`, `console.info`, `console.warn`, `console.error`, `console.debug`
- **Displays in dedicated section** — console logs appear below flows with timestamps
- **Color-coded by level** — errors in red, warnings in yellow, info in cyan, etc.
- **Formatted output** — objects are JSON-stringified, errors show name and message
- **Circular buffer** — keeps last 100 console logs, automatically pruning old entries
- **Clearable** — press `c` to clear all console logs
### Interactive Scrolling
- **Keyboard navigation** — vim-style and arrow key controls
- **Viewport windowing** — only renders visible content
- **Scroll indicator** — shows current position and available navigation
- **Bounded scrolling** — automatically clamps to valid ranges
### Smart Flow Tracking
- **Hierarchical flows** — groups related events under a single header
- **Job-aware routing** — associates global events with their parent job
- **Per-worker isolation** — each job gets its own flow keyed by `workerId`
- **Terminal event detection** — flows complete on `DONE`, `SKIP`, `ERROR`, or `job:finished`
- **Late event handling** — socket events arriving after job completion are appended to completed flows
### Log Level Support
Jobs can emit logs with different severity levels:
- `INFO` (cyan) — informational messages
- `WARN` (yellow) — warnings
- `ERROR` (red) — errors
- `DEBUG` (gray) — debug output
## Usage
### Setup
```typescript
import { setupEventDebugger } from "./modules/logging/eventDebugger";
// Enable in development
if (Bun.env.NODE_ENV === "development") {
setupEventDebugger();
}
```
### Emitting Events
```typescript
import { events } from "./modules/globalEvents";
// Start a job (creates a flow)
events.emit("job:started", {
workerId: "abc123",
queueType: WorkerQueue.REFRESH_ACTIVE_OPPORTUNITIES,
});
// Log from within a job
events.emit("job:log", {
message: "Processing 500 opportunities",
level: "INFO",
timestamp: new Date().toISOString(),
workerId: "abc123",
queueType: WorkerQueue.REFRESH_ACTIVE_OPPORTUNITIES,
});
// Emit global events (automatically nested under the job)
events.emit("cache:opportunities:refresh:started", { totalOpportunities: 500 });
// Finish the job (terminates the flow)
events.emit("job:finished", {
workerId: "abc123",
queueType: WorkerQueue.REFRESH_ACTIVE_OPPORTUNITIES,
});
```
### Worker Logging
```typescript
import { workerLog } from "./modules/workers/jobFactory";
async function myWorker(workerSocket: Socket) {
workerLog(workerSocket, "Starting refresh", "INFO");
workerLog(workerSocket, "Cache miss detected", "WARN");
workerLog(workerSocket, "Failed to fetch data", "ERROR");
workerLog(workerSocket, "Detailed trace info", "DEBUG");
}
```
### Console Logging
All console output is automatically captured and displayed:
```typescript
console.log("Processing user request");
console.info("Connected to database");
console.warn("Rate limit approaching");
console.error("Database connection failed");
console.debug("Query execution time: 45ms");
```
These appear in the `CONSOLE OUTPUT` section below all flows.
## Keyboard Controls
| Key | Action |
| --------------- | ------------------------- |
| `↑` or `k` | Scroll up one line |
| `↓` or `j` | Scroll down one line |
| `Page Up` | Scroll up one page |
| `Page Down` | Scroll down one page |
| `Home` or `g` | Jump to top |
| `End` or `G` | Jump to bottom |
| `c` | Clear console logs |
| `q` or `Ctrl+C` | Exit and restore terminal |
## Configuration
Environment variables control display behavior:
```bash
# Disable dynamic rendering (fallback to simple line-by-line logging)
export EVENT_DEBUGGER_DYNAMIC=false
# Maximum children shown per active flow (default: 20)
export EVENT_DEBUGGER_MAX_FLOW_CHILDREN=50
# Maximum children shown per completed flow (default: 4)
export EVENT_DEBUGGER_MAX_COMPLETED_CHILDREN=10
# Event name column width (default: 96)
export EVENT_DEBUGGER_EVENT_WIDTH=120
# Summary column width (default: 180)
export EVENT_DEBUGGER_SUMMARY_WIDTH=200
```
## Display Format
### Flow Header
```
⠋ 12:34:56 [START] cache > opportunities > refresh > active (2.3s)
```
- **Spinner** — rotating while active, status icon when complete (✓ ✗ ⊘)
- **Timestamp** — HH:MM:SS of last update
- **Status** — current label (START, UPDATE, DONE, ERROR, etc.)
- **Title** — human-readable flow name
- **Elapsed** — time since flow started
### Job Flow Header
```
⠋ 12:34:56 [START] [JOB] cache > opportunities > refresh > active #a3f1b2c8 (2.3s)
```
- `[JOB]` prefix
- Queue type as human-readable path
- First 8 characters of workerId
### Child Events
```
- 12:34:56 INFO cache:opportunities:refresh:started -> totalOpportunities=500
- 12:34:57 WARN job:log -> message=Cache miss detected level=WARN workerId=abc123
```
- Indented under parent flow
- Timestamp + Label + Event name + Summary
- Color-coded by log level (for job logs) or event type
### Completed Flows
```
✓ 12:35:00 [DONE] cache > opportunities > refresh > active (14.6s)
- final: 12:34:59 DONE cache:opportunities:refresh:completed -> totalOpportunities=500 updated=472
... 18 older event(s) hidden
```
- Only last 4 children shown (configurable)
- "final:" prefix on children
- Count of hidden events if truncated
### Console Output Section
```
━━━ CONSOLE OUTPUT ━━━
12:34:56 INFO Connected to database
12:34:57 WARN Rate limit approaching: 95/100
12:34:58 ERROR Database connection failed
12:34:59 DEBUG Query execution time: 45ms
12:35:00 LOG Processing user request
```
- Appears below all flows
- Horizontal separator with section title
- Timestamp + Level + Message
- Objects are JSON-stringified
- Errors show `ErrorName: message`
- Last 100 logs kept (circular buffer)
- Press `c` to clear
## Flow Lifecycle
### 1. Flow Creation
A flow is created when:
- A `job:started` event is emitted
- Any event with a `START`, `CHECK`, or flow-tracked label arrives
### 2. Event Nesting
Events are nested under a flow if:
- Their event name starts with `job:` and shares the same `workerId`
- Their event name matches a registered job prefix (e.g., `cache:opportunities:refresh:*`)
- They have a flow-tracked label (`UPDATE`, `LOG`, etc.)
### 3. Flow Completion
A flow terminates when:
- A `job:finished` or `job:error` event arrives (for job flows)
- Any event with a terminal label (`DONE`, `SKIP`, `ERROR`) arrives (for non-job flows)
Completed flows are:
- Moved to the completed section
- Displayed for 5 minutes (or until `ERROR` status)
- Automatically pruned after TTL expires
## Technical Details
### Alternate Screen Buffer
Uses ANSI escape sequences to take over the terminal:
- `\x1b[?1049h` — enter alternate screen
- `\x1b[?1049l` — exit alternate screen
- `\x1b[?25l` — hide cursor
- `\x1b[?25h` — show cursor
- `\x1b[H` — move cursor to home (0,0)
- `\x1b[J` — clear from cursor to end of screen
### Raw Mode Input
Enables character-by-character input without buffering:
- Captures arrow keys, Page Up/Down, Home/End
- Supports vim-style navigation (`j`, `k`, `g`, `G`)
- Processes `Ctrl+C` and `q` for clean exit
### Event Prefix Mapping
When a job flow starts, its queue type is converted to an event prefix:
```
cache/opportunities/refresh/active → cache:opportunities:refresh
```
Any event matching that prefix is automatically nested under the job flow while it's active.
### Late Event Handling
Socket.io messages (`job:log`) arrive asynchronously. If they arrive after `job:finished` has already terminated the flow:
- The completed flow is located in `dashboardState.completed`
- The late event is appended to the completed flow's children
- The dashboard re-renders to show the updated flow
- No orphaned active flows are created
## Non-Interactive Mode
When TTY is not available or `EVENT_DEBUGGER_DYNAMIC=false`:
- Falls back to simple line-by-line logging
- No alternate screen buffer
- No scrolling or keyboard input
- Same event format, just appended to stdout
## Exit Behavior
On normal exit or `Ctrl+C`:
1. Raw mode is disabled
2. Cursor is shown
3. Alternate screen buffer is exited
4. Original terminal content is restored
5. Process exits cleanly
## Troubleshooting
**Dashboard doesn't appear**
- Check that `process.stdout.isTTY` is `true`
- Verify `EVENT_DEBUGGER_DYNAMIC` is not set to `false`
- Ensure you're running in a real terminal, not piping output
**Scroll doesn't work**
- Ensure `process.stdin.isTTY` is `true`
- Check that raw mode is enabled (no errors on startup)
- Try arrow keys and vim keys (`j`/`k`)
**Events not nesting under jobs**
- Verify `job:started` fires before any global events
- Check that `queueTypeToEventPrefix()` returns the correct prefix for your queue type
- Ensure global events match the prefix pattern (e.g., `cache:opportunities:refresh:*`)
**Ghost flows appear**
- This was fixed — late socket events are now routed to completed flows
- Ensure you're using the latest version of the event debugger
## Examples
### Simple Job Flow
```typescript
// 1. Start job
events.emit("job:started", { workerId: "worker-123", queueType: "my/queue" });
// 2. Log some work
events.emit("job:log", {
message: "Processing items",
level: "INFO",
timestamp: new Date().toISOString(),
workerId: "worker-123",
queueType: "my/queue",
});
// 3. Complete job
events.emit("job:finished", { workerId: "worker-123", queueType: "my/queue" });
```
### Job with Nested Global Events
```typescript
// 1. Start job
events.emit("job:started", {
workerId: "w1",
queueType: "cache/opportunities/refresh/active",
});
// 2. Emit global events (auto-nested because prefix matches)
events.emit("cache:opportunities:refresh:started", { total: 100 });
events.emit("cache:opportunities:refresh:progress", { processed: 50 });
events.emit("cache:opportunities:refresh:completed", { updated: 48 });
// 3. Complete job
events.emit("job:finished", {
workerId: "w1",
queueType: "cache/opportunities/refresh/active",
});
```
Result: all `cache:opportunities:refresh:*` events are nested under the job flow.
## See Also
- `src/modules/globalEvents.ts` — Event bus and type definitions
- `src/modules/workers/jobFactory.ts` — Worker job creation and `workerLog()` helper
- `src/modules/workers/coms.ts` — Socket.io server for worker communication
File diff suppressed because it is too large Load Diff
+784
View File
@@ -0,0 +1,784 @@
import PdfPrinter from "pdfmake/src/Printer";
import { readFileSync } from "node:fs";
import { join } from "node:path";
/** One billable row on the quote. */
export interface QuoteLineItem {
  qty: number;
  description: string;
  unitPrice: number;
  /** Optional long-form note rendered beneath the description. */
  narrative?: string;
}
/** "Prepared for" block shown on the quote. */
export interface CustomerInfo {
  name: string;
  company?: string;
  attention?: string;
  address: string[];
}
/** Optional contact details shown in the CONTACT column. */
export interface CustomerContact {
  email?: string;
  phone?: string;
}
/** Identifying details printed in the quote info strip. */
export interface QuoteDetails {
  quoteNumber: string;
  date: string;
  description: string;
}
/** Tax applied to the taxable subtotal. */
export interface TaxConfig {
  /** Decimal rate, e.g. 0.0975 for 9.75%. */
  rate: number;
  /** Label printed next to the tax amount. */
  label: string;
}
/** Sales rep shown in the FROM block (falls back to COMPANY contact). */
export interface SalesRepInfo {
  name: string;
  email?: string;
}
/** Provenance embedded into the PDF's Keywords metadata field. */
export interface QuoteMetadata {
  quoteId?: string;
  createdById?: string;
  createdByName?: string;
  createdByEmail?: string;
  createdAt?: string;
  downloadedAt?: string;
  downloadedById?: string;
  downloadedByName?: string;
  downloadedByEmail?: string;
}
/** Full input payload for generateQuote. */
export interface QuoteData {
  customer: CustomerInfo;
  contact: CustomerContact;
  quote: QuoteDetails;
  lineItems: QuoteLineItem[];
  /** Taxable portion of the subtotal; defaults to the full subtotal. */
  taxableSubtotal?: number;
  tax: TaxConfig;
  salesRep?: SalesRepInfo;
  /** Intro paragraph rendered in a highlighted box above the line items. */
  quoteNarrative?: string;
  /** When true, a PREVIEW watermark is rendered across each page. */
  isPreview?: boolean;
  /** When false (default), unit/total price columns are hidden. */
  showLineItemPricing?: boolean;
  metadata?: QuoteMetadata;
}
/** Color palette used throughout the rendered quote. */
export interface QuoteTheme {
  brandPrimary: string;
  brandDark: string;
  brandLight: string;
  accent: string;
  headerBg: string;
  footerBg: string;
}
/** Default brand palette (warm brown/gold). */
const DEFAULT_THEME: QuoteTheme = {
  brandPrimary: "#8B5E0B",
  brandDark: "#5C3D07",
  brandLight: "#F5EDE0",
  accent: "#C67F17",
  headerBg: "#2D2317",
  footerBg: "#F5EDE0",
};
// Neutral grays used for body text.
const SLATE = "#3A3A3A";
const SLATE_MID = "#636363";
const SLATE_LIGHT = "#8E8E8E";
const WHITE = "#FFFFFF";
/** Alternating row background for the line-item table. */
const ROW_ALT = "#FAF7F2";
/** Default horizontal-rule color. */
const DIVIDER = "#D4C5A9";
// US Letter page geometry in PDF points (72 pt per inch).
// NOTE(review): PAGE_H appears unused below (background uses literal 786).
const PAGE_H = 792;
const PAGE_W = 612;
const MARGIN_L = 40;
const MARGIN_R = 40;
const MARGIN_TOP = 26;
const MARGIN_BOTTOM = 65;
/** Usable width between the left and right margins. */
const CONTENT_W = PAGE_W - MARGIN_L - MARGIN_R;
const DEFAULT_DISCLAIMER =
  "Prices valid for 30 days from quote date. Taxes invoiced per jurisdiction regardless of presence on this quote.";
/** Issuing company details printed in the FROM block and footer. */
const COMPANY = {
  name: "Total Tech Solutions LLC",
  contactPerson: "Courtney Stevens",
  address: ["PO Box 331", "Murray, KY 42071"],
  phone: "(270) 761-8324",
  email: "courtney.stevens@totaltech.net",
  licenseInfo: "Licensed in Kentucky & Tennessee · TN License #2173",
} as const;
/** Header logo; company-name text is used when this file cannot be read. */
const DEFAULT_LOGO_PATH = join(process.cwd(), "logo.png");
// pdfmake requires explicit font files; use the Roboto set it ships with.
const fontDir = join(process.cwd(), "node_modules/pdfmake/build/fonts/Roboto");
const fonts = {
  Roboto: {
    normal: join(fontDir, "Roboto-Regular.ttf"),
    bold: join(fontDir, "Roboto-Medium.ttf"),
    italics: join(fontDir, "Roboto-Italic.ttf"),
    bolditalics: join(fontDir, "Roboto-MediumItalic.ttf"),
  },
};
const printer = new PdfPrinter(fonts as never);
/** Format a number as USD with two decimals and thousands separators. */
const fmt = (value: number): string => {
  const fixed = value.toFixed(2);
  // Insert a comma before every group of three digits left of the decimal.
  const withThousands = fixed.replace(/\B(?=(\d{3})+(?!\d))/g, ",");
  return "$" + withThousands;
};
/** Full-content-width horizontal rule as a pdfmake canvas element. */
const hr = (color = DIVIDER, weight = 0.75) => {
  const line = {
    type: "line",
    x1: 0,
    y1: 0,
    x2: CONTENT_W,
    y2: 0,
    lineWidth: weight,
    lineColor: color,
  };
  return { canvas: [line] };
};
/**
 * Read an image from disk and return it as a base64 data URL.
 * Returns null when the file cannot be read (caller falls back to text).
 */
function loadLogoDataUrl(logoPath: string): string | null {
  try {
    const bytes = readFileSync(logoPath);
    // MIME subtype is inferred from the extension: .png → png, else jpeg.
    const mime = logoPath.toLowerCase().endsWith(".png") ? "png" : "jpeg";
    return `data:image/${mime};base64,${bytes.toString("base64")}`;
  } catch {
    return null;
  }
}
/**
 * Render a quote PDF with pdfmake and return it as a Buffer.
 *
 * Totals are computed from `data.lineItems`; tax is applied to
 * `data.taxableSubtotal` when provided, otherwise to the full subtotal.
 * Commit-time provenance from `data.metadata` is embedded into the PDF
 * info Keywords field as semicolon-delimited `key:value` pairs.
 *
 * @param data - Quote content (customer, line items, tax, metadata).
 * @param theme - Partial palette overrides merged over DEFAULT_THEME.
 * @param logoPath - Header logo; company-name text is used if unreadable.
 */
export async function generateQuote(
  data: QuoteData,
  theme: Partial<QuoteTheme> = {},
  logoPath = DEFAULT_LOGO_PATH,
): Promise<Buffer> {
  // Merge caller overrides over the default palette.
  const t: QuoteTheme = { ...DEFAULT_THEME, ...theme };
  // Totals: subtotal from line items; tax on the (clamped) taxable portion.
  const subTotal = data.lineItems.reduce(
    (sum, item) => sum + item.qty * item.unitPrice,
    0,
  );
  const taxableSubTotal = Math.max(0, data.taxableSubtotal ?? subTotal);
  const taxAmount = taxableSubTotal * data.tax.rate;
  const total = subTotal + taxAmount;
  const logoDataUrl = loadLogoDataUrl(logoPath);
  // Unit/total price columns are hidden unless explicitly enabled.
  const showPricing = data.showLineItemPricing ?? false;
  const tableHeader = [
    { text: "Qty", style: "thCell", alignment: "center" },
    { text: "Description", style: "thCell" },
    ...(showPricing
      ? [
          { text: "Unit Price", style: "thCell", alignment: "right" },
          { text: "Total", style: "thCell", alignment: "right" },
        ]
      : []),
  ];
  // NOTE(review): colCount appears unused below (widths are picked inline).
  const colCount = showPricing ? 4 : 2;
  const tableRows: Record<string, unknown>[][] = [];
  for (const item of data.lineItems) {
    // Build the description cell — stack description + narrative so they
    // are a single cell and pdfmake never splits them across pages.
    const descriptionCell: Record<string, unknown> = item.narrative
      ? {
          stack: [
            { text: item.description, style: "tdCell" },
            {
              text: item.narrative,
              style: "narrative",
              margin: [0, 2, 8, 0],
            },
          ],
        }
      : { text: item.description, style: "tdCell" };
    tableRows.push([
      { text: String(item.qty), style: "tdCell", alignment: "center" },
      descriptionCell,
      ...(showPricing
        ? [
            {
              text: fmt(item.unitPrice),
              style: "tdCell",
              alignment: "right",
              noWrap: true,
            },
            {
              text: fmt(item.qty * item.unitPrice),
              style: "tdCell",
              alignment: "right",
              noWrap: true,
            },
          ]
        : []),
    ]);
  }
  // Header: logo image when available, otherwise the company name as text.
  const headerImage = logoDataUrl
    ? { image: logoDataUrl, width: 200 }
    : {
        stack: [{ text: COMPANY.name, style: "companyName" }],
        width: 200,
      };
  const docDefinition = {
    pageSize: "LETTER" as const,
    pageMargins: [MARGIN_L, MARGIN_TOP, MARGIN_R, MARGIN_BOTTOM] as [
      number,
      number,
      number,
      number,
    ],
    // PDF document properties; Keywords carries provenance key:value pairs
    // (same semicolon-delimited format appended to by injectPdfMetadata).
    info: {
      title: `Quote ${data.quote.quoteNumber}`,
      author: data.metadata?.createdByName ?? COMPANY.name,
      subject: data.quote.description,
      creator: COMPANY.name,
      producer: COMPANY.name,
      keywords: [
        data.metadata?.quoteId ? `quoteId:${data.metadata.quoteId}` : null,
        data.metadata?.createdById
          ? `createdById:${data.metadata.createdById}`
          : null,
        data.metadata?.createdByEmail
          ? `createdByEmail:${data.metadata.createdByEmail}`
          : null,
        data.metadata?.createdAt
          ? `createdAt:${data.metadata.createdAt}`
          : null,
        data.metadata?.downloadedAt
          ? `downloadedAt:${data.metadata.downloadedAt}`
          : null,
        data.metadata?.downloadedById
          ? `downloadedById:${data.metadata.downloadedById}`
          : null,
        data.metadata?.downloadedByName
          ? `downloadedByName:${data.metadata.downloadedByName}`
          : null,
        data.metadata?.downloadedByEmail
          ? `downloadedByEmail:${data.metadata.downloadedByEmail}`
          : null,
        data.isPreview ? "preview:true" : null,
      ]
        .filter(Boolean)
        .join("; "),
    },
    defaultStyle: {
      font: "Roboto",
      fontSize: 9.5,
      color: SLATE,
      lineHeight: 1.3,
    },
    // Named styles referenced by the content tree below.
    styles: {
      companyName: { fontSize: 18, bold: true, color: t.brandDark },
      quoteLabel: { fontSize: 24, color: t.accent, bold: true, opacity: 0.12 },
      sectionTitle: {
        fontSize: 8.5,
        bold: true,
        color: t.brandPrimary,
        characterSpacing: 1.2,
      },
      sectionBody: { fontSize: 9, color: SLATE },
      sectionMuted: { fontSize: 8.5, color: SLATE_MID },
      infoLabel: {
        fontSize: 8,
        bold: true,
        color: SLATE_LIGHT,
        characterSpacing: 0.5,
      },
      infoValue: { fontSize: 10, bold: true, color: t.brandDark },
      contactLabel: { fontSize: 8, bold: true, color: SLATE_LIGHT },
      contactValue: { fontSize: 9, color: SLATE },
      thCell: {
        fontSize: 8.5,
        bold: true,
        color: WHITE,
        characterSpacing: 0.5,
      },
      tdCell: { fontSize: 9, color: SLATE },
      narrative: {
        fontSize: 8,
        color: SLATE_MID,
        italics: true,
        lineHeight: 1.2,
      },
      totalsLabel: { fontSize: 9, color: SLATE_MID },
      totalsValue: { fontSize: 9, color: SLATE, bold: true },
      totalFinalLabel: { fontSize: 11, bold: true, color: WHITE },
      totalFinalValue: { fontSize: 12, bold: true, color: t.brandDark },
      footerText: { fontSize: 7.5, color: SLATE_MID },
      footerBold: { fontSize: 7.5, color: t.brandPrimary, bold: true },
      disclaimer: { fontSize: 7, color: SLATE_LIGHT, italics: true },
    },
    // Previews get a PREVIEW watermark across every page.
    ...(data.isPreview
      ? {
          watermark: {
            text: "PREVIEW",
            color: t.brandDark,
            opacity: 0.15,
            bold: true,
          },
        }
      : {}),
    // Page chrome: accent bar across the top, light strip down the left edge.
    background: () => ({
      canvas: [
        { type: "rect", x: 0, y: 0, w: PAGE_W, h: 6, color: t.accent },
        { type: "rect", x: 0, y: 6, w: 4, h: 786, color: t.brandLight },
      ],
    }),
    content: [
      // Header row: logo (or name) left, company name + QUOTE label right.
      {
        margin: [0, 4, 0, 0],
        columns: [
          headerImage,
          {
            stack: [
              { text: COMPANY.name, style: "companyName", alignment: "right" },
              {
                text: "QUOTE",
                style: "quoteLabel",
                alignment: "right",
                margin: [0, -4, 0, 0],
              },
            ],
            width: "*",
          },
        ],
      },
      { ...hr(t.accent, 1.5), margin: [0, 8, 0, 0] },
      // Info strip: quote number, date, description.
      {
        margin: [0, 7, 0, 7],
        columns: [
          {
            width: "auto",
            stack: [
              { text: "QUOTE NUMBER", style: "infoLabel" },
              {
                text: data.quote.quoteNumber,
                style: "infoValue",
                margin: [0, 2, 0, 0],
              },
            ],
          },
          {
            width: "auto",
            margin: [30, 0, 0, 0],
            stack: [
              { text: "DATE", style: "infoLabel" },
              {
                text: data.quote.date,
                style: "infoValue",
                margin: [0, 2, 0, 0],
              },
            ],
          },
          {
            width: "*",
            margin: [30, 0, 0, 0],
            stack: [
              { text: "DESCRIPTION", style: "infoLabel" },
              {
                text: data.quote.description,
                style: "infoValue",
                margin: [0, 2, 0, 0],
              },
            ],
          },
        ],
      },
      { ...hr(), margin: [0, 0, 0, 10] },
      // Address columns: FROM, PREPARED FOR, and (optionally) CONTACT.
      {
        columns: [
          {
            width: 155,
            stack: [
              { text: "FROM", style: "sectionTitle", margin: [0, 0, 0, 6] },
              {
                text: data.salesRep?.name ?? COMPANY.contactPerson,
                style: "sectionBody",
                bold: true,
              },
              {
                text: COMPANY.name,
                style: "sectionMuted",
                margin: [0, 2, 0, 0],
              },
              ...COMPANY.address.map((line) => ({
                text: line,
                style: "sectionMuted",
              })),
              {
                text: COMPANY.phone,
                style: "sectionBody",
                margin: [0, 4, 0, 0],
              },
              {
                text: data.salesRep?.email ?? COMPANY.email,
                style: "sectionMuted",
                margin: [0, 1, 0, 0],
              },
            ],
          },
          {
            width: 175,
            margin: [25, 0, 0, 0],
            stack: [
              {
                text: "PREPARED FOR",
                style: "sectionTitle",
                margin: [0, 0, 0, 6],
              },
              { text: data.customer.name, style: "sectionBody", bold: true },
              ...(data.customer.company
                ? [
                    {
                      text: data.customer.company,
                      style: "sectionMuted",
                      margin: [0, 2, 0, 0],
                    },
                  ]
                : []),
              ...(data.customer.attention
                ? [{ text: data.customer.attention, style: "sectionMuted" }]
                : []),
              ...data.customer.address.map((line) => ({
                text: line,
                style: "sectionMuted",
              })),
            ],
          },
          // CONTACT column only rendered when an email or phone is present.
          ...(data.contact.email || data.contact.phone
            ? [
                {
                  width: "*" as const,
                  margin: [20, 0, 0, 0] as [number, number, number, number],
                  stack: [
                    {
                      text: "CONTACT",
                      style: "sectionTitle",
                      margin: [0, 0, 0, 6],
                    },
                    ...(data.contact.email
                      ? [
                          {
                            columns: [
                              {
                                text: "Email",
                                style: "contactLabel",
                                width: 40,
                              },
                              {
                                text: data.contact.email,
                                style: "contactValue",
                                width: "*",
                              },
                            ],
                          },
                        ]
                      : []),
                    ...(data.contact.phone
                      ? [
                          {
                            columns: [
                              {
                                text: "Mobile",
                                style: "contactLabel",
                                width: 40,
                              },
                              {
                                text: data.contact.phone,
                                style: "contactValue",
                                width: "*",
                              },
                            ],
                            margin: [0, 4, 0, 0],
                          },
                        ]
                      : []),
                  ],
                },
              ]
            : []),
        ],
      },
      { ...hr(), margin: [0, 10, 0, 0] },
      // Optional narrative callout box (accent bar + tinted background).
      ...(data.quoteNarrative
        ? [
            {
              margin: [0, 8, 0, 6] as [number, number, number, number],
              table: {
                widths: [2, "*"],
                body: [
                  [
                    {
                      text: "",
                      fillColor: t.accent,
                      border: [false, false, false, false],
                    },
                    {
                      text: data.quoteNarrative,
                      fontSize: 9,
                      color: SLATE_MID,
                      italics: true,
                      lineHeight: 1.4,
                      margin: [8, 6, 8, 6],
                      fillColor: ROW_ALT,
                      border: [false, false, false, false],
                    },
                  ],
                ],
              },
              layout: {
                hLineWidth: () => 0,
                vLineWidth: () => 0,
                paddingLeft: () => 0,
                paddingRight: () => 0,
                paddingTop: () => 0,
                paddingBottom: () => 0,
              },
            },
          ]
        : []),
      // Line-item table with zebra striping and a heavy bottom rule.
      {
        margin: [0, 10, 0, 0],
        table: {
          headerRows: 1,
          dontBreakRows: true,
          widths: showPricing ? [40, "*", 75, 75] : [40, "*"],
          body: [tableHeader, ...tableRows],
        },
        layout: {
          fillColor: (rowIndex: number) => {
            if (rowIndex === 0) return t.headerBg;
            return rowIndex % 2 === 0 ? ROW_ALT : null;
          },
          hLineWidth: (i: number, node: { table: { body: unknown[] } }) => {
            if (i === 0 || i === 1) return 0;
            if (i === node.table.body.length) return 1;
            return 0.5;
          },
          vLineWidth: () => 0,
          hLineColor: (i: number, node: { table: { body: unknown[] } }) =>
            i === node.table.body.length ? t.headerBg : "#E8E0D0",
          paddingLeft: (col: number) => (col === 0 ? 6 : 8),
          paddingRight: () => 8,
          paddingTop: () => 4,
          paddingBottom: () => 4,
        },
      },
      // Totals + signature block; unbreakable so it never splits over a page.
      {
        unbreakable: true,
        stack: [
          {
            margin: [0, 6, 0, 0],
            columns: [
              { width: "*", text: "" },
              {
                width: 250,
                table: {
                  widths: ["*", 110],
                  body: [
                    [
                      {
                        text: "Subtotal",
                        style: "totalsLabel",
                        margin: [0, 5, 0, 5],
                        border: [false, false, false, true],
                      },
                      {
                        text: fmt(subTotal),
                        style: "totalsValue",
                        alignment: "right",
                        noWrap: true,
                        margin: [0, 5, 0, 5],
                        border: [false, false, false, true],
                      },
                    ],
                    [
                      {
                        text: data.tax.label,
                        style: "totalsLabel",
                        margin: [0, 5, 0, 5],
                        border: [false, false, false, true],
                      },
                      {
                        text: fmt(taxAmount),
                        style: "totalsValue",
                        alignment: "right",
                        noWrap: true,
                        margin: [0, 5, 0, 5],
                        border: [false, false, false, true],
                      },
                    ],
                    [
                      {
                        text: "TOTAL",
                        style: "totalFinalLabel",
                        fillColor: t.headerBg,
                        margin: [10, 8, 6, 8],
                        border: [false, false, false, false],
                      },
                      {
                        text: fmt(total),
                        style: "totalFinalValue",
                        alignment: "right",
                        noWrap: true,
                        fillColor: t.brandLight,
                        margin: [6, 7, 8, 7],
                        border: [false, false, false, false],
                      },
                    ],
                  ],
                },
                layout: {
                  hLineWidth: (i: number) => (i >= 1 && i <= 2 ? 0.5 : 0),
                  vLineWidth: () => 0,
                  hLineColor: () => "#E0D6C6",
                },
              },
            ],
          },
          // Signature and date rule lines.
          {
            margin: [0, 40, 0, 0],
            columns: [
              {
                width: "50%",
                stack: [
                  {
                    canvas: [
                      {
                        type: "line",
                        x1: 0,
                        y1: 0,
                        x2: 220,
                        y2: 0,
                        lineWidth: 0.75,
                        lineColor: "#999",
                      },
                    ],
                  },
                  {
                    text: "Authorized Signature",
                    fontSize: 7,
                    color: "#888",
                    margin: [0, 3, 0, 0],
                  },
                ],
              },
              {
                width: "50%",
                stack: [
                  {
                    canvas: [
                      {
                        type: "line",
                        x1: 0,
                        y1: 0,
                        x2: 160,
                        y2: 0,
                        lineWidth: 0.75,
                        lineColor: "#999",
                      },
                    ],
                  },
                  {
                    text: "Date",
                    fontSize: 7,
                    color: "#888",
                    margin: [0, 3, 0, 0],
                  },
                ],
              },
            ],
          },
        ],
      },
    ],
    // Per-page footer: tinted band with company/license, page number,
    // and the pricing disclaimer.
    footer: (currentPage: number, pageCount: number) => ({
      margin: [0, 0, 0, 0],
      stack: [
        {
          canvas: [
            { type: "rect", x: 0, y: 0, w: PAGE_W, h: 44, color: t.footerBg },
          ],
        },
        {
          margin: [MARGIN_L, -38, MARGIN_R, 0],
          columns: [
            {
              width: "*",
              stack: [
                {
                  text: [
                    { text: COMPANY.name, style: "footerBold" },
                    {
                      text: ` · ${COMPANY.licenseInfo}`,
                      style: "footerText",
                    },
                  ],
                },
              ],
            },
            {
              width: "auto",
              text: `Page ${currentPage} of ${pageCount}`,
              style: "footerText",
              alignment: "right",
            },
          ],
        },
        {
          margin: [MARGIN_L, 4, MARGIN_R, 0],
          text: DEFAULT_DISCLAIMER,
          style: "disclaimer",
        },
      ],
    }),
  };
  // Some pdfmake builds return the document synchronously, others a promise;
  // handle both, then collect the stream into a single Buffer.
  const maybeDoc = printer.createPdfKitDocument(docDefinition as never) as any;
  const pdfDoc =
    maybeDoc && typeof maybeDoc.then === "function" ? await maybeDoc : maybeDoc;
  if (!pdfDoc || typeof pdfDoc.on !== "function") {
    throw new Error("Failed to initialize PDF document stream");
  }
  return await new Promise<Buffer>((resolve, reject) => {
    try {
      const chunks: Buffer[] = [];
      pdfDoc.on("data", (chunk: Buffer) => chunks.push(chunk));
      pdfDoc.on("end", () => resolve(Buffer.concat(chunks)));
      pdfDoc.on("error", reject);
      if (typeof pdfDoc.end === "function") {
        pdfDoc.end();
      } else {
        reject(new Error("PDF document stream does not support end()"));
      }
    } catch (err) {
      reject(err);
    }
  });
}
+2
View File
@@ -0,0 +1,2 @@
export * from "./generateQuote";
export * from "./injectPdfMetadata";
@@ -0,0 +1,48 @@
import { PDFDocument } from "pdf-lib";
/** Who downloaded the PDF, and when — written into the PDF Keywords field. */
export interface DownloadMetadata {
  /** ISO-8601 timestamp of the download. */
  downloadedAt: string;
  downloadedById: string;
  downloadedByName?: string;
  downloadedByEmail?: string;
}
/**
 * Inject download-time metadata into an existing PDF's document properties.
 *
 * Download-specific `key:value` pairs are appended to the PDF's Keywords
 * field using the same semicolon-delimited format written at quote
 * generation time, and the ModificationDate is set to the download time.
 *
 * @param pdfBytes - The original PDF bytes.
 * @param metadata - Who downloaded the document, and when.
 * @returns The modified PDF as a `Uint8Array`.
 */
export async function injectPdfMetadata(
  pdfBytes: Buffer | Uint8Array,
  metadata: DownloadMetadata,
): Promise<Uint8Array> {
  const doc = await PDFDocument.load(pdfBytes);
  // Same key:value format produced by generateQuote at commit time.
  const pairs = [
    `downloadedAt:${metadata.downloadedAt}`,
    `downloadedById:${metadata.downloadedById}`,
  ];
  if (metadata.downloadedByName) {
    pairs.push(`downloadedByName:${metadata.downloadedByName}`);
  }
  if (metadata.downloadedByEmail) {
    pairs.push(`downloadedByEmail:${metadata.downloadedByEmail}`);
  }
  // Append after any commit-time keywords so existing metadata is preserved.
  const existing = doc.getKeywords() ?? "";
  const combined =
    existing.length > 0 ? `${existing}; ${pairs.join("; ")}` : pairs.join("; ");
  doc.setKeywords([combined]);
  // Reflect the download moment in the PDF's modification date.
  doc.setModificationDate(new Date(metadata.downloadedAt));
  return doc.save();
}
@@ -0,0 +1,9 @@
/**
 * Build the implicit permission node granting a user access to one resource:
 * `resource.<resource>.<resourceId>.user.<userId>.implicit`.
 */
export function genImplicitPerm(
  resource: string,
  resourceId: string,
  userId: string
) {
  return `resource.${resource}.${resourceId}.user.${userId}.implicit`;
}
@@ -0,0 +1,53 @@
/**
 * Permission Validator
 *
 * Validates user and role permissions. Given a single permission expression
 * (or an array of them) that the user owns, and the permission node required
 * for the query being executed, determines whether any owned expression
 * grants the required permission.
 *
 * Expressions are dot-delimited token lists. Special token types:
 * - Asterisk (*): matches its token and all following tokens.
 * - Question Mark (?): matches exactly one token, whatever its value.
 * - Inclusive List ([a,b,c]): matches only the tokens in the list.
 * - Exclusive List (<a,b,c>): matches any token except those in the list.
 *
 * @param permission - The required permission (e.g. "resource.doc.42.read")
 * @param permissionExpressions - The owned permission expression(s)
 * @returns {boolean} Does the user have the permission?
 */
export function permissionValidator(
  permission: string,
  permissionExpressions: string | string[]
): boolean {
  // Normalize a single expression to a one-element array.
  const expressions =
    typeof permissionExpressions === "string"
      ? [permissionExpressions]
      : permissionExpressions;
  const required = permission.split(".");
  // Any one matching expression grants the permission.
  return expressions.some((expression) =>
    matchesTokens(required, expression.split("."))
  );
}

/**
 * Token-by-token match of a required permission against one expression.
 *
 * Fixes over the previous regex translation: "?" now matches a whole token
 * (not a single character), and "<a,b>" rejects only tokens *equal to* a
 * listed value (previously any token containing a listed value as a prefix
 * anywhere — e.g. "<read>" wrongly rejected "readonly").
 */
function matchesTokens(required: string[], pattern: string[]): boolean {
  for (let i = 0; i < pattern.length; i++) {
    const token = pattern[i];
    // "*" consumes this token and everything after it.
    if (token === "*") return true;
    // Pattern demands more tokens than the permission has.
    if (i >= required.length) return false;
    const actual = required[i];
    if (token === "?") continue; // any single token
    if (token.startsWith("[") && token.endsWith("]")) {
      // Inclusive list: token must be one of the listed values.
      if (!token.slice(1, -1).split(",").includes(actual)) return false;
      continue;
    }
    if (token.startsWith("<") && token.endsWith(">")) {
      // Exclusive list: token must NOT be one of the listed values.
      if (token.slice(1, -1).split(",").includes(actual)) return false;
      continue;
    }
    // Plain token: exact match required.
    if (token !== actual) return false;
  }
  // Without a trailing "*", the expression must cover every token exactly.
  return required.length === pattern.length;
}
/**
 * @TODO It's okay — you can't always get everything done, and that is fine.
 * Just take a breath and move on; come back if you feel up to it.
 * What you make is good, and while you can always do more,
 * you can't do everything. Nothing will ever be perfect,
 * so stop trying to be perfect and allow yourself to move on
 * even if you know there is more you can do.
 */
@@ -0,0 +1,31 @@
import UserController from "../../controllers/UserController";
/**
 * Keep only the keys of `obj` the user is permitted to read.
 * Each key is checked against `${scope}.${key}`; checks run sequentially.
 */
export const processObjectValuePerms = async <T>(
  obj: T,
  scope: string, // e.g. "unifi.wifi.read"
  user: UserController,
): Promise<Partial<T>> => {
  const visible: Partial<T> = {};
  for (const key in obj) {
    const allowed = await user.hasPermission(`${scope}.${key}`);
    if (allowed) {
      visible[key] = obj[key];
    }
  }
  return visible;
};
/**
 * Map each key of `obj` to whether the user holds `${scope}.${key}`.
 * Returns a boolean per key; checks run sequentially.
 */
export const processObjectPermMap = async <T extends Record<string, unknown>>(
  obj: T,
  scope: string,
  user: UserController,
): Promise<Record<keyof T, boolean>> => {
  const permissionMap = {} as Record<keyof T, boolean>;
  for (const key in obj) {
    permissionMap[key] = await user.hasPermission(`${scope}.${key}`);
  }
  return permissionMap;
};
@@ -0,0 +1,24 @@
import jwt from "jsonwebtoken";
import { permissionsPrivateKey } from "../../constants";
import { PermissionIssuers } from "../../types/PermissionTypes";
/**
 * Sign Permissions
 *
 * Signs the array of permissions with the permissions private key and
 * returns a JWT, which is stored in the database.
 *
 * @param data.issuer - The issuing authority for these permissions
 * @param data.subject - The subject (e.g. user/role id) the permissions apply to
 * @param data.permissions - All the permissions to be signed
 * @returns {string} - The signed permissions JWT
 */
export function signPermissions(data: {
  issuer: PermissionIssuers;
  subject: string;
  permissions: string[];
}) {
  // RS256: consumers can verify with the public key alone.
  return jwt.sign({ permissions: data.permissions }, permissionsPrivateKey, {
    algorithm: "RS256",
    issuer: data.issuer,
    subject: data.subject,
  });
}
@@ -0,0 +1,107 @@
import { readFileSync } from "fs";
import { join } from "path";
/** Free-form address fields; only `state` and `city` affect the rate. */
export interface SalesTaxAddressInput {
  line1?: string | null;
  line2?: string | null;
  city?: string | null;
  state?: string | null;
  zip?: string | null;
  country?: string | null;
}
/** Per-city tax data nested under a state record. */
interface LocalJurisdiction {
  city: string;
  local_rate: number;
  combined_rate: number;
}
/** One state's entry in the bundled salesTaxRates.json table. */
interface StateTaxRecord {
  state: string;
  abbreviation: string;
  state_rate: number;
  avg_local_rate: number;
  avg_combined_rate: number;
  has_local_tax: boolean;
  local_jurisdictions?: LocalJurisdiction[];
}
/** Copy of the table shipped next to this module, used as a last resort. */
const TAX_DATA_FALLBACK_URL = new URL("./salesTaxRates.json", import.meta.url);
// Lookup order: explicit env override, project root, bundled fallback.
const TAX_DATA_CANDIDATE_PATHS: Array<string | URL> = [
  process.env.SALES_TAX_RATES_PATH ?? "",
  join(process.cwd(), "salesTaxRates.json"),
  TAX_DATA_FALLBACK_URL,
].filter(Boolean);
/**
 * Load the sales-tax table from the first readable candidate path.
 * Returns an empty table when nothing can be read or parsed, in which
 * case every rate lookup falls back to 0.
 */
const parseTaxData = (): StateTaxRecord[] => {
  for (const source of TAX_DATA_CANDIDATE_PATHS) {
    try {
      const parsed = JSON.parse(readFileSync(source, "utf-8"));
      if (Array.isArray(parsed)) {
        return parsed as StateTaxRecord[];
      }
    } catch {
      // Unreadable or malformed candidate — try the next one.
    }
  }
  return [];
};
const SALES_TAX_DATA = parseTaxData();
/**
 * Lowercase, strip punctuation, and collapse whitespace for fuzzy
 * comparisons; returns null when nothing usable remains.
 */
const normalizeToken = (value: string | null | undefined): string | null => {
  if (!value) return null;
  const cleaned = value
    .trim()
    .toLowerCase()
    .replace(/[^a-z0-9]+/g, " ")
    .replace(/\s+/g, " ")
    .trim();
  // An all-punctuation input collapses to "" — treat that as missing.
  return cleaned || null;
};
/** Resolve a state name or two-letter code to an uppercase abbreviation. */
const normalizeState = (state: string | null | undefined): string | null => {
  const token = normalizeToken(state);
  if (!token) return null;
  // Two characters: assume it is already an abbreviation (e.g. "ky" -> "KY").
  const asCode = token.toUpperCase();
  if (asCode.length === 2) return asCode;
  // Otherwise resolve a full state name against the tax table.
  const record = SALES_TAX_DATA.find(
    (entry) => normalizeToken(entry.state) === token,
  );
  return record ? record.abbreviation.toUpperCase() : null;
};
/**
 * Compute the expected sales tax rate for an address.
 * Returns a decimal tax rate (e.g. 0.06 for 6%); 0 when the state cannot
 * be resolved or is missing from the bundled table.
 */
export const getExpectedSalesTaxRate = (
  address: SalesTaxAddressInput | null | undefined,
): number => {
  const state = normalizeState(address?.state);
  if (!state) return 0;
  // Business rule: Tennessee remains explicitly hard-coded.
  if (state === "TN") return 0.0975;
  const record = SALES_TAX_DATA.find(
    (entry) => entry.abbreviation.toUpperCase() === state,
  );
  if (!record) return 0;
  // Prefer an exact local-jurisdiction (city) match over the state average.
  const city = normalizeToken(address?.city);
  const local = record.local_jurisdictions?.find(
    (jurisdiction) => normalizeToken(jurisdiction.city) === city,
  );
  return local ? local.combined_rate : record.avg_combined_rate;
};
@@ -0,0 +1,24 @@
/** Clamp `value` into the inclusive range [min, max]. */
const clamp = (value: number, min: number, max: number) =>
  Math.min(max, Math.max(min, value));
/**
 * Normalize a probability-like input to a percent scale (0..100).
 * Accepts values like "70", "70%", 70, or 0.7; non-numeric input yields 0.
 */
export const normalizeProbabilityPercent = (value: unknown): number => {
  let parsed: number;
  if (typeof value === "string") {
    parsed = Number.parseFloat(value.replace(/%/g, "").trim());
  } else {
    parsed = Number(value);
  }
  if (!Number.isFinite(parsed)) return 0;
  // Values at or below 1 are treated as ratios and scaled up to percent.
  const percent = parsed <= 1 ? parsed * 100 : parsed;
  return clamp(percent, 0, 100);
};
/**
 * Normalize a probability-like input to a ratio scale (0..1).
 */
export const normalizeProbabilityRatio = (value: unknown): number =>
  normalizeProbabilityPercent(value) / 100;
File diff suppressed because it is too large Load Diff
+37
View File
@@ -0,0 +1,37 @@
import { blake2sHex } from "blakets";
import crypto from "crypto";
/** BLAKE2s password hashing with per-password hex salts. */
export default class Password {
  /**
   * Generate a random hex salt.
   * @param options - `length` is the number of hex characters (default 12).
   */
  public static generateSalt(options?: GenerateSaltOptions): string {
    const length = options?.length ?? 12;
    const randomBytes = crypto.randomBytes(Math.ceil(length / 2));
    return randomBytes.toString("hex").slice(0, length);
  }
  /**
   * Hash a password as `BLAKE2s$<hex digest>$<salt>`.
   * The salt is `overrideSalt` when given, a freshly generated salt when
   * `saltOpts.length` is set, otherwise empty.
   */
  public static hash(password: string, options?: HashPasswordOptions): string {
    const salt =
      options?.overrideSalt ??
      (options?.saltOpts?.length
        ? Password.generateSalt(options?.saltOpts)
        : "");
    const hash = blake2sHex(`$BLAKE2s$${password}$${salt}`);
    return `BLAKE2s$${hash}$${salt}`;
  }
  /**
   * Constant-time comparison of a candidate password against a stored hash.
   * Returns false — instead of throwing — for malformed stored hashes.
   */
  public static validate(newPass: string, hashed: string): boolean {
    // Stored format: BLAKE2s$<hex digest>$<salt>
    const salt = hashed.split("$")[2] ?? "";
    const expected = Buffer.from(hashed);
    const actual = Buffer.from(Password.hash(newPass, { overrideSalt: salt }));
    // timingSafeEqual throws RangeError on unequal lengths; a stored value
    // that can't even match the recomputed format is simply not a match.
    if (expected.length !== actual.length) return false;
    return crypto.timingSafeEqual(expected, actual);
  }
}
export interface HashPasswordOptions {
  /** Use exactly this salt instead of generating one. */
  overrideSalt?: string;
  /** Salt-generation options; ignored when overrideSalt is provided. */
  saltOpts?: GenerateSaltOptions;
}
export interface GenerateSaltOptions {
  length?: number; // default 12
}
+8
View File
@@ -0,0 +1,8 @@
/**
 * Union of two arrays: items from `b` are appended only when no item
 * already present satisfies `predicate` (defaults to strict equality).
 * Neither input array is mutated.
 */
export const mergeArrays = (a, b, predicate = (a, b) => a === b) => {
  // Start from a shallow copy so `a` is never mutated.
  const merged = [...a];
  for (const candidate of b) {
    const alreadyPresent = merged.some((existing) =>
      predicate(candidate, existing)
    );
    if (!alreadyPresent) merged.push(candidate);
  }
  return merged;
};
+953
View File
@@ -0,0 +1,953 @@
import axios, { AxiosInstance } from "axios";
import https from "https";
import { events } from "../globalEvents";
import {
ApGroup,
ApRadioWifiUsage,
ApWifiLimits,
CreateSiteOptions,
Device,
DeviceRadio,
DeviceState,
DeviceUplink,
Network,
PrivatePSK,
PrivatePSKCreateInput,
SiteListItem,
SiteOverview,
SubsystemHealth,
SysInfo,
UserGroup,
UserGroupCreateInput,
WlanConf,
WlanConfRaw,
WlanConfUpdate,
WlanGroup,
WlanGroupCreateInput,
} from "./unifiTypes";
export class UnifiClient {
private client: AxiosInstance;
  constructor(baseURL: string) {
    // Treat 2xx/3xx as success. TLS verification is disabled because UniFi
    // controllers commonly use self-signed certs — NOTE(review): confirm
    // this is acceptable for the deployment environment.
    this.client = axios.create({
      baseURL,
      validateStatus: (s) => s >= 200 && s < 400,
      httpsAgent: new https.Agent({ rejectUnauthorized: false }),
    });
  }
private persistSession(res: { headers: Record<string, unknown> }): void {
// Cookies
const raw = res.headers["set-cookie"];
if (raw) {
const cookies = (Array.isArray(raw) ? raw : [raw]) as string[];
const cookieString = cookies.map((c) => c.split(";")[0]).join("; ");
this.client.defaults.headers.common["Cookie"] = cookieString;
}
// CSRF token (UniFi OS)
const csrf = res.headers["x-csrf-token"];
if (typeof csrf === "string") {
this.client.defaults.headers.common["X-CSRF-Token"] = csrf;
}
}
async login(username: string, password: string): Promise<void> {
const body = { username, password };
try {
// UniFi OS
const res = await this.client.post("/api/auth/login", body);
events.emit("unifi:login:ok", { type: "unifi-os", status: res.status });
this.persistSession(res);
} catch (e) {
// Legacy controller
events.emit("unifi:login:fallback");
const res = await this.client.post("/api/login", body);
events.emit("unifi:login:ok", { type: "legacy", status: res.status });
this.persistSession(res);
}
}
private async fetchWlanConfRaw(site: string): Promise<WlanConfRaw[]> {
const paths = [
`/proxy/network/api/s/${site}/rest/wlanconf`,
`/api/s/${site}/rest/wlanconf`,
];
for (const path of paths) {
try {
const res = await this.client.get(path);
const data = (res.data?.data ?? res.data) as WlanConfRaw[];
events.emit("unifi:wlan:fetched", { path });
return data;
} catch (e) {
events.emit("unifi:wlan:fetch_failed", {
path,
status: axios.isAxiosError(e) ? e.response?.status : e,
});
}
}
throw new Error("Could not fetch WLAN config from any known path");
}
private static parseWlanConf(w: any): WlanConf {
return {
id: w._id,
name: (w.name || w.ssid || "").toString(),
siteId: w.site_id ?? "",
enabled: w.enabled ?? true,
security: w.security ?? "open",
wpaMode: w.wpa_mode ?? "",
wpaEnc: w.wpa_enc ?? "",
wpa3Support: w.wpa3_support ?? false,
wpa3Transition: w.wpa3_transition ?? false,
wpa3FastRoaming: w.wpa3_fast_roaming ?? false,
wpa3Enhanced192: w.wpa3_enhanced_192 ?? false,
passphrase: typeof w.x_passphrase === "string" ? w.x_passphrase : null,
passphraseAutogenerated: w.passphrase_autogenerated ?? false,
hideSSID: w.hide_ssid ?? false,
isGuest: w.is_guest ?? false,
band: w.wlan_band ?? "both",
bands: w.wlan_bands ?? [],
networkconfId: w.networkconf_id ?? "",
usergroupId: w.usergroup_id ?? "",
apGroupIds: w.ap_group_ids ?? [],
apGroupMode: w.ap_group_mode ?? "devices",
pmfMode: w.pmf_mode ?? "disabled",
groupRekey: w.group_rekey ?? 0,
dtimMode: w.dtim_mode ?? "default",
dtimNg: w.dtim_ng ?? 1,
dtimNa: w.dtim_na ?? 3,
dtim6e: w.dtim_6e ?? 3,
l2Isolation: w.l2_isolation ?? false,
fastRoamingEnabled: w.fast_roaming_enabled ?? false,
bssTransition: w.bss_transition ?? false,
uapsdEnabled: w.uapsd_enabled ?? false,
iappEnabled: w.iapp_enabled ?? false,
proxyArp: w.proxy_arp ?? false,
mcastenhanceEnabled: w.mcastenhance_enabled ?? false,
macFilterEnabled: w.mac_filter_enabled ?? false,
macFilterPolicy: w.mac_filter_policy ?? "allow",
macFilterList: w.mac_filter_list ?? [],
radiusDasEnabled: w.radius_das_enabled ?? false,
radiusMacAuthEnabled: w.radius_mac_auth_enabled ?? false,
radiusMacaclFormat: w.radius_macacl_format ?? "none_lower",
minrateSettingPreference: w.minrate_setting_preference ?? "auto",
minrateNgEnabled: w.minrate_ng_enabled ?? false,
minrateNgDataRateKbps: w.minrate_ng_data_rate_kbps ?? 1000,
minrateNgAdvertisingRates: w.minrate_ng_advertising_rates ?? false,
minrateNaEnabled: w.minrate_na_enabled ?? false,
minrateNaDataRateKbps: w.minrate_na_data_rate_kbps ?? 6000,
minrateNaAdvertisingRates: w.minrate_na_advertising_rates ?? false,
settingPreference: w.setting_preference ?? "auto",
no2ghzOui: w.no2ghz_oui ?? false,
privatePreSharedKeysEnabled: w.private_preshared_keys_enabled ?? false,
privatePreSharedKeys: w.private_preshared_keys ?? [],
saeGroups: w.sae_groups ?? [],
saePsk: w.sae_psk ?? [],
schedule: w.schedule ?? [],
scheduleWithDuration: w.schedule_with_duration ?? [],
bcFilterList: w.bc_filter_list ?? [],
externalId: w.external_id ?? null,
};
}
async getWlanConf(site: string): Promise<WlanConf[]> {
const raw = await this.fetchWlanConfRaw(site);
return raw.map(UnifiClient.parseWlanConf);
}
async updateWlanConf(
site: string,
wlanId: string,
updates: WlanConfUpdate,
): Promise<WlanConf> {
const paths = [
`/proxy/network/api/s/${site}/rest/wlanconf/${wlanId}`,
`/api/s/${site}/rest/wlanconf/${wlanId}`,
];
// Fetch current WLAN to check if a RADIUS profile is configured.
// The controller rejects RADIUS fields when no profile is set.
const currentWlans = await this.getWlanConf(site);
const currentWlan = currentWlans.find((w) => w.id === wlanId);
const hasRadius =
currentWlan?.security === "wpaeap" || updates.security === "wpaeap";
if (!hasRadius) {
delete updates.radius_das_enabled;
delete updates.radius_mac_auth_enabled;
}
for (const path of paths) {
try {
const res = await this.client.put(path, updates);
const raw = (res.data?.data?.[0] ?? res.data) as any;
return UnifiClient.parseWlanConf(raw);
} catch (e) {
if (!axios.isAxiosError(e)) throw e;
// Try next path on 404/401, throw on other errors
if (
e.response &&
e.response.status !== 404 &&
e.response.status !== 401
) {
throw new Error(
`Failed to update WLAN ${wlanId}: ${e.response.status} ${JSON.stringify(e.response.data)}`,
);
}
}
}
throw new Error("Could not update WLAN config from any known path");
}
async getAllSites(): Promise<SiteListItem[]> {
const paths = ["/proxy/network/api/self/sites", "/api/self/sites"];
for (const path of paths) {
try {
const res = await this.client.get(path);
const raw = (res.data?.data ?? res.data) as any[];
return raw.map(
(s: any): SiteListItem => ({
id: s._id,
name: s.name,
description: s.desc ?? "",
deviceCount: s.device_count ?? 0,
role: s.role ?? "",
}),
);
} catch (e) {
if (!axios.isAxiosError(e)) throw e;
if (
e.response &&
e.response.status !== 404 &&
e.response.status !== 401
) {
throw e;
}
}
}
throw new Error("Could not fetch sites from any known path");
}
async getSiteOverview(site: string): Promise<SiteOverview> {
const prefixes = ["/proxy/network", ""];
for (const prefix of prefixes) {
try {
const [healthRes, sysInfoRes, sitesRes] = await Promise.all([
this.client.get(`${prefix}/api/s/${site}/stat/health`),
this.client.get(`${prefix}/api/s/${site}/stat/sysinfo`),
this.client.get(`${prefix}/api/self/sites`),
]);
const healthRaw = (healthRes.data?.data ?? healthRes.data) as any[];
const sysInfoRaw = (sysInfoRes.data?.data?.[0] ??
sysInfoRes.data) as any;
const sitesRaw = (sitesRes.data?.data ?? sitesRes.data) as any[];
const siteRaw = sitesRaw.find((s: any) => s.name === site);
if (!siteRaw) throw new Error(`Site "${site}" not found in sites list`);
const health: SubsystemHealth[] = healthRaw.map((h: any) => ({
subsystem: h.subsystem,
status: h.status,
numUser: h.num_user,
numGuest: h.num_guest,
numIot: h.num_iot,
txBytesR: h["tx_bytes-r"],
rxBytesR: h["rx_bytes-r"],
numAp: h.num_ap,
numSw: h.num_sw,
numGw: h.num_gw,
numAdopted: h.num_adopted,
numDisconnected: h.num_disconnected,
numPending: h.num_pending,
numDisabled: h.num_disabled,
}));
const sysInfo: SysInfo = {
name: sysInfoRaw.name,
hostname: sysInfoRaw.hostname,
version: sysInfoRaw.version,
build: sysInfoRaw.build,
timezone: sysInfoRaw.timezone,
uptime: sysInfoRaw.uptime,
ipAddresses: sysInfoRaw.ip_addrs ?? [],
updateAvailable: sysInfoRaw.update_available ?? false,
isCloudConsole: sysInfoRaw.is_cloud_console ?? false,
dataRetentionDays: sysInfoRaw.data_retention_days ?? 0,
informPort: sysInfoRaw.inform_port,
httpsPort: sysInfoRaw.https_port,
unsupportedDeviceCount: sysInfoRaw.unsupported_device_count ?? 0,
};
return {
site: {
id: siteRaw._id,
name: siteRaw.name,
description: siteRaw.desc ?? "",
deviceCount: siteRaw.device_count ?? 0,
role: siteRaw.role ?? "",
},
health,
sysInfo,
};
} catch (e) {
if (!axios.isAxiosError(e)) throw e;
if (
e.response &&
e.response.status !== 404 &&
e.response.status !== 401
) {
throw e;
}
}
}
throw new Error("Could not fetch site overview from any known path");
}
private static parseDeviceState(state: number): DeviceState {
const map: Record<number, DeviceState> = {
0: "disconnected",
1: "connected",
2: "pending",
4: "adopting",
5: "adopting",
};
return map[state] ?? "unknown";
}
async getDevices(site: string): Promise<Device[]> {
const paths = [
`/proxy/network/api/s/${site}/stat/device`,
`/api/s/${site}/stat/device`,
];
for (const path of paths) {
try {
const res = await this.client.get(path);
const raw = (res.data?.data ?? res.data) as any[];
return raw.map((d: any): Device => {
const uplink: DeviceUplink | null = d.uplink
? {
type: d.uplink.type,
mac: d.uplink.uplink_mac,
ip: d.uplink.uplink_remote_ip,
uplinkRemotePort: d.uplink.uplink_remote_port,
speed: d.uplink.speed,
fullDuplex: d.uplink.full_duplex,
}
: null;
const radios: DeviceRadio[] = (d.radio_table ?? []).map(
(r: any, i: number) => {
const stats = d.radio_table_stats?.[i] ?? {};
return {
name: r.name ?? r.radio,
radio: r.radio,
channel: r.channel,
txPower: r.tx_power,
txPowerMode: r.tx_power_mode,
minRssiEnabled: r.min_rssi_enabled ?? false,
numSta: stats.num_sta ?? 0,
satisfaction: stats.satisfaction ?? null,
};
},
);
return {
id: d._id,
mac: d.mac,
ip: d.ip ?? "",
name: d.name ?? d.mac,
model: d.model,
shortname: d.shortname ?? d.model,
type: d.type,
version: d.version ?? "",
serial: d.serial ?? "",
state: UnifiClient.parseDeviceState(d.state),
adopted: d.adopted ?? false,
uptime: d.uptime ?? 0,
lastSeen: d.last_seen ?? 0,
upgradable: d.upgradable ?? false,
satisfaction: d.satisfaction ?? null,
numClients: d.num_sta ?? 0,
numUserClients: d["user-num_sta"] ?? 0,
numGuestClients: d["guest-num_sta"] ?? 0,
txBytes: d.tx_bytes ?? 0,
rxBytes: d.rx_bytes ?? 0,
uplink,
radios,
modelInLts: d.model_in_lts ?? false,
modelInEol: d.model_in_eol ?? false,
};
});
} catch (e) {
if (!axios.isAxiosError(e)) throw e;
if (
e.response &&
e.response.status !== 404 &&
e.response.status !== 401
) {
throw e;
}
}
}
throw new Error("Could not fetch devices from any known path");
}
async getNetworks(site: string): Promise<Network[]> {
const paths = [
`/proxy/network/api/s/${site}/rest/networkconf`,
`/api/s/${site}/rest/networkconf`,
];
for (const path of paths) {
try {
const res = await this.client.get(path);
const raw = (res.data?.data ?? res.data) as any[];
return raw.map(
(n: any): Network => ({
id: n._id,
name: n.name ?? "",
purpose: n.purpose ?? "corporate",
enabled: n.enabled ?? true,
ipSubnet: n.ip_subnet ?? null,
vlan: n.vlan != null ? Number(n.vlan) : null,
vlanEnabled: n.vlan_enabled ?? false,
isNat: n.is_nat ?? false,
domainName: n.domain_name ?? null,
networkGroup: n.networkgroup ?? null,
dhcpdEnabled: n.dhcpd_enabled ?? false,
dhcpdStart: n.dhcpd_start ?? null,
dhcpdStop: n.dhcpd_stop ?? null,
dhcpdLeasetime: n.dhcpd_leasetime ?? null,
dhcpRelayEnabled: n.dhcp_relay_enabled ?? false,
dhcpGuardEnabled: n.dhcpguard_enabled ?? false,
igmpSnooping: n.igmp_snooping ?? false,
ipv6Enabled: n.ipv6_enabled ?? false,
ipv6InterfaceType: n.ipv6_interface_type ?? null,
internetAccessEnabled: n.internet_access_enabled ?? null,
}),
);
} catch (e) {
if (!axios.isAxiosError(e)) throw e;
if (
e.response &&
e.response.status !== 404 &&
e.response.status !== 401
) {
throw e;
}
}
}
throw new Error("Could not fetch networks from any known path");
}
async createSite(description: string): Promise<SiteListItem> {
const paths = [
"/proxy/network/api/s/default/cmd/sitemgr",
"/api/s/default/cmd/sitemgr",
];
const body = { cmd: "add-site", desc: description };
for (const path of paths) {
try {
const res = await this.client.post(path, body);
const raw = (res.data?.data?.[0] ?? res.data) as any;
return {
id: raw._id,
name: raw.name,
description: raw.desc ?? description,
deviceCount: raw.device_count ?? 0,
role: raw.role ?? "",
};
} catch (e) {
if (!axios.isAxiosError(e)) throw e;
if (
e.response &&
e.response.status !== 404 &&
e.response.status !== 401
) {
throw new Error(
`Failed to create site: ${e.response.status} ${JSON.stringify(e.response.data)}`,
);
}
}
}
throw new Error("Could not create site from any known path");
}
// --- WLAN Groups ---
async getWlanGroups(site: string): Promise<WlanGroup[]> {
const paths = [
`/proxy/network/api/s/${site}/rest/wlangroup`,
`/api/s/${site}/rest/wlangroup`,
];
for (const path of paths) {
try {
const res = await this.client.get(path);
const raw = (res.data?.data ?? res.data) as any[];
return raw.map(
(g: any): WlanGroup => ({
id: g._id,
name: g.name ?? "",
siteId: g.site_id ?? "",
noDelete: g.attr_no_delete ?? false,
noEdit: g.attr_no_edit ?? false,
hidden: g.attr_hidden ?? false,
}),
);
} catch (e) {
if (!axios.isAxiosError(e)) throw e;
if (
e.response &&
e.response.status !== 404 &&
e.response.status !== 401
)
throw e;
}
}
throw new Error("Could not fetch WLAN groups from any known path");
}
async createWlanGroup(
site: string,
input: WlanGroupCreateInput,
): Promise<WlanGroup> {
const paths = [
`/proxy/network/api/s/${site}/rest/wlangroup`,
`/api/s/${site}/rest/wlangroup`,
];
const body: Record<string, unknown> = { name: input.name };
for (const path of paths) {
try {
const res = await this.client.post(path, body);
const raw = (res.data?.data?.[0] ?? res.data) as any;
return {
id: raw._id,
name: raw.name ?? input.name,
siteId: raw.site_id ?? "",
noDelete: raw.attr_no_delete ?? false,
noEdit: raw.attr_no_edit ?? false,
hidden: raw.attr_hidden ?? false,
};
} catch (e) {
if (!axios.isAxiosError(e)) throw e;
if (
e.response &&
e.response.status !== 404 &&
e.response.status !== 401
) {
throw new Error(
`Failed to create WLAN group: ${e.response.status} ${JSON.stringify(e.response.data)}`,
);
}
}
}
throw new Error("Could not create WLAN group from any known path");
}
// --- User Groups (Speed Profiles) ---
async getUserGroups(site: string): Promise<UserGroup[]> {
const paths = [
`/proxy/network/api/s/${site}/rest/usergroup`,
`/api/s/${site}/rest/usergroup`,
];
for (const path of paths) {
try {
const res = await this.client.get(path);
const raw = (res.data?.data ?? res.data) as any[];
return raw.map(
(g: any): UserGroup => ({
id: g._id,
name: g.name ?? "",
siteId: g.site_id ?? "",
noDelete: g.attr_no_delete ?? false,
downloadLimitKbps: g.qos_rate_max_down ?? -1,
uploadLimitKbps: g.qos_rate_max_up ?? -1,
}),
);
} catch (e) {
if (!axios.isAxiosError(e)) throw e;
if (
e.response &&
e.response.status !== 404 &&
e.response.status !== 401
)
throw e;
}
}
throw new Error("Could not fetch user groups from any known path");
}
async createUserGroup(
site: string,
input: UserGroupCreateInput,
): Promise<UserGroup> {
const paths = [
`/proxy/network/api/s/${site}/rest/usergroup`,
`/api/s/${site}/rest/usergroup`,
];
const body: Record<string, unknown> = { name: input.name };
if (input.downloadLimitKbps !== undefined)
body.qos_rate_max_down = input.downloadLimitKbps;
if (input.uploadLimitKbps !== undefined)
body.qos_rate_max_up = input.uploadLimitKbps;
for (const path of paths) {
try {
const res = await this.client.post(path, body);
const raw = (res.data?.data?.[0] ?? res.data) as any;
return {
id: raw._id,
name: raw.name ?? input.name,
siteId: raw.site_id ?? "",
noDelete: raw.attr_no_delete ?? false,
downloadLimitKbps: raw.qos_rate_max_down ?? -1,
uploadLimitKbps: raw.qos_rate_max_up ?? -1,
};
} catch (e) {
if (!axios.isAxiosError(e)) throw e;
if (
e.response &&
e.response.status !== 404 &&
e.response.status !== 401
) {
throw new Error(
`Failed to create user group: ${e.response.status} ${JSON.stringify(e.response.data)}`,
);
}
}
}
throw new Error("Could not create user group from any known path");
}
// --- AP Groups ---
async getApGroups(site: string): Promise<ApGroup[]> {
const paths = [
`/proxy/network/v2/api/site/${site}/apgroups`,
`/v2/api/site/${site}/apgroups`,
];
for (const path of paths) {
try {
const res = await this.client.get(path);
const raw = (res.data?.data ?? res.data) as any[];
return raw.map(
(g: any): ApGroup => ({
id: g._id,
name: g.name ?? "",
deviceMacs: g.device_macs ?? [],
noDelete: g.attr_no_delete ?? false,
forWlanconf: g.for_wlanconf ?? false,
}),
);
} catch (e) {
if (!axios.isAxiosError(e)) throw e;
if (
e.response &&
e.response.status !== 404 &&
e.response.status !== 401
)
throw e;
}
}
throw new Error("Could not fetch AP groups from any known path");
}
async createApGroup(
site: string,
name: string,
deviceMacs: string[],
forWlanconf: boolean = false,
): Promise<ApGroup> {
const paths = [
`/proxy/network/v2/api/site/${site}/apgroups`,
`/v2/api/site/${site}/apgroups`,
];
const body = {
name,
device_macs: deviceMacs,
for_wlanconf: forWlanconf,
};
for (const path of paths) {
try {
const res = await this.client.post(path, body);
const raw = (res.data?.data?.[0] ?? res.data) as any;
return {
id: raw._id,
name: raw.name ?? name,
deviceMacs: raw.device_macs ?? deviceMacs,
noDelete: raw.attr_no_delete ?? false,
forWlanconf: raw.for_wlanconf ?? forWlanconf,
};
} catch (e) {
if (!axios.isAxiosError(e)) throw e;
if (
e.response &&
e.response.status !== 404 &&
e.response.status !== 401
) {
throw new Error(
`Failed to create AP group: ${e.response.status} ${JSON.stringify(e.response.data)}`,
);
}
}
}
throw new Error("Could not create AP group from any known path");
}
async updateApGroup(
site: string,
groupId: string,
deviceMacs: string[],
): Promise<ApGroup> {
const paths = [
`/proxy/network/v2/api/site/${site}/apgroups/${groupId}`,
`/v2/api/site/${site}/apgroups/${groupId}`,
];
const body = {
name: "devices_ap_group",
device_macs: deviceMacs,
for_wlanconf: true,
};
for (const path of paths) {
try {
const res = await this.client.put(path, body);
const raw = (res.data?.data?.[0] ?? res.data) as any;
return {
id: raw._id ?? groupId,
name: raw.name ?? "devices_ap_group",
deviceMacs: raw.device_macs ?? deviceMacs,
noDelete: raw.attr_no_delete ?? false,
forWlanconf: raw.for_wlanconf ?? true,
};
} catch (e) {
if (!axios.isAxiosError(e)) throw e;
if (
e.response &&
e.response.status !== 404 &&
e.response.status !== 401
) {
throw new Error(
`Failed to update AP group: ${e.response.status} ${JSON.stringify(e.response.data)}`,
);
}
}
}
throw new Error("Could not update AP group from any known path");
}
// --- Access Points ---
async getAccessPoints(site: string): Promise<Device[]> {
const devices = await this.getDevices(site);
return devices.filter((d) => d.type === "uap");
}
// --- WiFi Limits ---
async getWifiLimits(site: string): Promise<ApWifiLimits[]> {
const SSID_LIMIT_PER_RADIO = 8;
const paths = [
`/proxy/network/api/s/${site}/stat/device`,
`/api/s/${site}/stat/device`,
];
for (const path of paths) {
try {
const res = await this.client.get(path);
const raw = (res.data?.data ?? res.data) as any[];
const aps = raw.filter((d: any) => d.type === "uap");
return aps.map((ap: any): ApWifiLimits => {
const vapTable: any[] = ap.vap_table ?? [];
const radioMap = new Map<string, { wlanNames: Set<string> }>();
for (const vap of vapTable) {
if (!vap.up || !vap.radio) continue;
if (!radioMap.has(vap.radio)) {
radioMap.set(vap.radio, { wlanNames: new Set() });
}
if (vap.essid) {
radioMap.get(vap.radio)!.wlanNames.add(vap.essid);
}
}
const radioBandMap: Record<string, string> = {
ng: "2g",
na: "5g",
"6e": "6e",
};
const radios: ApRadioWifiUsage[] = Array.from(radioMap.entries()).map(
([radio, data]): ApRadioWifiUsage => ({
radio,
band: radioBandMap[radio] ?? radio,
activeWlans: data.wlanNames.size,
limit: SSID_LIMIT_PER_RADIO,
remaining: Math.max(
0,
SSID_LIMIT_PER_RADIO - data.wlanNames.size,
),
wlanNames: Array.from(data.wlanNames),
}),
);
return {
apId: ap._id,
apName: ap.name ?? ap.mac,
mac: ap.mac,
model: ap.model ?? "",
radios,
};
});
} catch (e) {
if (!axios.isAxiosError(e)) throw e;
if (
e.response &&
e.response.status !== 404 &&
e.response.status !== 401
)
throw e;
}
}
throw new Error("Could not fetch WiFi limits from any known path");
}
// --- Private Pre-Shared Keys ---
private static parsePPSKs(raw: any[]): PrivatePSK[] {
return raw.map(
(p: any): PrivatePSK => ({
key: p.key ?? "",
name: p.name ?? "",
mac: p.mac ?? null,
vlanId: p.vlan_id ?? null,
}),
);
}
async getPrivatePSKs(site: string, wlanId: string): Promise<PrivatePSK[]> {
const paths = [
`/proxy/network/api/s/${site}/rest/wlanconf/${wlanId}`,
`/api/s/${site}/rest/wlanconf/${wlanId}`,
];
for (const path of paths) {
try {
const res = await this.client.get(path);
const raw = (res.data?.data?.[0] ?? res.data) as any;
return UnifiClient.parsePPSKs(raw.private_preshared_keys ?? []);
} catch (e) {
if (!axios.isAxiosError(e)) throw e;
if (
e.response &&
e.response.status !== 404 &&
e.response.status !== 401
)
throw e;
}
}
throw new Error("Could not fetch PPSKs from any known path");
}
async createPrivatePSK(
site: string,
wlanId: string,
psk: PrivatePSKCreateInput,
): Promise<PrivatePSK[]> {
const paths = [
`/proxy/network/api/s/${site}/rest/wlanconf/${wlanId}`,
`/api/s/${site}/rest/wlanconf/${wlanId}`,
];
// Fetch current PPSKs
let currentPpsks: any[] = [];
for (const path of paths) {
try {
const res = await this.client.get(path);
const raw = (res.data?.data?.[0] ?? res.data) as any;
currentPpsks = (raw.private_preshared_keys ?? []) as any[];
break;
} catch (e) {
if (!axios.isAxiosError(e)) throw e;
if (
e.response &&
e.response.status !== 404 &&
e.response.status !== 401
)
throw e;
}
}
const newPsk: Record<string, unknown> = {
key: psk.key,
name: psk.name,
};
if (psk.mac) newPsk.mac = psk.mac;
if (psk.vlanId !== undefined) newPsk.vlan_id = psk.vlanId;
currentPpsks.push(newPsk);
// Update WLAN with new PPSKs
for (const path of paths) {
try {
const res = await this.client.put(path, {
private_preshared_keys: currentPpsks,
private_preshared_keys_enabled: true,
});
const raw = (res.data?.data?.[0] ?? res.data) as any;
return UnifiClient.parsePPSKs(raw.private_preshared_keys ?? []);
} catch (e) {
if (!axios.isAxiosError(e)) throw e;
if (
e.response &&
e.response.status !== 404 &&
e.response.status !== 401
) {
throw new Error(
`Failed to create PPSK: ${e.response.status} ${JSON.stringify(e.response.data)}`,
);
}
}
}
throw new Error("Could not create PPSK from any known path");
}
}
+434
View File
@@ -0,0 +1,434 @@
/**
 * Raw WLAN configuration object as returned by the UniFi controller API.
 * Only the fields read directly by this codebase are typed; everything
 * else passes through via the index signature.
 */
export interface WlanConfRaw {
  _id: string;
  name?: string;
  ssid?: string;
  x_passphrase?: string;
  [key: string]: unknown;
}
/**
 * Fully parsed, camelCase WLAN configuration as produced by
 * `UnifiClient.parseWlanConf`. Missing raw fields are filled with the
 * defaults applied in that parser.
 */
export interface WlanConf {
  id: string;
  name: string;
  siteId: string;
  enabled: boolean;
  security: string;
  wpaMode: string;
  wpaEnc: string;
  wpa3Support: boolean;
  wpa3Transition: boolean;
  wpa3FastRoaming: boolean;
  wpa3Enhanced192: boolean;
  /** Raw `x_passphrase`; null when the controller did not return one. */
  passphrase: string | null;
  passphraseAutogenerated: boolean;
  hideSSID: boolean;
  isGuest: boolean;
  band: string;
  bands: string[];
  networkconfId: string;
  usergroupId: string;
  apGroupIds: string[];
  apGroupMode: string;
  pmfMode: string;
  groupRekey: number;
  dtimMode: string;
  dtimNg: number;
  dtimNa: number;
  dtim6e: number;
  l2Isolation: boolean;
  fastRoamingEnabled: boolean;
  bssTransition: boolean;
  uapsdEnabled: boolean;
  iappEnabled: boolean;
  proxyArp: boolean;
  mcastenhanceEnabled: boolean;
  macFilterEnabled: boolean;
  macFilterPolicy: string;
  macFilterList: string[];
  radiusDasEnabled: boolean;
  radiusMacAuthEnabled: boolean;
  radiusMacaclFormat: string;
  minrateSettingPreference: string;
  minrateNgEnabled: boolean;
  minrateNgDataRateKbps: number;
  minrateNgAdvertisingRates: boolean;
  minrateNaEnabled: boolean;
  minrateNaDataRateKbps: number;
  minrateNaAdvertisingRates: boolean;
  settingPreference: string;
  no2ghzOui: boolean;
  privatePreSharedKeysEnabled: boolean;
  privatePreSharedKeys: unknown[];
  saeGroups: unknown[];
  saePsk: unknown[];
  schedule: unknown[];
  scheduleWithDuration: unknown[];
  bcFilterList: unknown[];
  externalId: string | null;
}
/**
 * Snake_case WLAN update payload accepted by the UniFi controller's
 * `rest/wlanconf` endpoint. Every field is optional; only the fields
 * present are changed.
 */
export interface WlanConfUpdate {
  name?: string;
  x_passphrase?: string;
  enabled?: boolean;
  security?: "wpapsk" | "wpaeap" | "open" | "osen";
  wpa_mode?: "wpa2" | "wpa3" | "wpa2wpa3";
  wpa_enc?: "ccmp" | "gcmp" | "ccmp-gcmp";
  hide_ssid?: boolean;
  mac_filter_enabled?: boolean;
  mac_filter_policy?: "allow" | "deny";
  is_guest?: boolean;
  l2_isolation?: boolean;
  fast_roaming_enabled?: boolean;
  bss_transition?: boolean;
  uapsd_enabled?: boolean;
  group_rekey?: number;
  dtim_mode?: "default" | "custom";
  dtim_ng?: number;
  dtim_na?: number;
  minrate_ng_enabled?: boolean;
  minrate_na_enabled?: boolean;
  radius_das_enabled?: boolean;
  radius_mac_auth_enabled?: boolean;
  pmf_mode?: "disabled" | "optional" | "required";
  wlan_band?: "both" | "2g" | "5g";
  usergroup_id?: string;
  proxy_arp?: boolean;
  mcastenhance_enabled?: boolean;
  mac_filter_list?: string[];
  no2ghz_oui?: boolean;
  ap_group_ids?: string[];
  ap_group_mode?: string;
}
/**
 * CamelCase update input matching the WlanConf return shape.
 * Accepted by the API and converted to WlanConfUpdate (snake_case) before
 * being sent to the UniFi controller.
 */
export interface WlanConfUpdateInput {
  name?: string;
  passphrase?: string;
  enabled?: boolean;
  security?: "wpapsk" | "wpaeap" | "open" | "osen";
  wpaMode?: "wpa2" | "wpa3" | "wpa2wpa3";
  wpaEnc?: "ccmp" | "gcmp" | "ccmp-gcmp";
  hideSSID?: boolean;
  macFilterEnabled?: boolean;
  macFilterPolicy?: "allow" | "deny";
  isGuest?: boolean;
  l2Isolation?: boolean;
  fastRoamingEnabled?: boolean;
  bssTransition?: boolean;
  uapsdEnabled?: boolean;
  groupRekey?: number;
  dtimMode?: "default" | "custom";
  dtimNg?: number;
  dtimNa?: number;
  minrateNgEnabled?: boolean;
  minrateNaEnabled?: boolean;
  radiusDasEnabled?: boolean;
  radiusMacAuthEnabled?: boolean;
  pmfMode?: "disabled" | "optional" | "required";
  band?: "both" | "2g" | "5g";
  usergroupId?: string;
  proxyArp?: boolean;
  mcastenhanceEnabled?: boolean;
  macFilterList?: string[];
  no2ghzOui?: boolean;
  apGroupIds?: string[];
  apGroupMode?: string;
}
/**
 * Converts a camelCase WlanConfUpdateInput into the snake_case
 * WlanConfUpdate payload the UniFi controller expects. Fields that are
 * undefined on the input are omitted from the result entirely.
 */
export function toWlanConfUpdate(input: WlanConfUpdateInput): WlanConfUpdate {
  // Conditional spreads: `false` spreads to nothing, so each output key is
  // only present when the corresponding input field was provided.
  return {
    ...(input.name !== undefined && { name: input.name }),
    ...(input.passphrase !== undefined && { x_passphrase: input.passphrase }),
    ...(input.enabled !== undefined && { enabled: input.enabled }),
    ...(input.security !== undefined && { security: input.security }),
    ...(input.wpaMode !== undefined && { wpa_mode: input.wpaMode }),
    ...(input.wpaEnc !== undefined && { wpa_enc: input.wpaEnc }),
    ...(input.hideSSID !== undefined && { hide_ssid: input.hideSSID }),
    ...(input.macFilterEnabled !== undefined && {
      mac_filter_enabled: input.macFilterEnabled,
    }),
    ...(input.macFilterPolicy !== undefined && {
      mac_filter_policy: input.macFilterPolicy,
    }),
    ...(input.isGuest !== undefined && { is_guest: input.isGuest }),
    ...(input.l2Isolation !== undefined && {
      l2_isolation: input.l2Isolation,
    }),
    ...(input.fastRoamingEnabled !== undefined && {
      fast_roaming_enabled: input.fastRoamingEnabled,
    }),
    ...(input.bssTransition !== undefined && {
      bss_transition: input.bssTransition,
    }),
    ...(input.uapsdEnabled !== undefined && {
      uapsd_enabled: input.uapsdEnabled,
    }),
    ...(input.groupRekey !== undefined && { group_rekey: input.groupRekey }),
    ...(input.dtimMode !== undefined && { dtim_mode: input.dtimMode }),
    ...(input.dtimNg !== undefined && { dtim_ng: input.dtimNg }),
    ...(input.dtimNa !== undefined && { dtim_na: input.dtimNa }),
    ...(input.minrateNgEnabled !== undefined && {
      minrate_ng_enabled: input.minrateNgEnabled,
    }),
    ...(input.minrateNaEnabled !== undefined && {
      minrate_na_enabled: input.minrateNaEnabled,
    }),
    ...(input.radiusDasEnabled !== undefined && {
      radius_das_enabled: input.radiusDasEnabled,
    }),
    ...(input.radiusMacAuthEnabled !== undefined && {
      radius_mac_auth_enabled: input.radiusMacAuthEnabled,
    }),
    ...(input.pmfMode !== undefined && { pmf_mode: input.pmfMode }),
    ...(input.band !== undefined && { wlan_band: input.band }),
    ...(input.usergroupId !== undefined && {
      usergroup_id: input.usergroupId,
    }),
    ...(input.proxyArp !== undefined && { proxy_arp: input.proxyArp }),
    ...(input.mcastenhanceEnabled !== undefined && {
      mcastenhance_enabled: input.mcastenhanceEnabled,
    }),
    ...(input.macFilterList !== undefined && {
      mac_filter_list: input.macFilterList,
    }),
    ...(input.no2ghzOui !== undefined && { no2ghz_oui: input.no2ghzOui }),
    ...(input.apGroupIds !== undefined && { ap_group_ids: input.apGroupIds }),
    ...(input.apGroupMode !== undefined && {
      ap_group_mode: input.apGroupMode,
    }),
  };
}
// --- Site overview types ---
/**
 * Per-subsystem health entry from the controller's `stat/health` endpoint.
 * Optional counters are only populated for the relevant subsystem.
 */
export interface SubsystemHealth {
  subsystem: "wlan" | "wan" | "www" | "lan" | "vpn";
  status: "ok" | "warn" | "error" | "unknown";
  numUser?: number;
  numGuest?: number;
  numIot?: number;
  /** Mapped from controller key `tx_bytes-r`. */
  txBytesR?: number;
  /** Mapped from controller key `rx_bytes-r`. */
  rxBytesR?: number;
  // WLAN-specific
  numAp?: number;
  // LAN-specific
  numSw?: number;
  // WAN-specific
  numGw?: number;
  // Shared device counts
  numAdopted?: number;
  numDisconnected?: number;
  numPending?: number;
  numDisabled?: number;
}
/** Controller system information, from the `stat/sysinfo` endpoint. */
export interface SysInfo {
  name: string;
  hostname: string;
  version: string;
  build: string;
  timezone: string;
  uptime: number; // presumably seconds — confirm against controller docs
  ipAddresses: string[];
  updateAvailable: boolean;
  isCloudConsole: boolean;
  dataRetentionDays: number;
  informPort: number;
  httpsPort: number;
  unsupportedDeviceCount: number;
}
/** Identifying metadata for a single site. */
export interface SiteInfo {
  id: string;
  name: string;
  description: string;
  deviceCount: number;
  role: string;
}
/** Aggregate returned by `UnifiClient.getSiteOverview`. */
export interface SiteOverview {
  site: SiteInfo;
  health: SubsystemHealth[];
  sysInfo: SysInfo;
}
// --- Device types ---
/** UniFi device category codes as reported by the controller (`uap` = access point, filtered on by `getAccessPoints`). */
export type DeviceType = "uap" | "usw" | "ugw" | "uxg" | "ubb" | "udm";
/** Normalized device state; produced from the controller's numeric state code. */
export type DeviceState =
  | "connected"
  | "disconnected"
  | "pending"
  | "adopting"
  | "unknown";
/** Uplink information for a device; null on `Device` when absent. */
export interface DeviceUplink {
  type?: string;
  mac?: string;
  ip?: string;
  uplinkRemotePort?: number;
  speed?: number;
  fullDuplex?: boolean;
}
/** One radio of an AP, merged from `radio_table` and `radio_table_stats`. */
export interface DeviceRadio {
  name: string;
  radio: string;
  channel: number;
  txPower: number;
  txPowerMode: string;
  minRssiEnabled: boolean;
  numSta: number;
  satisfaction: number | null;
}
/** Parsed device record from the controller's `stat/device` endpoint. */
export interface Device {
  id: string;
  mac: string;
  ip: string;
  name: string;
  model: string;
  shortname: string;
  type: DeviceType;
  version: string;
  serial: string;
  state: DeviceState;
  adopted: boolean;
  uptime: number;
  lastSeen: number;
  upgradable: boolean;
  satisfaction: number | null;
  numClients: number;
  /** Mapped from controller key `user-num_sta`. */
  numUserClients: number;
  /** Mapped from controller key `guest-num_sta`. */
  numGuestClients: number;
  txBytes: number;
  rxBytes: number;
  uplink: DeviceUplink | null;
  radios: DeviceRadio[];
  modelInLts: boolean;
  modelInEol: boolean;
}
// --- Network types ---
/** Purpose of a network configuration as reported by the controller. */
export type NetworkPurpose =
  | "corporate"
  | "vlan-only"
  | "wan"
  | "vpn-client"
  | "remote-user-vpn"
  | "site-vpn";
/** Parsed network configuration from the `rest/networkconf` endpoint. */
export interface Network {
  id: string;
  name: string;
  purpose: NetworkPurpose;
  enabled: boolean;
  ipSubnet: string | null;
  vlan: number | null;
  vlanEnabled: boolean;
  isNat: boolean;
  domainName: string | null;
  networkGroup: string | null;
  dhcpdEnabled: boolean;
  dhcpdStart: string | null;
  dhcpdStop: string | null;
  dhcpdLeasetime: number | null;
  dhcpRelayEnabled: boolean;
  dhcpGuardEnabled: boolean;
  igmpSnooping: boolean;
  ipv6Enabled: boolean;
  ipv6InterfaceType: string | null;
  internetAccessEnabled: boolean | null;
}
// --- Site create types ---
export interface CreateSiteOptions {
  /** Human-readable description / display name for the site */
  description: string;
}
// --- Site list types ---
/** Entry in the site list returned by `getAllSites`/`createSite`. */
export interface SiteListItem {
  id: string;
  name: string;
  description: string;
  deviceCount: number;
  role: string;
}
// --- WLAN Group types ---
/** Parsed WLAN group from the `rest/wlangroup` endpoint. */
export interface WlanGroup {
  id: string;
  name: string;
  siteId: string;
  /** Mapped from controller key `attr_no_delete`. */
  noDelete: boolean;
  /** Mapped from controller key `attr_no_edit`. */
  noEdit: boolean;
  /** Mapped from controller key `attr_hidden`. */
  hidden: boolean;
}
/** Input for creating a WLAN group; only a name is sent. */
export interface WlanGroupCreateInput {
  name: string;
}
// --- AP Group types ---
/** Parsed AP group from the v2 `apgroups` endpoint. */
export interface ApGroup {
  id: string;
  name: string;
  /** MAC addresses of the APs in this group. */
  deviceMacs: string[];
  noDelete: boolean;
  /** Mapped from controller key `for_wlanconf`. */
  forWlanconf: boolean;
}
// --- User Group (Speed Profile) types ---
/** Parsed user group (speed profile) from the `rest/usergroup` endpoint. */
export interface UserGroup {
  id: string;
  name: string;
  siteId: string;
  noDelete: boolean;
  /** Download rate limit in Kbps. -1 means unlimited. */
  downloadLimitKbps: number;
  /** Upload rate limit in Kbps. -1 means unlimited. */
  uploadLimitKbps: number;
}
/** Input for creating a user group. */
export interface UserGroupCreateInput {
  name: string;
  /** Download rate limit in Kbps. -1 or omit for unlimited. */
  downloadLimitKbps?: number;
  /** Upload rate limit in Kbps. -1 or omit for unlimited. */
  uploadLimitKbps?: number;
}
// --- Private PSK types ---
/** One private pre-shared key entry on a WLAN. */
export interface PrivatePSK {
  key: string;
  name: string;
  mac: string | null;
  vlanId: number | null;
}
/** Input for appending a PPSK to a WLAN; mac/vlanId are optional. */
export interface PrivatePSKCreateInput {
  key: string;
  name: string;
  mac?: string;
  vlanId?: number;
}
// --- WiFi Limit types ---
/** SSID usage for one radio of an AP, as computed by `getWifiLimits`. */
export interface ApRadioWifiUsage {
  radio: string;
  band: string;
  activeWlans: number;
  limit: number;
  remaining: number;
  wlanNames: string[];
}
/** Per-AP SSID usage summary, as computed by `getWifiLimits`. */
export interface ApWifiLimits {
  apId: string;
  apName: string;
  mac: string;
  model: string;
  radios: ApRadioWifiUsage[];
}
@@ -0,0 +1,427 @@
import { Socket } from "socket.io-client";
import {
createWorkerJob,
emitWorkerGlobalEvent,
workerLog,
} from "../jobFactory";
import { WorkerQueue } from "../queues";
import {
TTL_ARCHIVED_MS,
fetchAndCacheActivities,
fetchAndCacheNotes,
fetchAndCacheContacts,
fetchAndCacheProducts,
fetchAndCacheOppCwData,
fetchAndCacheCompanyCwData,
companyCwCacheKey,
} from "../../cache/opportunityCache";
import { computeCacheTTL } from "../../algorithms/computeCacheTTL";
import { prisma, redis } from "../../../constants";
/**
 * Worker factory for active opportunity cache refresh.
 *
 * Runs the unified opportunity cache refresh pass for all opportunities.
 * Active/recent opportunities use adaptive TTL, while archived opportunities
 * (where adaptive TTL resolves to null) are refreshed with TTL_ARCHIVED_MS.
 * Only cache keys that have expired are re-fetched from ConnectWise.
 *
 * Designed to be called on the active cache job interval. A Redis lock
 * guarantees at most one concurrent run across processes.
 *
 * @param socket - Socket.IO client connection to manager
 * @param opts - Optional hooks; `runFullRefresh` executes a full
 *               opportunities refresh stage before the cache pass
 * @returns Promise that resolves when refresh completes
 */
export async function refreshActiveOpportunitiesWorker(
  socket: Socket,
  opts?: {
    runFullRefresh?: () => Promise<void>;
  },
): Promise<void> {
  return createWorkerJob(
    socket,
    WorkerQueue.REFRESH_ACTIVE_OPPORTUNITIES,
    async (workerSocket: Socket) => {
      // Optional pre-stage: run the full opportunities refresh first.
      const fullRefresh = opts?.runFullRefresh;
      if (fullRefresh) {
        workerLog(
          workerSocket,
          "[active-refresh] Starting full opportunities refresh stage",
        );
        await fullRefresh();
        workerLog(
          workerSocket,
          "[active-refresh] Completed full opportunities refresh stage",
        );
      }
      // Cross-process lock: only one active refresh may run at a time.
      const LOCK_KEY = "worker-lock:cache:opportunities:refresh:active";
      const token = `${process.pid}:${Date.now()}:${Math.random()}`;
      const ttlMs = Number(Bun.env.ACTIVE_REFRESH_LOCK_TTL_MS ?? "1800000");
      const acquired =
        (await redis.set(LOCK_KEY, token, "PX", ttlMs, "NX")) === "OK";
      if (!acquired) {
        workerLog(
          workerSocket,
          `[active-refresh] Skipping run: lock already held (${LOCK_KEY})`,
          "WARN",
        );
        return;
      }
      try {
        await performActiveOpportunityRefresh(workerSocket);
      } finally {
        // Release only if we still own the lock (the TTL may have expired
        // and another process may have re-acquired it in the meantime).
        if ((await redis.get(LOCK_KEY)) === token) {
          await redis.del(LOCK_KEY);
        }
      }
    },
  );
}
/**
 * Core logic for active opportunity cache refresh.
 *
 * Queries all opportunities, checks which cache keys have expired, and
 * re-fetches from ConnectWise only for expired entries.
 *
 * Flow:
 *  1. Load every opportunity (plus its company CW id) from the database.
 *  2. Resolve a per-opportunity TTL via computeCacheTTL; a null adaptive TTL
 *     marks the opportunity as archived and falls back to TTL_ARCHIVED_MS.
 *  3. Batch-check Redis key existence with a single pipeline.
 *  4. Build refresh tasks only for missing keys and run them through a
 *     bounded continuous worker pool.
 *
 * @param workerSocket - Worker socket used for logging and global events
 */
async function performActiveOpportunityRefresh(
  workerSocket: Socket,
): Promise<void> {
  // Load the full opportunity set (newest-updated first) with company CW ids.
  const opportunities = await prisma.opportunity.findMany({
    select: {
      cwOpportunityId: true,
      closedFlag: true,
      closedDate: true,
      expectedCloseDate: true,
      cwLastUpdated: true,
      statusCwId: true,
      company: { select: { cw_CompanyId: true } },
    },
    orderBy: { cwLastUpdated: "desc" },
  });
  workerLog(
    workerSocket,
    `[active-refresh] Starting refresh for ${opportunities.length} opportunities`,
  );
  emitWorkerGlobalEvent(workerSocket, "cache:opportunities:refresh:started", {
    totalOpportunities: opportunities.length,
  });
  // Success counters; incremented inside the refresh tasks built below.
  let activitiesRefreshed = 0;
  let companiesRefreshed = 0;
  let notesRefreshed = 0;
  let contactsRefreshed = 0;
  let productsRefreshed = 0;
  let oppCwDataRefreshed = 0;
  let archivedCount = 0;
  // Per-opportunity TTL resolution; every opportunity is eligible (archived
  // ones simply get the fixed archived TTL).
  const eligibleOpportunities: Array<{
    cwOpportunityId: number;
    ttl: number;
    companyId: number | null;
  }> = [];
  const companyTtlById = new Map<number, number>();
  for (const opp of opportunities) {
    const adaptiveTtl = computeCacheTTL({
      closedFlag: opp.closedFlag,
      closedDate: opp.closedDate,
      expectedCloseDate: opp.expectedCloseDate,
      lastUpdated: opp.cwLastUpdated,
    });
    const ttl = adaptiveTtl ?? TTL_ARCHIVED_MS;
    if (adaptiveTtl === null) archivedCount++;
    const companyId = opp.company?.cw_CompanyId ?? null;
    eligibleOpportunities.push({
      cwOpportunityId: opp.cwOpportunityId,
      ttl,
      companyId,
    });
    if (companyId === null) continue;
    // A company may back several opportunities; keep the longest TTL seen so
    // the shared company cache outlives its longest-lived opportunity.
    const prevTtl = companyTtlById.get(companyId) ?? 0;
    companyTtlById.set(companyId, Math.max(prevTtl, ttl));
  }
  // Batch-check which keys already exist via a single pipeline.
  // One EXISTS command per key avoids Redis EXISTS multi-key count semantics.
  const pipeline = redis.pipeline();
  for (const opp of eligibleOpportunities) {
    pipeline.exists(`opp:cw-data:${opp.cwOpportunityId}`);
    pipeline.exists(`opp:activities:${opp.cwOpportunityId}`);
    pipeline.exists(`opp:notes:${opp.cwOpportunityId}`);
    pipeline.exists(`opp:contacts:${opp.cwOpportunityId}`);
    pipeline.exists(`opp:products:${opp.cwOpportunityId}`);
  }
  for (const companyId of Array.from(companyTtlById.keys())) {
    pipeline.exists(companyCwCacheKey(companyId));
  }
  const existsResults = (await pipeline.exec()) || [];
  // Pipeline replies are [error, result] tuples; result is 1 when the key exists.
  const existsAt = (index: number): boolean => {
    const value = existsResults[index]?.[1];
    return typeof value === "number" && value > 0;
  };
  // Consume replies in exactly the order the EXISTS commands were enqueued:
  // five per opportunity, then one per unique company.
  let existsIndex = 0;
  const oppExistsById = new Map<
    number,
    {
      oppCwDataExists: boolean;
      activitiesExists: boolean;
      notesExists: boolean;
      contactsExists: boolean;
      productsExists: boolean;
    }
  >();
  for (const opp of eligibleOpportunities) {
    oppExistsById.set(opp.cwOpportunityId, {
      oppCwDataExists: existsAt(existsIndex++),
      activitiesExists: existsAt(existsIndex++),
      notesExists: existsAt(existsIndex++),
      contactsExists: existsAt(existsIndex++),
      productsExists: existsAt(existsIndex++),
    });
  }
  const companyCacheExistsById = new Map<number, boolean>();
  for (const companyId of Array.from(companyTtlById.keys())) {
    companyCacheExistsById.set(companyId, existsAt(existsIndex++));
  }
  // Build one task per missing cache key; each task traps its own errors so
  // one failed fetch never aborts the whole refresh.
  const refreshTasks: (() => Promise<void>)[] = [];
  let plannedOppCwData = 0;
  let plannedActivities = 0;
  let plannedNotes = 0;
  let plannedContacts = 0;
  let plannedProducts = 0;
  let plannedCompanies = 0;
  for (const opp of eligibleOpportunities) {
    const existsForOpp = oppExistsById.get(opp.cwOpportunityId);
    if (!existsForOpp) continue;
    if (!existsForOpp.oppCwDataExists) {
      plannedOppCwData++;
      refreshTasks.push(async () => {
        try {
          await fetchAndCacheOppCwData(opp.cwOpportunityId, opp.ttl);
          oppCwDataRefreshed++;
        } catch (error) {
          workerLog(
            workerSocket,
            `[active-refresh] oppCwData refresh failed for opp${opp.cwOpportunityId}: ${describeError(error)}`,
          );
        }
      });
    }
    if (!existsForOpp.activitiesExists) {
      plannedActivities++;
      refreshTasks.push(async () => {
        try {
          await fetchAndCacheActivities(opp.cwOpportunityId, opp.ttl);
          activitiesRefreshed++;
        } catch (error) {
          workerLog(
            workerSocket,
            `[active-refresh] activities refresh failed for opp${opp.cwOpportunityId}: ${describeError(error)}`,
          );
        }
      });
    }
    if (!existsForOpp.notesExists) {
      plannedNotes++;
      refreshTasks.push(async () => {
        try {
          await fetchAndCacheNotes(opp.cwOpportunityId, opp.ttl);
          notesRefreshed++;
        } catch (error) {
          workerLog(
            workerSocket,
            `[active-refresh] notes refresh failed for opp${opp.cwOpportunityId}: ${describeError(error)}`,
          );
        }
      });
    }
    if (!existsForOpp.contactsExists) {
      plannedContacts++;
      refreshTasks.push(async () => {
        try {
          await fetchAndCacheContacts(opp.cwOpportunityId, opp.ttl);
          contactsRefreshed++;
        } catch (error) {
          workerLog(
            workerSocket,
            `[active-refresh] contacts refresh failed for opp${opp.cwOpportunityId}: ${describeError(error)}`,
          );
        }
      });
    }
    if (!existsForOpp.productsExists) {
      plannedProducts++;
      refreshTasks.push(async () => {
        try {
          await fetchAndCacheProducts(opp.cwOpportunityId, opp.ttl);
          productsRefreshed++;
        } catch (error) {
          workerLog(
            workerSocket,
            `[active-refresh] products refresh failed for opp${opp.cwOpportunityId}: ${describeError(error)}`,
          );
        }
      });
    }
  }
  // Company cache tasks use the max TTL collected across that company's
  // opportunities.
  for (const [companyId, ttl] of Array.from(companyTtlById.entries())) {
    const companyExists = companyCacheExistsById.get(companyId) ?? false;
    if (companyExists) continue;
    plannedCompanies++;
    refreshTasks.push(async () => {
      try {
        await fetchAndCacheCompanyCwData(companyId, ttl);
        companiesRefreshed++;
      } catch (error) {
        workerLog(
          workerSocket,
          `[active-refresh] company data refresh failed for company${companyId}: ${describeError(error)}`,
        );
      }
    });
  }
  if (companyTtlById.size > 0) {
    const missingCompanies = Array.from(companyTtlById.keys()).filter(
      (id) => !(companyCacheExistsById.get(id) ?? false),
    ).length;
    workerLog(
      workerSocket,
      `[active-refresh] Company cache checks: ${companyTtlById.size} unique, ${missingCompanies} missing`,
    );
  }
  workerLog(
    workerSocket,
    `[active-refresh] Planned tasks: eligible=${eligibleOpportunities.length}, archived=${archivedCount}, totalTasks=${refreshTasks.length}, oppCwData=${plannedOppCwData}, activities=${plannedActivities}, notes=${plannedNotes}, contacts=${plannedContacts}, products=${plannedProducts}, companies=${plannedCompanies}`,
  );
  if (refreshTasks.length === 0) {
    workerLog(workerSocket, `[active-refresh] No cache keys needed refresh`);
  }
  // Run refresh tasks via a continuous worker pool (no inter-batch idle waits).
  const parsedConcurrency = Number(Bun.env.ACTIVE_REFRESH_CONCURRENCY ?? "12");
  const CONCURRENCY = Number.isFinite(parsedConcurrency)
    ? Math.max(1, Math.floor(parsedConcurrency))
    : 12;
  const progressEvery = Math.max(
    1,
    Number(Bun.env.ACTIVE_REFRESH_PROGRESS_EVERY ?? "50") || 50,
  );
  workerLog(
    workerSocket,
    `[active-refresh] Runner config: concurrency=${CONCURRENCY}, progressEvery=${progressEvery}`,
    "DEBUG",
  );
  let completedTasks = 0;
  // failedTasks only counts rejections that escape a task's own try/catch —
  // a defensive backstop, normally zero.
  let failedTasks = 0;
  let nextTaskIndex = 0;
  const runWorker = async () => {
    while (true) {
      // Single-threaded event loop: this read-then-increment is not a data
      // race across the pooled workers.
      const taskIndex = nextTaskIndex;
      nextTaskIndex++;
      const task = refreshTasks[taskIndex];
      if (!task) return;
      try {
        await task();
      } catch (error) {
        failedTasks++;
        workerLog(
          workerSocket,
          `[active-refresh] task ${taskIndex + 1}/${refreshTasks.length} failed: ${describeError(error)}`,
        );
      }
      completedTasks++;
      const shouldLogProgress =
        completedTasks % progressEvery === 0 ||
        completedTasks === refreshTasks.length;
      if (shouldLogProgress) {
        workerLog(
          workerSocket,
          `[active-refresh] Progress: completedTasks=${completedTasks}/${refreshTasks.length}, failedTasks=${failedTasks}`,
          "DEBUG",
        );
      }
    }
  };
  // Spawn at most CONCURRENCY pooled workers, but never more than one when
  // there are zero tasks (the lone worker exits immediately).
  await Promise.all(
    Array.from(
      { length: Math.min(CONCURRENCY, Math.max(1, refreshTasks.length)) },
      () => runWorker(),
    ),
  );
  if (failedTasks > 0) {
    workerLog(
      workerSocket,
      `[active-refresh] ${failedTasks} task(s) encountered errors`,
    );
  }
  emitWorkerGlobalEvent(workerSocket, "cache:opportunities:refresh:completed", {
    totalOpportunities: opportunities.length,
    activitiesRefreshed,
    companiesRefreshed,
    notesRefreshed,
    contactsRefreshed,
    productsRefreshed,
    oppCwDataRefreshed,
    archivedCount,
  });
  workerLog(
    workerSocket,
    `[active-refresh] Completed: ${activitiesRefreshed} activities, ${notesRefreshed} notes, ${contactsRefreshed} contacts, ${productsRefreshed} products, ${oppCwDataRefreshed} opp cw data, ${companiesRefreshed} companies, ${archivedCount} archived`,
  );
}
/**
 * Build a concise error description for logging.
 *
 * Axios errors are summarized as "METHOD url -> code-or-status (message)";
 * other Error-like objects fall back to their message, and non-objects to
 * their String() form.
 */
function describeError(err: unknown): string {
  if (typeof err !== "object" || err === null) return String(err);
  const e = err as Record<string, any>;
  if (!e.isAxiosError) return e.message ?? String(err);
  const method = (e.config?.method ?? "?").toUpperCase();
  const url = e.config?.url ?? "unknown";
  // Prefer the transport-level error code; fall back to the HTTP status.
  const detail = (e.code ?? "") || `HTTP ${e.response?.status ?? ""}`;
  return `${method} ${url} -> ${detail} (${e.message})`;
}
@@ -0,0 +1,375 @@
import { Socket } from "socket.io-client";
import { createWorkerJob, workerLog } from "../jobFactory";
import { WorkerQueue } from "../queues";
import {
TTL_ARCHIVED_MS,
fetchAndCacheActivities,
fetchAndCacheNotes,
fetchAndCacheContacts,
fetchAndCacheProducts,
fetchAndCacheOppCwData,
fetchAndCacheCompanyCwData,
companyCwCacheKey,
} from "../../cache/opportunityCache";
import { prisma, redis } from "../../../constants";
/** Options controlling the archived-opportunity cache refresh behavior. */
interface ArchiveRefreshOptions {
  /**
   * When true, overwrite every cache key without checking if it exists.
   * Used for midnight rebuild to ensure all keys are fresh.
   *
   * When false, only populate missing keys. Used on startup to avoid
   * large CW bursts on every process restart.
   *
   * Defaults to false.
   */
  force?: boolean;
}
/**
 * Worker factory for archived opportunity cache refresh.
 *
 * Refreshes cache for opportunities that are closed more than 30 days ago.
 * These opportunities fall outside the adaptive TTL window and are rebuilt
 * with a fixed 24-hour TTL.
 *
 * Typically called once per day at midnight (with force=true) to ensure
 * archived deals are not stale. On startup, force=false to avoid large
 * CW bursts. A Redis lock guarantees at most one concurrent run across
 * processes.
 *
 * @param socket - Socket.IO client connection to manager
 * @param options - Configuration options (force, etc.)
 * @returns Promise that resolves when refresh completes
 */
export async function refreshArchivedOpportunitiesWorker(
  socket: Socket,
  options: ArchiveRefreshOptions = {},
): Promise<void> {
  return createWorkerJob(
    socket,
    WorkerQueue.REFRESH_ARCHIVED_OPPORTUNITIES,
    async (workerSocket: Socket) => {
      // Cross-process lock: only one archived refresh may run at a time.
      const LOCK_KEY = "worker-lock:cache:opportunities:refresh:archived";
      const token = `${process.pid}:${Date.now()}:${Math.random()}`;
      const ttlMs = Number(
        Bun.env.ARCHIVED_REFRESH_LOCK_TTL_MS ?? "10800000",
      );
      const acquired =
        (await redis.set(LOCK_KEY, token, "PX", ttlMs, "NX")) === "OK";
      if (!acquired) {
        workerLog(
          workerSocket,
          `[archived-refresh] Skipping run: lock already held (${LOCK_KEY})`,
          "WARN",
        );
        return;
      }
      try {
        await performArchivedOpportunityRefresh(
          workerSocket,
          options.force ?? false,
        );
      } finally {
        // Release only if we still own the lock (the TTL may have expired
        // and another process may have re-acquired it in the meantime).
        if ((await redis.get(LOCK_KEY)) === token) {
          await redis.del(LOCK_KEY);
        }
      }
    },
  );
}
/**
 * Core logic for archived opportunity cache refresh.
 *
 * Queries opportunities closed more than 30 days ago (or with no closed
 * date) and refreshes their cache with a fixed 24-hour TTL.
 *
 * Flow:
 *  1. Load archived opportunities plus their unique company CW ids.
 *  2. Determine which cache keys to (re)build: all of them when `force`,
 *     otherwise only the keys missing from Redis (checked via one pipeline).
 *  3. Build per-key refresh tasks and run them in fixed-size batches with a
 *     short delay between batches to limit ConnectWise load.
 *
 * @param workerSocket - Worker socket for logging
 * @param force - If true, refresh all keys. If false, only refresh missing keys.
 */
async function performArchivedOpportunityRefresh(
  workerSocket: Socket,
  force: boolean,
): Promise<void> {
  const thirtyDaysAgo = new Date(Date.now() - 30 * 24 * 60 * 60 * 1000);
  // Archived = closed, and either closed more than 30 days ago or missing a
  // closed date entirely.
  const opportunities = await prisma.opportunity.findMany({
    where: {
      closedFlag: true,
      OR: [{ closedDate: { lt: thirtyDaysAgo } }, { closedDate: null }],
    },
    select: {
      cwOpportunityId: true,
      company: { select: { cw_CompanyId: true } },
    },
    orderBy: { cwLastUpdated: "desc" },
  });
  const label = force ? "midnight rebuild" : "startup warm";
  workerLog(
    workerSocket,
    `[archived-refresh] Starting ${label} for ${opportunities.length} archived opportunities`,
  );
  if (opportunities.length === 0) {
    workerLog(
      workerSocket,
      `[archived-refresh] No archived opportunities found`,
    );
    return;
  }
  // Deduplicate company ids; several opportunities can share one company.
  const uniqueCompanyIds = Array.from(
    new Set(
      opportunities
        .map((opp) => opp.company?.cw_CompanyId)
        .filter((id): id is number => id !== undefined),
    ),
  );
  const oppMissingById = new Map<
    number,
    {
      oppCwDataMissing: boolean;
      activitiesMissing: boolean;
      notesMissing: boolean;
      contactsMissing: boolean;
      productsMissing: boolean;
    }
  >();
  const companyMissingById = new Map<number, boolean>();
  if (force) {
    // Force mode: treat every key as missing so everything is rebuilt.
    for (const opp of opportunities) {
      oppMissingById.set(opp.cwOpportunityId, {
        oppCwDataMissing: true,
        activitiesMissing: true,
        notesMissing: true,
        contactsMissing: true,
        productsMissing: true,
      });
    }
    for (const companyId of uniqueCompanyIds) {
      companyMissingById.set(companyId, true);
    }
  } else {
    // Warm mode: batch-check key existence with a single Redis pipeline.
    const pipeline = redis.pipeline();
    for (const opp of opportunities) {
      pipeline.exists(`opp:cw-data:${opp.cwOpportunityId}`);
      pipeline.exists(`opp:activities:${opp.cwOpportunityId}`);
      pipeline.exists(`opp:notes:${opp.cwOpportunityId}`);
      pipeline.exists(`opp:contacts:${opp.cwOpportunityId}`);
      pipeline.exists(`opp:products:${opp.cwOpportunityId}`);
    }
    for (const companyId of uniqueCompanyIds) {
      pipeline.exists(companyCwCacheKey(companyId));
    }
    const results = (await pipeline.exec()) || [];
    // Pipeline replies are [error, result] tuples; result is 1 when the key
    // exists.
    const existsAt = (index: number): boolean => {
      const value = results[index]?.[1];
      return typeof value === "number" && value > 0;
    };
    // Consume replies in exactly the order the EXISTS commands were
    // enqueued: five per opportunity, then one per unique company.
    let idx = 0;
    for (const opp of opportunities) {
      oppMissingById.set(opp.cwOpportunityId, {
        oppCwDataMissing: !existsAt(idx++),
        activitiesMissing: !existsAt(idx++),
        notesMissing: !existsAt(idx++),
        contactsMissing: !existsAt(idx++),
        productsMissing: !existsAt(idx++),
      });
    }
    for (const companyId of uniqueCompanyIds) {
      companyMissingById.set(companyId, !existsAt(idx++));
    }
  }
  // Build one task per missing cache key; each task traps its own errors so
  // one failed fetch never aborts the whole refresh.
  const refreshTasks: (() => Promise<void>)[] = [];
  let plannedOppCwData = 0;
  let plannedActivities = 0;
  let plannedNotes = 0;
  let plannedContacts = 0;
  let plannedProducts = 0;
  let plannedCompanies = 0;
  for (const opp of opportunities) {
    const missing = oppMissingById.get(opp.cwOpportunityId);
    if (!missing) continue;
    if (missing.oppCwDataMissing) {
      plannedOppCwData++;
      refreshTasks.push(async () => {
        try {
          await fetchAndCacheOppCwData(opp.cwOpportunityId, TTL_ARCHIVED_MS);
        } catch (error) {
          workerLog(
            workerSocket,
            `[archived-refresh] oppCwData failed for opp${opp.cwOpportunityId}: ${describeError(error)}`,
          );
        }
      });
    }
    if (missing.activitiesMissing) {
      plannedActivities++;
      refreshTasks.push(async () => {
        try {
          await fetchAndCacheActivities(opp.cwOpportunityId, TTL_ARCHIVED_MS);
        } catch (error) {
          workerLog(
            workerSocket,
            `[archived-refresh] activities failed for opp${opp.cwOpportunityId}: ${describeError(error)}`,
          );
        }
      });
    }
    if (missing.notesMissing) {
      plannedNotes++;
      refreshTasks.push(async () => {
        try {
          await fetchAndCacheNotes(opp.cwOpportunityId, TTL_ARCHIVED_MS);
        } catch (error) {
          workerLog(
            workerSocket,
            `[archived-refresh] notes failed for opp${opp.cwOpportunityId}: ${describeError(error)}`,
          );
        }
      });
    }
    if (missing.contactsMissing) {
      plannedContacts++;
      refreshTasks.push(async () => {
        try {
          await fetchAndCacheContacts(opp.cwOpportunityId, TTL_ARCHIVED_MS);
        } catch (error) {
          workerLog(
            workerSocket,
            `[archived-refresh] contacts failed for opp${opp.cwOpportunityId}: ${describeError(error)}`,
          );
        }
      });
    }
    if (missing.productsMissing) {
      plannedProducts++;
      refreshTasks.push(async () => {
        try {
          await fetchAndCacheProducts(opp.cwOpportunityId, TTL_ARCHIVED_MS);
        } catch (error) {
          workerLog(
            workerSocket,
            `[archived-refresh] products failed for opp${opp.cwOpportunityId}: ${describeError(error)}`,
          );
        }
      });
    }
  }
  for (const companyId of uniqueCompanyIds) {
    // Companies missing from the map default to "missing" (rebuild them).
    const companyMissing = companyMissingById.get(companyId) ?? true;
    if (!companyMissing) continue;
    plannedCompanies++;
    refreshTasks.push(async () => {
      try {
        await fetchAndCacheCompanyCwData(companyId, TTL_ARCHIVED_MS);
      } catch (error) {
        workerLog(
          workerSocket,
          `[archived-refresh] company data failed for company${companyId}: ${describeError(error)}`,
        );
      }
    });
  }
  workerLog(
    workerSocket,
    `[archived-refresh] Planned tasks (${label}): opportunities=${opportunities.length}, totalTasks=${refreshTasks.length}, oppCwData=${plannedOppCwData}, activities=${plannedActivities}, notes=${plannedNotes}, contacts=${plannedContacts}, products=${plannedProducts}, companies=${plannedCompanies}, uniqueCompanies=${uniqueCompanyIds.length}`,
  );
  if (refreshTasks.length === 0) {
    workerLog(
      workerSocket,
      `[archived-refresh] No cache keys needed refresh (${label})`,
    );
    return;
  }
  // Run with bounded concurrency and inter-batch delay
  const CONCURRENCY = 6;
  const BATCH_DELAY_MS = 250;
  // failCount counts rejections escaping a task's own try/catch — a
  // defensive backstop, normally zero. NOTE(review): completedTasks is not
  // advanced for a failed batch, so progress totals can undercount then.
  let failCount = 0;
  let completedTasks = 0;
  const totalBatches = Math.ceil(refreshTasks.length / CONCURRENCY);
  for (let i = 0; i < refreshTasks.length; i += CONCURRENCY) {
    const batch = refreshTasks.slice(i, i + CONCURRENCY);
    const batchNumber = Math.floor(i / CONCURRENCY) + 1;
    try {
      await Promise.all(batch.map((task) => task()));
      completedTasks += batch.length;
      const shouldLogProgress =
        totalBatches <= 3 ||
        batchNumber % 5 === 0 ||
        batchNumber === totalBatches;
      if (shouldLogProgress) {
        workerLog(
          workerSocket,
          `[archived-refresh] Progress: batch ${batchNumber}/${totalBatches}, completedTasks=${completedTasks}/${refreshTasks.length}`,
          "DEBUG",
        );
      }
    } catch (error) {
      failCount++;
      workerLog(
        workerSocket,
        `[archived-refresh] error in batch at index ${i}: ${describeError(error)}`,
      );
    }
    if (i + CONCURRENCY < refreshTasks.length) {
      await new Promise((resolve) => setTimeout(resolve, BATCH_DELAY_MS));
    }
  }
  if (failCount > 0) {
    workerLog(
      workerSocket,
      `[archived-refresh] ${failCount} batch(es) encountered errors`,
    );
  }
  workerLog(
    workerSocket,
    `[archived-refresh] Completed (${label}): ${opportunities.length} archived opportunities, ${refreshTasks.length} tasks`,
  );
}
/**
 * Build a concise error description for logging.
 *
 * Axios errors are summarized as "METHOD url -> code-or-status (message)";
 * other Error-like objects fall back to their message, and non-objects to
 * their String() form.
 */
function describeError(err: unknown): string {
  if (typeof err !== "object" || err === null) return String(err);
  const e = err as Record<string, any>;
  if (e.isAxiosError) {
    const method = (e.config?.method ?? "?").toUpperCase();
    const url = e.config?.url ?? "unknown";
    const code = e.code ?? "";
    const status = e.response?.status ?? "";
    // Fix: restore the " -> " separator so the URL and error code no longer
    // run together (was `${url}${code…}`, producing e.g. "GET urlECONNRESET").
    // This also matches the describeError used by the active-refresh worker.
    return `${method} ${url} -> ${code || `HTTP ${status}`} (${e.message})`;
  }
  return e.message ?? String(err);
}
+106
View File
@@ -0,0 +1,106 @@
import { Server } from "socket.io";
import { events, EventTypes } from "../globalEvents";
import { WorkerQueue } from "./queues";
import { reserveWorkerId } from "../../workert";
/**
 * Re-emit a typed global event on the shared in-process event bus.
 * A thin wrapper that preserves the event-name-to-payload typing.
 */
function emitGlobalEvent<K extends keyof EventTypes>(
  name: K,
  ...payload: Parameters<EventTypes[K]>
) {
  events.emit(name, ...payload);
}
/**
 * Start the Socket.IO server the manager uses to communicate with workers.
 *
 * Listens on port 8671. On each connection it serves "requestId" requests:
 * it reserves a worker ID for the requested queue, creates a dedicated
 * namespace "/worker-{id}", and wires that namespace's "log",
 * "globalEvent", and "workerError" messages into the in-process event bus.
 */
export const startCommsServer = () => {
  const ioServer = new Server(8671);
  ioServer.on("connection", (socket) => {
    events.emit("worker:io:connection", { socket });
    socket.on(
      "requestId",
      async (
        queueType: WorkerQueue,
        callback: (workerId: string | null, error?: string) => void,
      ) => {
        let workerId: string;
        try {
          workerId = await reserveWorkerId(queueType);
        } catch (errorData) {
          // Reservation failed: report via the event bus and tell the
          // requester there is no worker ID.
          events.emit("worker:error", {
            error: errorData,
            context: "failed to reserve PgBoss worker ID",
          });
          callback(null, "failed to reserve PgBoss worker ID");
          return;
        }
        // Create (or reuse) the per-worker namespace, then hand the ID back
        // so the worker can connect to it.
        const workerNamespace = ioServer.of(`/worker-${workerId}`);
        callback(workerId);
        workerNamespace.on("connection", (workerSocket) => {
          // Per-connection log buffer. NOTE(review): only appended to here —
          // not read anywhere in this handler; confirm intended use.
          let workerLogs: {
            message: string;
            timestamp: string;
          }[] = [];
          workerSocket.on(
            "log",
            (data: { message: string; level?: string } | string) => {
              // Accept both bare-string and {message, level} payloads;
              // level defaults to INFO.
              const message = typeof data === "string" ? data : data.message;
              const level = (
                typeof data === "object" ? (data.level ?? "INFO") : "INFO"
              ) as import("../globalEvents").JobLogLevel;
              workerLogs.push({
                message,
                timestamp: new Date().toISOString(),
              });
              events.emit("job:log", {
                message,
                level,
                timestamp: new Date().toISOString(),
                workerId,
                queueType: queueType,
              });
            },
          );
          // Forward worker-originated global events onto the local bus.
          workerSocket.on(
            "globalEvent",
            (
              name: keyof EventTypes,
              data: Parameters<EventTypes[keyof EventTypes]>[0],
            ) => {
              emitGlobalEvent(name, data as never);
            },
          );
          // Surface worker-reported errors as job:error events, tagged with
          // the worker's ID and queue.
          workerSocket.on(
            "workerError",
            (errorData: { error: unknown; context?: string }) => {
              events.emit("job:error", {
                ...errorData,
                workerId,
                queueType,
              });
            },
          );
        });
      },
    );
    socket.on("disconnect", () => {
      events.emit("worker:io:disconnect", { socket });
    });
  });
};
/**
* 1. Worker connects to socket server
 * 2. Worker requests worker ID from server
* 3. Server generates worker ID and sends to worker
* 4. Worker connects to ID Namespace with worker ID
* 5. Server listens for messages on worker ID Namespace
* 6. Worker sends message to server with real time updates on events being run.
* 6a. Server can send messages to worker to trigger events or request status updates.
* 6b. Worker can emit globalEvents through server to consolidate logging and trigger cross-worker events.
* 7. Worker disconnects from socket server when process exits
*/
+149
View File
@@ -0,0 +1,149 @@
import { Socket, io } from "socket.io-client";
import { WorkerQueue } from "./queues";
import { events, EventTypes, JobLogLevel } from "../globalEvents";
/**
 * Emit a global event both on the local in-process bus and through the
 * worker socket so the manager can consolidate it.
 *
 * @param workerSocket - Socket connected to this worker's namespace
 * @param eventName - Global event name
 * @param data - Event payload (first tuple element of the event signature)
 */
export function emitWorkerGlobalEvent<K extends keyof EventTypes>(
  workerSocket: Socket,
  eventName: K,
  data: Parameters<EventTypes[K]>[0],
): void {
  // Local emit first so in-process listeners see the event immediately.
  events.emit(eventName, ...([data] as Parameters<EventTypes[K]>));
  // Then forward to the manager over the worker namespace.
  workerSocket.emit("globalEvent", eventName, data);
}
/**
* Generic job factory that manages worker lifecycle.
*
* Handles:
* 1. Requesting a worker ID from the manager
* 2. Connecting worker socket to the allocated namespace
* 3. Running the work function
* 4. Emitting errors via socket if work fails
* 5. Returning a Promise that resolves when the work completes
*
* @param socket - The Socket.IO client connection to the manager
* @param queueType - The queue type for this job
* @param workFn - Async function that performs the actual work. Receives the worker socket for logging/events.
* @returns Promise that resolves when work completes, or rejects if work fails
*/
export async function createWorkerJob<T>(
  socket: Socket,
  queueType: WorkerQueue,
  workFn: (workerSocket: Socket) => Promise<T>,
): Promise<T> {
  return new Promise((resolve, reject) => {
    // Request a worker ID and namespace from the manager
    socket.emit(
      "requestId",
      queueType,
      (workerId: string | null, requestError?: string) => {
        if (!workerId) {
          // Manager declined or failed the reservation; surface the reason
          // (or a generic message) and fail the job promise.
          const error = new Error(
            requestError ?? "failed to receive worker ID",
          );
          events.emit("job:error", {
            error,
            context: "worker id reservation",
            workerId: "pending",
            queueType,
          });
          reject(error);
          return;
        }
        // Connect to the worker-specific namespace
        // NOTE(review): reconnection is disabled — a dropped connection after
        // connect fails via the work function rather than retrying; if
        // neither "connect" nor "connect_error" fires, this promise never
        // settles. Confirm that is acceptable.
        const workerSocket = io(`http://localhost:8671/worker-${workerId}`, {
          reconnection: false,
        });
        workerSocket.on("connect", async () => {
          const startedAt = Date.now();
          // Emit synchronously BEFORE workFn so the flow + prefix map exist
          // before any global events fired inside workFn arrive.
          emitWorkerGlobalEvent(workerSocket, "job:started", {
            workerId,
            queueType,
          });
          workerLog(
            workerSocket,
            `[job] started queue=${queueType} workerId=${workerId}`,
            "DEBUG",
          );
          try {
            // Run the work function, passing the worker socket for logging
            const result = await workFn(workerSocket);
            workerLog(
              workerSocket,
              `[job] completed queue=${queueType} workerId=${workerId} durationMs=${Date.now() - startedAt}`,
              "DEBUG",
            );
            // Emit finished event before disconnecting
            emitWorkerGlobalEvent(workerSocket, "job:finished", {
              workerId,
              queueType,
            });
            // Cleanup
            workerSocket.disconnect();
            resolve(result);
          } catch (error) {
            workerLog(
              workerSocket,
              `[job] failed queue=${queueType} workerId=${workerId} durationMs=${Date.now() - startedAt}`,
              "ERROR",
            );
            // Emit error through socket so it's logged on manager
            workerSocket.emit("workerError", {
              error,
              context: "work function execution",
            });
            // Emit error event
            emitWorkerGlobalEvent(workerSocket, "job:error", {
              error,
              context: "worker job",
              workerId,
              queueType,
            });
            // Cleanup
            workerSocket.disconnect();
            reject(error);
          }
        });
        workerSocket.on("connect_error", (err) => {
          // Namespace connection failed outright; report and fail the job.
          emitWorkerGlobalEvent(workerSocket, "job:error", {
            error: err,
            context: "worker socket connection",
            workerId,
            queueType,
          });
          reject(err);
        });
      },
    );
  });
}
/**
 * Emit a log line from within a worker job to the manager.
 *
 * @param workerSocket - The worker socket connection
 * @param message - Log message to emit
 * @param level - Log level (default: INFO)
 */
export function workerLog(
  workerSocket: Socket,
  message: string,
  level: JobLogLevel = "INFO",
): void {
  const payload = { message, level };
  workerSocket.emit("log", payload);
}
+7
View File
@@ -0,0 +1,7 @@
/**
 * Queue identifiers for worker jobs. The string values are the routing
 * paths used when jobs are scheduled and worker IDs reserved.
 */
export enum WorkerQueue {
  /** Reservation of a per-worker namespace / worker ID. */
  WORKER_NAMESPACE_RESERVATION = "workers/namespace/reservation",
  /** ConnectWise companies refresh job. */
  REFRESH_COMPANIES = "cw/companies/refresh",
  /** ConnectWise opportunities refresh job. */
  REFRESH_OPPORTUNITIES = "cw/opportunities/refresh",
  /** Cache refresh for active/recent opportunities (adaptive TTL). */
  REFRESH_ACTIVE_OPPORTUNITIES = "cache/opportunities/refresh/active",
  /** Cache refresh for archived opportunities (fixed archived TTL). */
  REFRESH_ARCHIVED_OPPORTUNITIES = "cache/opportunities/refresh/archived",
}