Compare commits

..

37 Commits

Author SHA1 Message Date
HoloPanio a7979eba73 Update README.md
API - Tests / Test (push) Failing after 39s
Dalpuri - Tests / Test (push) Failing after 1s
UI - Tests / Test (push) Failing after 3h14m37s
Forced Commit for testing
2026-05-06 05:16:53 +00:00
HoloPanio 5141ed20f9 chore(global): remove a bunch of test and temp files 2026-04-17 22:34:41 +00:00
HoloPanio a8c48e8c75 fix: correct prisma client import path in setup-admin 2026-04-14 03:18:58 +00:00
HoloPanio 051edb5f78 chore: add setup-admin script 2026-04-14 03:06:39 +00:00
HoloPanio f87f6dd336 chore: add setup-admin dockerfile stage 2026-04-14 02:52:51 +00:00
HoloPanio 2eb387811d fix(worker): break circular import by extracting PgBoss singleton
incremental-sync.ts and api/cw/sync.ts imported getBoss() from workert.ts.
When workert.ts (the entry point) dynamically imported incremental-sync.ts,
it triggered a circular module re-evaluation that hung indefinitely.

Extract the PgBoss singleton and getBoss() factory to a new boss-instance.ts
module that neither has top-level async side-effects nor imports from
workert.ts. All consumers (workert.ts, index.ts, incremental-sync.ts,
cw/sync.ts) now import from boss-instance.ts instead.
2026-04-14 00:34:33 +00:00
HoloPanio db27c9224d fix(worker): add granular debug logging to isolate startup hang
Add console.log before/after each createQueue() call and dynamic
import to pinpoint exactly where the worker startup is blocking.
2026-04-14 00:12:20 +00:00
HoloPanio 7f6e6fdfbc fix(worker): add PgBoss startup timeouts and debug logging
- Add statement_timeout=30000ms to PgBoss connection URL to prevent
  SQL queries from hanging indefinitely
- Add connectionTimeoutMillis=15s to PgBoss config for connection timeout
- Wrap boss.start() in 30s Promise.race timeout with process.exit(1)
  on failure to ensure container restarts instead of hanging silently
- Add debug logging around PgBoss startup to diagnose connection issues
2026-04-13 23:53:32 +00:00
HoloPanio 5f5f610060 fix: remove prisma/config import; use plain export in prisma.config.ts 2026-04-13 21:35:34 +00:00
HoloPanio 809841d672 fix: add url = env(DATABASE_URL) to prisma schema datasource 2026-04-13 21:31:43 +00:00
HoloPanio 276eb563bf fix: remove prisma.config.ts from runtime image (use defaults) 2026-04-13 21:26:40 +00:00
HoloPanio 7624ba0bc0 fix: add bunx symlink to runtime Docker image 2026-04-13 21:18:01 +00:00
HoloPanio 1063231107 chore: update bun.lock (@types/bun 1.3.11 -> 1.3.12) 2026-04-13 21:11:25 +00:00
Jackson 2cd5dee612 Merge pull request #3 from HorizonStackSoftware/copilot/remove-prisma-script
Replace migrate-entrypoint.sh with direct Prisma commands in Dockerfile
2026-04-12 10:37:33 -05:00
copilot-swe-agent[bot] 8ac1cbaf3e chore: replace migrate-entrypoint.sh with direct prisma commands in Dockerfile
Agent-Logs-Url: https://github.com/HorizonStackSoftware/optima/sessions/eb8e2182-3a0d-4a9c-ad4f-4d1d9cf8a923

Co-authored-by: HoloPanio <30759238+HoloPanio@users.noreply.github.com>
2026-04-12 15:34:53 +00:00
Jackson bd7e6a37cd Merge pull request #2 from HorizonStackSoftware/copilot/remove-frozen-lockfile-params
Remove --frozen-lockfile from test workflows
2026-04-12 09:44:53 -05:00
copilot-swe-agent[bot] 4e0799f9d9 Remove --frozen-lockfile from test workflow files
Agent-Logs-Url: https://github.com/HorizonStackSoftware/optima/sessions/8b3e4db9-a1bf-44c4-98fc-3304890cb3f4

Co-authored-by: HoloPanio <30759238+HoloPanio@users.noreply.github.com>
2026-04-12 14:41:14 +00:00
Jackson 223a06ba27 Merge pull request #1 from HorizonStackSoftware/copilot/add-post-build-command-for-migration
Run Prisma migrations automatically on API container startup
2026-04-12 09:38:45 -05:00
copilot-swe-agent[bot] 503657d168 feat: run prisma migrate deploy on api container startup
Agent-Logs-Url: https://github.com/HorizonStackSoftware/optima/sessions/509d6156-c474-457b-9627-82f7b2f13158

Co-authored-by: HoloPanio <30759238+HoloPanio@users.noreply.github.com>
2026-04-12 14:34:58 +00:00
Jackson cf68e281e8 Update bun install commands in Dockerfile
Removed the --frozen-lockfile option from bun install commands in the Dockerfile.
2026-04-12 09:21:27 -05:00
HoloPanio 57b5763d41 fix(opportunity): remove synthetic Contact suffix in contact field 2026-04-10 05:09:40 +00:00
HoloPanio 2bd498a35d fix(sync): use CW watermark incremental path for critical tables 2026-04-10 04:53:57 +00:00
HoloPanio 86d7426e8b fix(sync): harden incremental observability and periodic reconciliation 2026-04-10 04:36:36 +00:00
HoloPanio afe56393e7 fix(sync): restore worker incremental API DB resolution 2026-04-10 04:07:27 +00:00
HoloPanio b2cd26af30 fix(release): unblock deploy workflow image build and desktop rebuild 2026-04-10 03:44:33 +00:00
HoloPanio 0594816ea4 fix(api): include pdfmake Roboto fonts in runtime image 2026-04-10 03:00:31 +00:00
HoloPanio 71fe36c0b8 fix(worker): restore reliable 5s incremental sync cadence 2026-04-10 01:00:04 +00:00
HoloPanio e0d575454e fix(dalpuri): sync CW Members before Users to resolve FK ordering issue
User rows have a FK constraint to CwMember (User_cwMemberId_fkey). Syncing
Users first caused all 140 User upserts to fail since the CwMember table was
empty. This cascade failure then caused all Opportunity upserts to fail because
Opportunity.primarySalesRepId is FK-constrained to User.cwIdentifier.

Fix: reorder steps so CW Members syncs first, then Users.
2026-04-09 01:04:00 +00:00
HoloPanio 32bba31e72 fix(dalpuri): populate locationId and fix closedFlag on opportunities
- Add ownerLevelRecId -> locationId mapping to opportunity translation
- Include soOppStatus in opportunity query and derive closedFlag from
  status.closedFlag (with fallback to legacy oldCloseFlag field)
- Add locationId sanitization guard in both sync.ts and sync-by-table.ts

Note: departmentId is not available in CW SO_Opportunity table and
remains null for synced records.
2026-04-09 00:22:41 +00:00
HoloPanio 1233535b20 fix(dalpuri): populate userIdentifiersByMemberRecId from CwMember table
When no User accounts have cwMemberId linked, the context map was empty
and all opportunities got primarySalesRepId = null. Now also populate
the map from CwMember rows directly (User-linked entries take precedence),
so rep identifiers resolve correctly regardless of user account linkage.
2026-04-08 23:23:51 +00:00
HoloPanio 2c737b22f1 fix(dalpuri): exit(0) after sync completes to release k8s job
Prisma MSSQL adapter keeps connections open after the sync finishes,
preventing the process from exiting naturally. The k8s job was staying
in Running state indefinitely. Call process.exit(0) on success so the
job completes and the GH workflow step passes.
2026-04-08 21:50:52 +00:00
HoloPanio a3bfe9f374 fix(ci): increase dalpuri sync timeout from 30min to 2h
Full initial sync has 500k+ rows across all tables and exceeded the
30-minute activeDeadlineSeconds. Bump both the k8s job deadline and
the kubectl wait timeout to 7200s (2 hours).
2026-04-08 21:19:43 +00:00
HoloPanio a106bb15a8 fix(ci): explicit env vars in dalpuri sync job; add CW_DATABASE_URL to secret
envFrom was loading api-env-secret but CW_DATABASE_URL was absent from the
deployed secret, causing sync.ts to fall back to DATABASE_URL (Postgres) as
the MSSQL connection string -> 'Invalid port number: //optima'.

- Replaced envFrom with explicit CW_DATABASE_URL and API_DATABASE_URL env
  entries so the mapping is unambiguous
- Patched api-env-secret in cluster to add CW_DATABASE_URL
2026-04-08 20:41:49 +00:00
HoloPanio d9a431d99a fix(ci): sync-cw-to-api must wait for migrate-api to complete
Migration must finish before sync runs so the schema exists.
2026-04-08 20:27:05 +00:00
HoloPanio 83377a7d0d feat(ci): run dalpuri CW-to-API sync as a k8s Job before deploy
The CW MSSQL and API Postgres addresses are internal to the cluster and
unreachable from GitHub-hosted runners, so the sync must run inside k8s.

- Add dalpuri-sync Docker stage to api/Dockerfile: installs deps,
  generates both Prisma clients, and runs dalpuri/src/sync.ts
- Add dalpuri/kubernetes/sync-job.yaml: mounts api-env-secret (which
  already contains CW_DATABASE_URL) and maps DATABASE_URL -> API_DATABASE_URL
- build-api job now also pushes optima-dalpuri-sync:TAG image
- sync-cw-to-api CI job replaced with kubectl apply/wait pattern,
  needs [build-api, build-worker], blocks deploy-api and deploy-worker
2026-04-08 20:19:06 +00:00
HoloPanio a81618007c fix(worker): pass socket to enqueueDalpuriFullSync
The socket retrieved from ensureManagerSocketReady() was never passed to
enqueueDalpuriFullSync(), so inside createWorkerJob the socket.emit('requestId')
call crashed with 'TypeError: undefined is not an object (evaluating A.emit)'.

This caused every full sync job to fail immediately, leaving the DB empty.
The 5s incremental sync interval then flooded the queue with 4700+ jobs that
all failed too since there was no data.

Also manually cleared the backlog of 4720 failed/pending incremental jobs and
2 failed full sync jobs from the production queue.
2026-04-08 19:34:33 +00:00
HoloPanio f56c49e242 fix(migrate): handle existing Company/UnifiSite data in catch-up migration
Two bugs in the catch-up migration that only manifest with real production data:

1. Company (4520 rows): uid was added as TEXT NOT NULL DEFAULT '' causing
   all existing rows to get uid='' which makes the PRIMARY KEY constraint
   fail with 'could not create unique index, Key (uid)=() is duplicated'.
   Fix: add uid as nullable, UPDATE uid = id (copies the existing CUID text
   PK into uid), then SET NOT NULL, then swap PK. Also populate the new
   integer id column from cw_CompanyId (which is fully populated in prod).

2. UnifiSite (180 rows): old approach just dropped the text companyId and
   added a null integer column, destroying all company relationships.
   Fix: add companyId_int, UPDATE via JOIN on Company.uid (= old Company.id
   text), drop old text column, rename integer column.

Also fix the P3009 handler in migrate-entrypoint.sh: Prisma may emit ANSI
color codes even without a TTY, wrapping backticks in escape sequences and
breaking the regex match. Fix: strip ANSI codes with sed before extracting
the migration name. Also simplify the regex from a rigid format match to a
simpler backtick-content grep.

Production DB manually unblocked (migrate resolve --rolled-back) so the
next deploy will cleanly apply the corrected migration.
2026-04-08 18:07:16 +00:00
48 changed files with 864 additions and 2549 deletions
@@ -1,133 +0,0 @@
name: API - Build and Publish
on:
release:
types: [created]
jobs:
test:
name: Test
runs-on: ubuntu-latest
defaults:
run:
working-directory: api
steps:
- name: Checkout source code
uses: actions/checkout@v4
- name: Setup Bun
uses: oven-sh/setup-bun@v2
with:
bun-version: "1.3.6"
- name: Install dependencies
run: bun install --frozen-lockfile
- name: Generate Prisma client
run: DATABASE_URL="postgresql://dummy:dummy@localhost:5432/dummy" bunx prisma generate
- name: Run tests
run: bun test --preload ./tests/setup.ts
build:
name: Build
needs: [test]
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
steps:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push the Docker image
uses: docker/build-push-action@v6
with:
context: ./api
push: true
target: runtime
tags: |
ghcr.io/project-optima/ttscm-api:latest
ghcr.io/project-optima/ttscm-api:${{ github.event.release.tag_name }}
- name: Build and push the migration image
uses: docker/build-push-action@v6
with:
context: ./api
push: true
target: migration
tags: |
ghcr.io/project-optima/ttscm-api-migrate:latest
ghcr.io/project-optima/ttscm-api-migrate:${{ github.event.release.tag_name }}
migrate:
name: Run Migrations
needs: [build]
runs-on: ubuntu-latest
defaults:
run:
working-directory: api
steps:
- name: Set the Kubernetes context
uses: azure/k8s-set-context@v2
with:
method: kubeconfig
kubeconfig: ${{ secrets.KUBECONFIG }}
- name: Checkout source code
uses: actions/checkout@v4
- name: Delete previous migration job if exists
run: kubectl delete job -n optima -l app=prisma-migrate --ignore-not-found
- name: Apply migration job
run: |
TAG=${{ github.event.release.tag_name }}
sed "s/RELEASE_TAG/${TAG}/g" kubernetes/migration-job.yaml | kubectl apply -f -
- name: Wait for migration to complete
run: |
TAG=${{ github.event.release.tag_name }}
kubectl wait --for=condition=complete --timeout=120s -n optima job/prisma-migrate-${TAG}
deploy:
name: Deploy
needs: [migrate]
runs-on: ubuntu-latest
steps:
- name: Set the Kubernetes context
uses: azure/k8s-set-context@v2
with:
method: kubeconfig
kubeconfig: ${{ secrets.KUBECONFIG }}
- name: Checkout source code
uses: actions/checkout@v4
- name: Lint Kubernetes manifests
uses: azure/k8s-lint@v3
with:
lintType: dryrun
manifests: |
api/kubernetes/deployment.yaml
api/kubernetes/ingress.yaml
namespace: optima
- name: Deploy to the Kubernetes cluster
uses: azure/k8s-deploy@v5
with:
namespace: optima
force: true
skip-tls-verify: true
manifests: |
api/kubernetes/deployment.yaml
api/kubernetes/ingress.yaml
images: |
ghcr.io/project-optima/ttscm-api:${{ github.event.release.tag_name }}
+1 -1
View File
@@ -18,7 +18,7 @@ jobs:
bun-version: "1.3.6"
- name: Install dependencies
run: bun install --frozen-lockfile
run: bun install
- name: Generate API Prisma client
run: DATABASE_URL="postgresql://dummy:dummy@localhost:5432/dummy" bunx prisma generate
+1 -1
View File
@@ -18,7 +18,7 @@ jobs:
bun-version: "1.3.6"
- name: Install dependencies
run: bun install --frozen-lockfile
run: bun install
- name: Generate Dalpuri Prisma client (CW MSSQL)
run: DATABASE_URL="sqlserver://localhost:1433;database=dummy;user=dummy;password=dummy;trustServerCertificate=true" bunx prisma generate
+66 -4
View File
@@ -130,6 +130,17 @@ jobs:
ghcr.io/horizonstacksoftware/optima-api-migrate:latest
ghcr.io/horizonstacksoftware/optima-api-migrate:${{ github.event.release.tag_name }}
- name: Build and push the dalpuri sync image
uses: docker/build-push-action@v6
with:
context: .
file: api/Dockerfile
push: true
target: dalpuri-sync
tags: |
ghcr.io/horizonstacksoftware/optima-dalpuri-sync:latest
ghcr.io/horizonstacksoftware/optima-dalpuri-sync:${{ github.event.release.tag_name }}
build-worker:
name: Build - Worker
needs: [test-api, test-dalpuri, test-ui]
@@ -220,9 +231,10 @@ jobs:
run: bun install --frozen-lockfile
- name: Rebuild native modules
run: npm rebuild
run: npm rebuild --ignore-scripts
env:
HUSKY: "0"
HUSKY_SKIP_INSTALL: "1"
- name: Build macOS distributables
run: bun run make:macos
@@ -261,9 +273,10 @@ jobs:
run: bun install --frozen-lockfile
- name: Rebuild native modules
run: npm rebuild
run: npm rebuild --ignore-scripts
env:
HUSKY: "0"
HUSKY_SKIP_INSTALL: "1"
- name: Build Windows distributables
run: bun run make -- --platform win32
@@ -276,6 +289,55 @@ jobs:
files: |
ui/out/make/**/*.exe
# Runs a full CW → API data sync as a Kubernetes Job (the CW MSSQL and
# API Postgres addresses are internal to the cluster and unreachable from
# GitHub-hosted runners). Waits for both images to be built first and
# must succeed before either the API or worker deploys.
sync-cw-to-api:
name: Sync - CW to API
needs: [migrate-api, build-worker]
runs-on: ubuntu-latest
steps:
- name: Set the Kubernetes context
uses: azure/k8s-set-context@v2
with:
method: kubeconfig
kubeconfig: ${{ secrets.KUBECONFIG }}
- name: Checkout source code
uses: actions/checkout@v4
- name: Delete previous sync job if exists
run: kubectl delete job -n optima -l app=dalpuri-sync --ignore-not-found
- name: Apply sync job
run: |
TAG=${{ github.event.release.tag_name }}
sed "s/RELEASE_TAG/${TAG}/g" dalpuri/kubernetes/sync-job.yaml | kubectl apply -f -
- name: Wait for sync to complete
run: |
TAG=${{ github.event.release.tag_name }}
JOB="job/dalpuri-sync-${TAG}"
kubectl wait --for=condition=complete --timeout=7200s -n optima "$JOB" &
WAIT_COMPLETE=$!
kubectl wait --for=condition=failed --timeout=7200s -n optima "$JOB" &
WAIT_FAILED=$!
wait -n $WAIT_COMPLETE $WAIT_FAILED
echo "--- Sync job logs ---"
kubectl logs -n optima "$JOB" --tail=500 || true
if kubectl get -n optima "$JOB" -o jsonpath='{.status.conditions[?(@.type=="Complete")].status}' | grep -q "True"; then
echo "Sync completed successfully."
exit 0
else
echo "Sync FAILED."
exit 1
fi
# ==========================================================================
# Deploy jobs
# ==========================================================================
@@ -332,7 +394,7 @@ jobs:
deploy-api:
name: Deploy - API
needs: [migrate-api]
needs: [migrate-api, sync-cw-to-api]
runs-on: ubuntu-latest
steps:
- name: Set the Kubernetes context
@@ -402,7 +464,7 @@ jobs:
deploy-worker:
name: Deploy - Worker
needs: [build-worker]
needs: [build-worker, sync-cw-to-api]
runs-on: ubuntu-latest
steps:
- name: Set the Kubernetes context
+1 -1
View File
@@ -21,7 +21,7 @@ jobs:
bun-version: "1.3.11"
- name: Install dependencies
run: bun install --frozen-lockfile
run: bun install
- name: Run unit tests
run: bun run test:unit -- --run
+1
View File
@@ -1,2 +1,3 @@
# optima
The primary repository for Optima.
+62 -6
View File
@@ -17,7 +17,7 @@ COPY dalpuri/package.json ./dalpuri/package.json
COPY ui/package.json ./ui/package.json
COPY patches ./patches
RUN bun install --frozen-lockfile --production
RUN bun install --production
# ---- Stage 2: Build ----
FROM oven/bun:1.3.11 AS build
@@ -32,7 +32,7 @@ COPY ui/package.json ./ui/package.json
COPY patches ./patches
# Install all deps (including dev) for the full workspace
RUN bun install --frozen-lockfile
RUN bun install
# Copy API source and config
COPY api/src/ ./api/src/
@@ -90,6 +90,13 @@ COPY --from=build /app/dalpuri/generated/ ./dalpuri/generated/
# Copy production node_modules (Prisma adapter needs native bindings)
COPY --from=deps /app/node_modules/ ./node_modules/
# Copy bun so prisma migrate deploy can run at container startup
COPY --from=build /usr/local/bin/bun /usr/local/bin/bun
RUN ln -s /usr/local/bin/bun /usr/local/bin/bunx
# Ensure pdfmake Roboto fonts are present at runtime for PDF generation.
COPY --from=build /app/api/node_modules/pdfmake/build/fonts/ ./node_modules/pdfmake/build/fonts/
ENV NODE_ENV=production
# ---- Stage 4: API server runtime image ----
@@ -101,7 +108,7 @@ COPY --from=build /app/api/logo.png ./logo.png
COPY --from=build /app/api/src/modules/sales-utils/salesTaxRates.json ./salesTaxRates.json
EXPOSE 3000
CMD ["./server"]
CMD ["sh", "-c", "bunx prisma migrate deploy && ./server"]
# ---- Stage 5: Worker runtime image ----
FROM runtime-base AS worker
@@ -125,12 +132,61 @@ COPY dalpuri/package.json ./dalpuri/package.json
COPY ui/package.json ./ui/package.json
COPY patches ./patches
RUN bun install --frozen-lockfile
RUN bun install
COPY api/prisma/ ./api/prisma/
COPY api/prisma.config.ts ./api/prisma.config.ts
RUN chmod +x /app/api/prisma/migrate-entrypoint.sh
WORKDIR /app/api
CMD ["bunx", "prisma", "migrate", "deploy"]
# ---- Stage 7: Dalpuri CW-to-API sync runner ----
FROM oven/bun:1.3.11 AS dalpuri-sync
WORKDIR /app
COPY package.json bun.lock ./
COPY api/package.json ./api/package.json
COPY dalpuri/package.json ./dalpuri/package.json
COPY ui/package.json ./ui/package.json
COPY patches ./patches
RUN bun install
COPY dalpuri/src/ ./dalpuri/src/
COPY dalpuri/prisma/ ./dalpuri/prisma/
COPY dalpuri/prisma.config.ts ./dalpuri/prisma.config.ts
COPY api/prisma/ ./api/prisma/
COPY api/prisma.config.ts ./api/prisma.config.ts
WORKDIR /app/dalpuri
RUN DATABASE_URL="sqlserver://localhost:1433;database=dummy;user=dummy;password=dummy;trustServerCertificate=true" \
bunx prisma generate
WORKDIR /app/api
CMD ["sh", "prisma/migrate-entrypoint.sh"]
RUN DATABASE_URL="postgresql://dummy:dummy@localhost:5432/dummy" bunx prisma generate
WORKDIR /app/dalpuri
CMD ["bun", "run", "src/sync.ts"]
FROM oven/bun:1.3.11 AS setup-admin
WORKDIR /app
COPY package.json bun.lock ./
COPY api/package.json ./api/package.json
COPY dalpuri/package.json ./dalpuri/package.json
COPY ui/package.json ./ui/package.json
COPY patches ./patches
RUN bun install
COPY api/prisma/ ./api/prisma/
COPY api/prisma.config.ts ./api/prisma.config.ts
COPY api/setup-admin.ts ./api/setup-admin.ts
WORKDIR /app/api
RUN DATABASE_URL="postgresql://dummy:dummy@localhost:5432/dummy" bunx prisma generate
CMD ["bun", "run", "setup-admin.ts"]
File diff suppressed because one or more lines are too long
+111 -92
View File
@@ -381,7 +381,7 @@ export type OpportunityGroupByOutputType = {
name: string
notes: string | null
oppNarrative: string | null
typeId: number
typeId: number | null
stageId: number | null
statusId: number | null
taxCodeId: number | null
@@ -438,7 +438,7 @@ export type OpportunityWhereInput = {
name?: Prisma.StringFilter<"Opportunity"> | string
notes?: Prisma.StringNullableFilter<"Opportunity"> | string | null
oppNarrative?: Prisma.StringNullableFilter<"Opportunity"> | string | null
typeId?: Prisma.IntFilter<"Opportunity"> | number
typeId?: Prisma.IntNullableFilter<"Opportunity"> | number | null
stageId?: Prisma.IntNullableFilter<"Opportunity"> | number | null
statusId?: Prisma.IntNullableFilter<"Opportunity"> | number | null
taxCodeId?: Prisma.IntNullableFilter<"Opportunity"> | number | null
@@ -465,7 +465,7 @@ export type OpportunityWhereInput = {
createdAt?: Prisma.DateTimeFilter<"Opportunity"> | Date | string
updatedAt?: Prisma.DateTimeFilter<"Opportunity"> | Date | string
generatedQuotes?: Prisma.GeneratedQuotesListRelationFilter
type?: Prisma.XOR<Prisma.OpportunityTypeScalarRelationFilter, Prisma.OpportunityTypeWhereInput>
type?: Prisma.XOR<Prisma.OpportunityTypeNullableScalarRelationFilter, Prisma.OpportunityTypeWhereInput> | null
stage?: Prisma.XOR<Prisma.OpportunityStageNullableScalarRelationFilter, Prisma.OpportunityStageWhereInput> | null
status?: Prisma.XOR<Prisma.OpportunityStatusNullableScalarRelationFilter, Prisma.OpportunityStatusWhereInput> | null
taxCode?: Prisma.XOR<Prisma.TaxCodeNullableScalarRelationFilter, Prisma.TaxCodeWhereInput> | null
@@ -485,7 +485,7 @@ export type OpportunityOrderByWithRelationInput = {
name?: Prisma.SortOrder
notes?: Prisma.SortOrderInput | Prisma.SortOrder
oppNarrative?: Prisma.SortOrderInput | Prisma.SortOrder
typeId?: Prisma.SortOrder
typeId?: Prisma.SortOrderInput | Prisma.SortOrder
stageId?: Prisma.SortOrderInput | Prisma.SortOrder
statusId?: Prisma.SortOrderInput | Prisma.SortOrder
taxCodeId?: Prisma.SortOrderInput | Prisma.SortOrder
@@ -535,7 +535,7 @@ export type OpportunityWhereUniqueInput = Prisma.AtLeast<{
name?: Prisma.StringFilter<"Opportunity"> | string
notes?: Prisma.StringNullableFilter<"Opportunity"> | string | null
oppNarrative?: Prisma.StringNullableFilter<"Opportunity"> | string | null
typeId?: Prisma.IntFilter<"Opportunity"> | number
typeId?: Prisma.IntNullableFilter<"Opportunity"> | number | null
stageId?: Prisma.IntNullableFilter<"Opportunity"> | number | null
statusId?: Prisma.IntNullableFilter<"Opportunity"> | number | null
taxCodeId?: Prisma.IntNullableFilter<"Opportunity"> | number | null
@@ -562,7 +562,7 @@ export type OpportunityWhereUniqueInput = Prisma.AtLeast<{
createdAt?: Prisma.DateTimeFilter<"Opportunity"> | Date | string
updatedAt?: Prisma.DateTimeFilter<"Opportunity"> | Date | string
generatedQuotes?: Prisma.GeneratedQuotesListRelationFilter
type?: Prisma.XOR<Prisma.OpportunityTypeScalarRelationFilter, Prisma.OpportunityTypeWhereInput>
type?: Prisma.XOR<Prisma.OpportunityTypeNullableScalarRelationFilter, Prisma.OpportunityTypeWhereInput> | null
stage?: Prisma.XOR<Prisma.OpportunityStageNullableScalarRelationFilter, Prisma.OpportunityStageWhereInput> | null
status?: Prisma.XOR<Prisma.OpportunityStatusNullableScalarRelationFilter, Prisma.OpportunityStatusWhereInput> | null
taxCode?: Prisma.XOR<Prisma.TaxCodeNullableScalarRelationFilter, Prisma.TaxCodeWhereInput> | null
@@ -582,7 +582,7 @@ export type OpportunityOrderByWithAggregationInput = {
name?: Prisma.SortOrder
notes?: Prisma.SortOrderInput | Prisma.SortOrder
oppNarrative?: Prisma.SortOrderInput | Prisma.SortOrder
typeId?: Prisma.SortOrder
typeId?: Prisma.SortOrderInput | Prisma.SortOrder
stageId?: Prisma.SortOrderInput | Prisma.SortOrder
statusId?: Prisma.SortOrderInput | Prisma.SortOrder
taxCodeId?: Prisma.SortOrderInput | Prisma.SortOrder
@@ -624,7 +624,7 @@ export type OpportunityScalarWhereWithAggregatesInput = {
name?: Prisma.StringWithAggregatesFilter<"Opportunity"> | string
notes?: Prisma.StringNullableWithAggregatesFilter<"Opportunity"> | string | null
oppNarrative?: Prisma.StringNullableWithAggregatesFilter<"Opportunity"> | string | null
typeId?: Prisma.IntWithAggregatesFilter<"Opportunity"> | number
typeId?: Prisma.IntNullableWithAggregatesFilter<"Opportunity"> | number | null
stageId?: Prisma.IntNullableWithAggregatesFilter<"Opportunity"> | number | null
statusId?: Prisma.IntNullableWithAggregatesFilter<"Opportunity"> | number | null
taxCodeId?: Prisma.IntNullableWithAggregatesFilter<"Opportunity"> | number | null
@@ -674,7 +674,7 @@ export type OpportunityCreateInput = {
createdAt?: Date | string
updatedAt?: Date | string
generatedQuotes?: Prisma.GeneratedQuotesCreateNestedManyWithoutOpportunityInput
type: Prisma.OpportunityTypeCreateNestedOneWithoutOpportunitiesInput
type?: Prisma.OpportunityTypeCreateNestedOneWithoutOpportunitiesInput
stage?: Prisma.OpportunityStageCreateNestedOneWithoutOpportunitiesInput
status?: Prisma.OpportunityStatusCreateNestedOneWithoutOpportunitiesInput
taxCode?: Prisma.TaxCodeCreateNestedOneWithoutOpportunitiesInput
@@ -694,7 +694,7 @@ export type OpportunityUncheckedCreateInput = {
name: string
notes?: string | null
oppNarrative?: string | null
typeId: number
typeId?: number | null
stageId?: number | null
statusId?: number | null
taxCodeId?: number | null
@@ -746,7 +746,7 @@ export type OpportunityUpdateInput = {
createdAt?: Prisma.DateTimeFieldUpdateOperationsInput | Date | string
updatedAt?: Prisma.DateTimeFieldUpdateOperationsInput | Date | string
generatedQuotes?: Prisma.GeneratedQuotesUpdateManyWithoutOpportunityNestedInput
type?: Prisma.OpportunityTypeUpdateOneRequiredWithoutOpportunitiesNestedInput
type?: Prisma.OpportunityTypeUpdateOneWithoutOpportunitiesNestedInput
stage?: Prisma.OpportunityStageUpdateOneWithoutOpportunitiesNestedInput
status?: Prisma.OpportunityStatusUpdateOneWithoutOpportunitiesNestedInput
taxCode?: Prisma.TaxCodeUpdateOneWithoutOpportunitiesNestedInput
@@ -766,7 +766,7 @@ export type OpportunityUncheckedUpdateInput = {
name?: Prisma.StringFieldUpdateOperationsInput | string
notes?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
oppNarrative?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
typeId?: Prisma.IntFieldUpdateOperationsInput | number
typeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
stageId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
statusId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
taxCodeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
@@ -802,7 +802,7 @@ export type OpportunityCreateManyInput = {
name: string
notes?: string | null
oppNarrative?: string | null
typeId: number
typeId?: number | null
stageId?: number | null
statusId?: number | null
taxCodeId?: number | null
@@ -859,7 +859,7 @@ export type OpportunityUncheckedUpdateManyInput = {
name?: Prisma.StringFieldUpdateOperationsInput | string
notes?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
oppNarrative?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
typeId?: Prisma.IntFieldUpdateOperationsInput | number
typeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
stageId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
statusId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
taxCodeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
@@ -1572,7 +1572,7 @@ export type OpportunityCreateWithoutPrimarySalesRepInput = {
createdAt?: Date | string
updatedAt?: Date | string
generatedQuotes?: Prisma.GeneratedQuotesCreateNestedManyWithoutOpportunityInput
type: Prisma.OpportunityTypeCreateNestedOneWithoutOpportunitiesInput
type?: Prisma.OpportunityTypeCreateNestedOneWithoutOpportunitiesInput
stage?: Prisma.OpportunityStageCreateNestedOneWithoutOpportunitiesInput
status?: Prisma.OpportunityStatusCreateNestedOneWithoutOpportunitiesInput
taxCode?: Prisma.TaxCodeCreateNestedOneWithoutOpportunitiesInput
@@ -1591,7 +1591,7 @@ export type OpportunityUncheckedCreateWithoutPrimarySalesRepInput = {
name: string
notes?: string | null
oppNarrative?: string | null
typeId: number
typeId?: number | null
stageId?: number | null
statusId?: number | null
taxCodeId?: number | null
@@ -1652,7 +1652,7 @@ export type OpportunityCreateWithoutSecondarySalesRepInput = {
createdAt?: Date | string
updatedAt?: Date | string
generatedQuotes?: Prisma.GeneratedQuotesCreateNestedManyWithoutOpportunityInput
type: Prisma.OpportunityTypeCreateNestedOneWithoutOpportunitiesInput
type?: Prisma.OpportunityTypeCreateNestedOneWithoutOpportunitiesInput
stage?: Prisma.OpportunityStageCreateNestedOneWithoutOpportunitiesInput
status?: Prisma.OpportunityStatusCreateNestedOneWithoutOpportunitiesInput
taxCode?: Prisma.TaxCodeCreateNestedOneWithoutOpportunitiesInput
@@ -1671,7 +1671,7 @@ export type OpportunityUncheckedCreateWithoutSecondarySalesRepInput = {
name: string
notes?: string | null
oppNarrative?: string | null
typeId: number
typeId?: number | null
stageId?: number | null
statusId?: number | null
taxCodeId?: number | null
@@ -1735,7 +1735,7 @@ export type OpportunityScalarWhereInput = {
name?: Prisma.StringFilter<"Opportunity"> | string
notes?: Prisma.StringNullableFilter<"Opportunity"> | string | null
oppNarrative?: Prisma.StringNullableFilter<"Opportunity"> | string | null
typeId?: Prisma.IntFilter<"Opportunity"> | number
typeId?: Prisma.IntNullableFilter<"Opportunity"> | number | null
stageId?: Prisma.IntNullableFilter<"Opportunity"> | number | null
statusId?: Prisma.IntNullableFilter<"Opportunity"> | number | null
taxCodeId?: Prisma.IntNullableFilter<"Opportunity"> | number | null
@@ -1801,7 +1801,7 @@ export type OpportunityCreateWithoutLocationInput = {
createdAt?: Date | string
updatedAt?: Date | string
generatedQuotes?: Prisma.GeneratedQuotesCreateNestedManyWithoutOpportunityInput
type: Prisma.OpportunityTypeCreateNestedOneWithoutOpportunitiesInput
type?: Prisma.OpportunityTypeCreateNestedOneWithoutOpportunitiesInput
stage?: Prisma.OpportunityStageCreateNestedOneWithoutOpportunitiesInput
status?: Prisma.OpportunityStatusCreateNestedOneWithoutOpportunitiesInput
taxCode?: Prisma.TaxCodeCreateNestedOneWithoutOpportunitiesInput
@@ -1820,7 +1820,7 @@ export type OpportunityUncheckedCreateWithoutLocationInput = {
name: string
notes?: string | null
oppNarrative?: string | null
typeId: number
typeId?: number | null
stageId?: number | null
statusId?: number | null
taxCodeId?: number | null
@@ -1897,7 +1897,7 @@ export type OpportunityCreateWithoutDepartmentInput = {
createdAt?: Date | string
updatedAt?: Date | string
generatedQuotes?: Prisma.GeneratedQuotesCreateNestedManyWithoutOpportunityInput
type: Prisma.OpportunityTypeCreateNestedOneWithoutOpportunitiesInput
type?: Prisma.OpportunityTypeCreateNestedOneWithoutOpportunitiesInput
stage?: Prisma.OpportunityStageCreateNestedOneWithoutOpportunitiesInput
status?: Prisma.OpportunityStatusCreateNestedOneWithoutOpportunitiesInput
taxCode?: Prisma.TaxCodeCreateNestedOneWithoutOpportunitiesInput
@@ -1916,7 +1916,7 @@ export type OpportunityUncheckedCreateWithoutDepartmentInput = {
name: string
notes?: string | null
oppNarrative?: string | null
typeId: number
typeId?: number | null
stageId?: number | null
statusId?: number | null
taxCodeId?: number | null
@@ -1993,7 +1993,7 @@ export type OpportunityCreateWithoutCompanyInput = {
createdAt?: Date | string
updatedAt?: Date | string
generatedQuotes?: Prisma.GeneratedQuotesCreateNestedManyWithoutOpportunityInput
type: Prisma.OpportunityTypeCreateNestedOneWithoutOpportunitiesInput
type?: Prisma.OpportunityTypeCreateNestedOneWithoutOpportunitiesInput
stage?: Prisma.OpportunityStageCreateNestedOneWithoutOpportunitiesInput
status?: Prisma.OpportunityStatusCreateNestedOneWithoutOpportunitiesInput
taxCode?: Prisma.TaxCodeCreateNestedOneWithoutOpportunitiesInput
@@ -2012,7 +2012,7 @@ export type OpportunityUncheckedCreateWithoutCompanyInput = {
name: string
notes?: string | null
oppNarrative?: string | null
typeId: number
typeId?: number | null
stageId?: number | null
statusId?: number | null
taxCodeId?: number | null
@@ -2089,7 +2089,7 @@ export type OpportunityCreateWithoutSiteInput = {
createdAt?: Date | string
updatedAt?: Date | string
generatedQuotes?: Prisma.GeneratedQuotesCreateNestedManyWithoutOpportunityInput
type: Prisma.OpportunityTypeCreateNestedOneWithoutOpportunitiesInput
type?: Prisma.OpportunityTypeCreateNestedOneWithoutOpportunitiesInput
stage?: Prisma.OpportunityStageCreateNestedOneWithoutOpportunitiesInput
status?: Prisma.OpportunityStatusCreateNestedOneWithoutOpportunitiesInput
taxCode?: Prisma.TaxCodeCreateNestedOneWithoutOpportunitiesInput
@@ -2108,7 +2108,7 @@ export type OpportunityUncheckedCreateWithoutSiteInput = {
name: string
notes?: string | null
oppNarrative?: string | null
typeId: number
typeId?: number | null
stageId?: number | null
statusId?: number | null
taxCodeId?: number | null
@@ -2185,7 +2185,7 @@ export type OpportunityCreateWithoutContactInput = {
createdAt?: Date | string
updatedAt?: Date | string
generatedQuotes?: Prisma.GeneratedQuotesCreateNestedManyWithoutOpportunityInput
type: Prisma.OpportunityTypeCreateNestedOneWithoutOpportunitiesInput
type?: Prisma.OpportunityTypeCreateNestedOneWithoutOpportunitiesInput
stage?: Prisma.OpportunityStageCreateNestedOneWithoutOpportunitiesInput
status?: Prisma.OpportunityStatusCreateNestedOneWithoutOpportunitiesInput
taxCode?: Prisma.TaxCodeCreateNestedOneWithoutOpportunitiesInput
@@ -2204,7 +2204,7 @@ export type OpportunityUncheckedCreateWithoutContactInput = {
name: string
notes?: string | null
oppNarrative?: string | null
typeId: number
typeId?: number | null
stageId?: number | null
statusId?: number | null
taxCodeId?: number | null
@@ -2281,7 +2281,7 @@ export type OpportunityCreateWithoutProductsInput = {
createdAt?: Date | string
updatedAt?: Date | string
generatedQuotes?: Prisma.GeneratedQuotesCreateNestedManyWithoutOpportunityInput
type: Prisma.OpportunityTypeCreateNestedOneWithoutOpportunitiesInput
type?: Prisma.OpportunityTypeCreateNestedOneWithoutOpportunitiesInput
stage?: Prisma.OpportunityStageCreateNestedOneWithoutOpportunitiesInput
status?: Prisma.OpportunityStatusCreateNestedOneWithoutOpportunitiesInput
taxCode?: Prisma.TaxCodeCreateNestedOneWithoutOpportunitiesInput
@@ -2300,7 +2300,7 @@ export type OpportunityUncheckedCreateWithoutProductsInput = {
name: string
notes?: string | null
oppNarrative?: string | null
typeId: number
typeId?: number | null
stageId?: number | null
statusId?: number | null
taxCodeId?: number | null
@@ -2367,7 +2367,7 @@ export type OpportunityUpdateWithoutProductsInput = {
createdAt?: Prisma.DateTimeFieldUpdateOperationsInput | Date | string
updatedAt?: Prisma.DateTimeFieldUpdateOperationsInput | Date | string
generatedQuotes?: Prisma.GeneratedQuotesUpdateManyWithoutOpportunityNestedInput
type?: Prisma.OpportunityTypeUpdateOneRequiredWithoutOpportunitiesNestedInput
type?: Prisma.OpportunityTypeUpdateOneWithoutOpportunitiesNestedInput
stage?: Prisma.OpportunityStageUpdateOneWithoutOpportunitiesNestedInput
status?: Prisma.OpportunityStatusUpdateOneWithoutOpportunitiesNestedInput
taxCode?: Prisma.TaxCodeUpdateOneWithoutOpportunitiesNestedInput
@@ -2386,7 +2386,7 @@ export type OpportunityUncheckedUpdateWithoutProductsInput = {
name?: Prisma.StringFieldUpdateOperationsInput | string
notes?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
oppNarrative?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
typeId?: Prisma.IntFieldUpdateOperationsInput | number
typeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
stageId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
statusId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
taxCodeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
@@ -2437,7 +2437,7 @@ export type OpportunityCreateWithoutStageInput = {
createdAt?: Date | string
updatedAt?: Date | string
generatedQuotes?: Prisma.GeneratedQuotesCreateNestedManyWithoutOpportunityInput
type: Prisma.OpportunityTypeCreateNestedOneWithoutOpportunitiesInput
type?: Prisma.OpportunityTypeCreateNestedOneWithoutOpportunitiesInput
status?: Prisma.OpportunityStatusCreateNestedOneWithoutOpportunitiesInput
taxCode?: Prisma.TaxCodeCreateNestedOneWithoutOpportunitiesInput
primarySalesRep?: Prisma.UserCreateNestedOneWithoutOpportunitiesInput
@@ -2456,7 +2456,7 @@ export type OpportunityUncheckedCreateWithoutStageInput = {
name: string
notes?: string | null
oppNarrative?: string | null
typeId: number
typeId?: number | null
statusId?: number | null
taxCodeId?: number | null
interest?: $Enums.OpportunityInterest | null
@@ -2629,7 +2629,7 @@ export type OpportunityCreateWithoutStatusInput = {
createdAt?: Date | string
updatedAt?: Date | string
generatedQuotes?: Prisma.GeneratedQuotesCreateNestedManyWithoutOpportunityInput
type: Prisma.OpportunityTypeCreateNestedOneWithoutOpportunitiesInput
type?: Prisma.OpportunityTypeCreateNestedOneWithoutOpportunitiesInput
stage?: Prisma.OpportunityStageCreateNestedOneWithoutOpportunitiesInput
taxCode?: Prisma.TaxCodeCreateNestedOneWithoutOpportunitiesInput
primarySalesRep?: Prisma.UserCreateNestedOneWithoutOpportunitiesInput
@@ -2648,7 +2648,7 @@ export type OpportunityUncheckedCreateWithoutStatusInput = {
name: string
notes?: string | null
oppNarrative?: string | null
typeId: number
typeId?: number | null
stageId?: number | null
taxCodeId?: number | null
interest?: $Enums.OpportunityInterest | null
@@ -2724,7 +2724,7 @@ export type OpportunityCreateWithoutGeneratedQuotesInput = {
eneteredBy: string
createdAt?: Date | string
updatedAt?: Date | string
type: Prisma.OpportunityTypeCreateNestedOneWithoutOpportunitiesInput
type?: Prisma.OpportunityTypeCreateNestedOneWithoutOpportunitiesInput
stage?: Prisma.OpportunityStageCreateNestedOneWithoutOpportunitiesInput
status?: Prisma.OpportunityStatusCreateNestedOneWithoutOpportunitiesInput
taxCode?: Prisma.TaxCodeCreateNestedOneWithoutOpportunitiesInput
@@ -2744,7 +2744,7 @@ export type OpportunityUncheckedCreateWithoutGeneratedQuotesInput = {
name: string
notes?: string | null
oppNarrative?: string | null
typeId: number
typeId?: number | null
stageId?: number | null
statusId?: number | null
taxCodeId?: number | null
@@ -2810,7 +2810,7 @@ export type OpportunityUpdateWithoutGeneratedQuotesInput = {
eneteredBy?: Prisma.StringFieldUpdateOperationsInput | string
createdAt?: Prisma.DateTimeFieldUpdateOperationsInput | Date | string
updatedAt?: Prisma.DateTimeFieldUpdateOperationsInput | Date | string
type?: Prisma.OpportunityTypeUpdateOneRequiredWithoutOpportunitiesNestedInput
type?: Prisma.OpportunityTypeUpdateOneWithoutOpportunitiesNestedInput
stage?: Prisma.OpportunityStageUpdateOneWithoutOpportunitiesNestedInput
status?: Prisma.OpportunityStatusUpdateOneWithoutOpportunitiesNestedInput
taxCode?: Prisma.TaxCodeUpdateOneWithoutOpportunitiesNestedInput
@@ -2830,7 +2830,7 @@ export type OpportunityUncheckedUpdateWithoutGeneratedQuotesInput = {
name?: Prisma.StringFieldUpdateOperationsInput | string
notes?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
oppNarrative?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
typeId?: Prisma.IntFieldUpdateOperationsInput | number
typeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
stageId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
statusId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
taxCodeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
@@ -2881,7 +2881,7 @@ export type OpportunityCreateWithoutTaxCodeInput = {
createdAt?: Date | string
updatedAt?: Date | string
generatedQuotes?: Prisma.GeneratedQuotesCreateNestedManyWithoutOpportunityInput
type: Prisma.OpportunityTypeCreateNestedOneWithoutOpportunitiesInput
type?: Prisma.OpportunityTypeCreateNestedOneWithoutOpportunitiesInput
stage?: Prisma.OpportunityStageCreateNestedOneWithoutOpportunitiesInput
status?: Prisma.OpportunityStatusCreateNestedOneWithoutOpportunitiesInput
primarySalesRep?: Prisma.UserCreateNestedOneWithoutOpportunitiesInput
@@ -2900,7 +2900,7 @@ export type OpportunityUncheckedCreateWithoutTaxCodeInput = {
name: string
notes?: string | null
oppNarrative?: string | null
typeId: number
typeId?: number | null
stageId?: number | null
statusId?: number | null
interest?: $Enums.OpportunityInterest | null
@@ -2961,7 +2961,7 @@ export type OpportunityCreateManyPrimarySalesRepInput = {
name: string
notes?: string | null
oppNarrative?: string | null
typeId: number
typeId?: number | null
stageId?: number | null
statusId?: number | null
taxCodeId?: number | null
@@ -2994,7 +2994,7 @@ export type OpportunityCreateManySecondarySalesRepInput = {
name: string
notes?: string | null
oppNarrative?: string | null
typeId: number
typeId?: number | null
stageId?: number | null
statusId?: number | null
taxCodeId?: number | null
@@ -3043,7 +3043,7 @@ export type OpportunityUpdateWithoutPrimarySalesRepInput = {
createdAt?: Prisma.DateTimeFieldUpdateOperationsInput | Date | string
updatedAt?: Prisma.DateTimeFieldUpdateOperationsInput | Date | string
generatedQuotes?: Prisma.GeneratedQuotesUpdateManyWithoutOpportunityNestedInput
type?: Prisma.OpportunityTypeUpdateOneRequiredWithoutOpportunitiesNestedInput
type?: Prisma.OpportunityTypeUpdateOneWithoutOpportunitiesNestedInput
stage?: Prisma.OpportunityStageUpdateOneWithoutOpportunitiesNestedInput
status?: Prisma.OpportunityStatusUpdateOneWithoutOpportunitiesNestedInput
taxCode?: Prisma.TaxCodeUpdateOneWithoutOpportunitiesNestedInput
@@ -3062,7 +3062,7 @@ export type OpportunityUncheckedUpdateWithoutPrimarySalesRepInput = {
name?: Prisma.StringFieldUpdateOperationsInput | string
notes?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
oppNarrative?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
typeId?: Prisma.IntFieldUpdateOperationsInput | number
typeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
stageId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
statusId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
taxCodeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
@@ -3097,7 +3097,7 @@ export type OpportunityUncheckedUpdateManyWithoutPrimarySalesRepInput = {
name?: Prisma.StringFieldUpdateOperationsInput | string
notes?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
oppNarrative?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
typeId?: Prisma.IntFieldUpdateOperationsInput | number
typeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
stageId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
statusId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
taxCodeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
@@ -3146,7 +3146,7 @@ export type OpportunityUpdateWithoutSecondarySalesRepInput = {
createdAt?: Prisma.DateTimeFieldUpdateOperationsInput | Date | string
updatedAt?: Prisma.DateTimeFieldUpdateOperationsInput | Date | string
generatedQuotes?: Prisma.GeneratedQuotesUpdateManyWithoutOpportunityNestedInput
type?: Prisma.OpportunityTypeUpdateOneRequiredWithoutOpportunitiesNestedInput
type?: Prisma.OpportunityTypeUpdateOneWithoutOpportunitiesNestedInput
stage?: Prisma.OpportunityStageUpdateOneWithoutOpportunitiesNestedInput
status?: Prisma.OpportunityStatusUpdateOneWithoutOpportunitiesNestedInput
taxCode?: Prisma.TaxCodeUpdateOneWithoutOpportunitiesNestedInput
@@ -3165,7 +3165,7 @@ export type OpportunityUncheckedUpdateWithoutSecondarySalesRepInput = {
name?: Prisma.StringFieldUpdateOperationsInput | string
notes?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
oppNarrative?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
typeId?: Prisma.IntFieldUpdateOperationsInput | number
typeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
stageId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
statusId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
taxCodeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
@@ -3200,7 +3200,7 @@ export type OpportunityUncheckedUpdateManyWithoutSecondarySalesRepInput = {
name?: Prisma.StringFieldUpdateOperationsInput | string
notes?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
oppNarrative?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
typeId?: Prisma.IntFieldUpdateOperationsInput | number
typeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
stageId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
statusId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
taxCodeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
@@ -3233,7 +3233,7 @@ export type OpportunityCreateManyLocationInput = {
name: string
notes?: string | null
oppNarrative?: string | null
typeId: number
typeId?: number | null
stageId?: number | null
statusId?: number | null
taxCodeId?: number | null
@@ -3282,7 +3282,7 @@ export type OpportunityUpdateWithoutLocationInput = {
createdAt?: Prisma.DateTimeFieldUpdateOperationsInput | Date | string
updatedAt?: Prisma.DateTimeFieldUpdateOperationsInput | Date | string
generatedQuotes?: Prisma.GeneratedQuotesUpdateManyWithoutOpportunityNestedInput
type?: Prisma.OpportunityTypeUpdateOneRequiredWithoutOpportunitiesNestedInput
type?: Prisma.OpportunityTypeUpdateOneWithoutOpportunitiesNestedInput
stage?: Prisma.OpportunityStageUpdateOneWithoutOpportunitiesNestedInput
status?: Prisma.OpportunityStatusUpdateOneWithoutOpportunitiesNestedInput
taxCode?: Prisma.TaxCodeUpdateOneWithoutOpportunitiesNestedInput
@@ -3301,7 +3301,7 @@ export type OpportunityUncheckedUpdateWithoutLocationInput = {
name?: Prisma.StringFieldUpdateOperationsInput | string
notes?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
oppNarrative?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
typeId?: Prisma.IntFieldUpdateOperationsInput | number
typeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
stageId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
statusId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
taxCodeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
@@ -3336,7 +3336,7 @@ export type OpportunityUncheckedUpdateManyWithoutLocationInput = {
name?: Prisma.StringFieldUpdateOperationsInput | string
notes?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
oppNarrative?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
typeId?: Prisma.IntFieldUpdateOperationsInput | number
typeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
stageId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
statusId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
taxCodeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
@@ -3369,7 +3369,7 @@ export type OpportunityCreateManyDepartmentInput = {
name: string
notes?: string | null
oppNarrative?: string | null
typeId: number
typeId?: number | null
stageId?: number | null
statusId?: number | null
taxCodeId?: number | null
@@ -3418,7 +3418,7 @@ export type OpportunityUpdateWithoutDepartmentInput = {
createdAt?: Prisma.DateTimeFieldUpdateOperationsInput | Date | string
updatedAt?: Prisma.DateTimeFieldUpdateOperationsInput | Date | string
generatedQuotes?: Prisma.GeneratedQuotesUpdateManyWithoutOpportunityNestedInput
type?: Prisma.OpportunityTypeUpdateOneRequiredWithoutOpportunitiesNestedInput
type?: Prisma.OpportunityTypeUpdateOneWithoutOpportunitiesNestedInput
stage?: Prisma.OpportunityStageUpdateOneWithoutOpportunitiesNestedInput
status?: Prisma.OpportunityStatusUpdateOneWithoutOpportunitiesNestedInput
taxCode?: Prisma.TaxCodeUpdateOneWithoutOpportunitiesNestedInput
@@ -3437,7 +3437,7 @@ export type OpportunityUncheckedUpdateWithoutDepartmentInput = {
name?: Prisma.StringFieldUpdateOperationsInput | string
notes?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
oppNarrative?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
typeId?: Prisma.IntFieldUpdateOperationsInput | number
typeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
stageId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
statusId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
taxCodeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
@@ -3472,7 +3472,7 @@ export type OpportunityUncheckedUpdateManyWithoutDepartmentInput = {
name?: Prisma.StringFieldUpdateOperationsInput | string
notes?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
oppNarrative?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
typeId?: Prisma.IntFieldUpdateOperationsInput | number
typeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
stageId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
statusId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
taxCodeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
@@ -3505,7 +3505,7 @@ export type OpportunityCreateManyCompanyInput = {
name: string
notes?: string | null
oppNarrative?: string | null
typeId: number
typeId?: number | null
stageId?: number | null
statusId?: number | null
taxCodeId?: number | null
@@ -3554,7 +3554,7 @@ export type OpportunityUpdateWithoutCompanyInput = {
createdAt?: Prisma.DateTimeFieldUpdateOperationsInput | Date | string
updatedAt?: Prisma.DateTimeFieldUpdateOperationsInput | Date | string
generatedQuotes?: Prisma.GeneratedQuotesUpdateManyWithoutOpportunityNestedInput
type?: Prisma.OpportunityTypeUpdateOneRequiredWithoutOpportunitiesNestedInput
type?: Prisma.OpportunityTypeUpdateOneWithoutOpportunitiesNestedInput
stage?: Prisma.OpportunityStageUpdateOneWithoutOpportunitiesNestedInput
status?: Prisma.OpportunityStatusUpdateOneWithoutOpportunitiesNestedInput
taxCode?: Prisma.TaxCodeUpdateOneWithoutOpportunitiesNestedInput
@@ -3573,7 +3573,7 @@ export type OpportunityUncheckedUpdateWithoutCompanyInput = {
name?: Prisma.StringFieldUpdateOperationsInput | string
notes?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
oppNarrative?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
typeId?: Prisma.IntFieldUpdateOperationsInput | number
typeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
stageId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
statusId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
taxCodeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
@@ -3608,7 +3608,7 @@ export type OpportunityUncheckedUpdateManyWithoutCompanyInput = {
name?: Prisma.StringFieldUpdateOperationsInput | string
notes?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
oppNarrative?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
typeId?: Prisma.IntFieldUpdateOperationsInput | number
typeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
stageId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
statusId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
taxCodeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
@@ -3641,7 +3641,7 @@ export type OpportunityCreateManySiteInput = {
name: string
notes?: string | null
oppNarrative?: string | null
typeId: number
typeId?: number | null
stageId?: number | null
statusId?: number | null
taxCodeId?: number | null
@@ -3690,7 +3690,7 @@ export type OpportunityUpdateWithoutSiteInput = {
createdAt?: Prisma.DateTimeFieldUpdateOperationsInput | Date | string
updatedAt?: Prisma.DateTimeFieldUpdateOperationsInput | Date | string
generatedQuotes?: Prisma.GeneratedQuotesUpdateManyWithoutOpportunityNestedInput
type?: Prisma.OpportunityTypeUpdateOneRequiredWithoutOpportunitiesNestedInput
type?: Prisma.OpportunityTypeUpdateOneWithoutOpportunitiesNestedInput
stage?: Prisma.OpportunityStageUpdateOneWithoutOpportunitiesNestedInput
status?: Prisma.OpportunityStatusUpdateOneWithoutOpportunitiesNestedInput
taxCode?: Prisma.TaxCodeUpdateOneWithoutOpportunitiesNestedInput
@@ -3709,7 +3709,7 @@ export type OpportunityUncheckedUpdateWithoutSiteInput = {
name?: Prisma.StringFieldUpdateOperationsInput | string
notes?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
oppNarrative?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
typeId?: Prisma.IntFieldUpdateOperationsInput | number
typeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
stageId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
statusId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
taxCodeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
@@ -3744,7 +3744,7 @@ export type OpportunityUncheckedUpdateManyWithoutSiteInput = {
name?: Prisma.StringFieldUpdateOperationsInput | string
notes?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
oppNarrative?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
typeId?: Prisma.IntFieldUpdateOperationsInput | number
typeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
stageId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
statusId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
taxCodeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
@@ -3777,7 +3777,7 @@ export type OpportunityCreateManyContactInput = {
name: string
notes?: string | null
oppNarrative?: string | null
typeId: number
typeId?: number | null
stageId?: number | null
statusId?: number | null
taxCodeId?: number | null
@@ -3826,7 +3826,7 @@ export type OpportunityUpdateWithoutContactInput = {
createdAt?: Prisma.DateTimeFieldUpdateOperationsInput | Date | string
updatedAt?: Prisma.DateTimeFieldUpdateOperationsInput | Date | string
generatedQuotes?: Prisma.GeneratedQuotesUpdateManyWithoutOpportunityNestedInput
type?: Prisma.OpportunityTypeUpdateOneRequiredWithoutOpportunitiesNestedInput
type?: Prisma.OpportunityTypeUpdateOneWithoutOpportunitiesNestedInput
stage?: Prisma.OpportunityStageUpdateOneWithoutOpportunitiesNestedInput
status?: Prisma.OpportunityStatusUpdateOneWithoutOpportunitiesNestedInput
taxCode?: Prisma.TaxCodeUpdateOneWithoutOpportunitiesNestedInput
@@ -3845,7 +3845,7 @@ export type OpportunityUncheckedUpdateWithoutContactInput = {
name?: Prisma.StringFieldUpdateOperationsInput | string
notes?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
oppNarrative?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
typeId?: Prisma.IntFieldUpdateOperationsInput | number
typeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
stageId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
statusId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
taxCodeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
@@ -3880,7 +3880,7 @@ export type OpportunityUncheckedUpdateManyWithoutContactInput = {
name?: Prisma.StringFieldUpdateOperationsInput | string
notes?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
oppNarrative?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
typeId?: Prisma.IntFieldUpdateOperationsInput | number
typeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
stageId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
statusId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
taxCodeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
@@ -3913,7 +3913,7 @@ export type OpportunityCreateManyStageInput = {
name: string
notes?: string | null
oppNarrative?: string | null
typeId: number
typeId?: number | null
statusId?: number | null
taxCodeId?: number | null
interest?: $Enums.OpportunityInterest | null
@@ -3962,7 +3962,7 @@ export type OpportunityUpdateWithoutStageInput = {
createdAt?: Prisma.DateTimeFieldUpdateOperationsInput | Date | string
updatedAt?: Prisma.DateTimeFieldUpdateOperationsInput | Date | string
generatedQuotes?: Prisma.GeneratedQuotesUpdateManyWithoutOpportunityNestedInput
type?: Prisma.OpportunityTypeUpdateOneRequiredWithoutOpportunitiesNestedInput
type?: Prisma.OpportunityTypeUpdateOneWithoutOpportunitiesNestedInput
status?: Prisma.OpportunityStatusUpdateOneWithoutOpportunitiesNestedInput
taxCode?: Prisma.TaxCodeUpdateOneWithoutOpportunitiesNestedInput
primarySalesRep?: Prisma.UserUpdateOneWithoutOpportunitiesNestedInput
@@ -3981,7 +3981,7 @@ export type OpportunityUncheckedUpdateWithoutStageInput = {
name?: Prisma.StringFieldUpdateOperationsInput | string
notes?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
oppNarrative?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
typeId?: Prisma.IntFieldUpdateOperationsInput | number
typeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
statusId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
taxCodeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
interest?: Prisma.NullableEnumOpportunityInterestFieldUpdateOperationsInput | $Enums.OpportunityInterest | null
@@ -4016,7 +4016,7 @@ export type OpportunityUncheckedUpdateManyWithoutStageInput = {
name?: Prisma.StringFieldUpdateOperationsInput | string
notes?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
oppNarrative?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
typeId?: Prisma.IntFieldUpdateOperationsInput | number
typeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
statusId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
taxCodeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
interest?: Prisma.NullableEnumOpportunityInterestFieldUpdateOperationsInput | $Enums.OpportunityInterest | null
@@ -4185,7 +4185,7 @@ export type OpportunityCreateManyStatusInput = {
name: string
notes?: string | null
oppNarrative?: string | null
typeId: number
typeId?: number | null
stageId?: number | null
taxCodeId?: number | null
interest?: $Enums.OpportunityInterest | null
@@ -4234,7 +4234,7 @@ export type OpportunityUpdateWithoutStatusInput = {
createdAt?: Prisma.DateTimeFieldUpdateOperationsInput | Date | string
updatedAt?: Prisma.DateTimeFieldUpdateOperationsInput | Date | string
generatedQuotes?: Prisma.GeneratedQuotesUpdateManyWithoutOpportunityNestedInput
type?: Prisma.OpportunityTypeUpdateOneRequiredWithoutOpportunitiesNestedInput
type?: Prisma.OpportunityTypeUpdateOneWithoutOpportunitiesNestedInput
stage?: Prisma.OpportunityStageUpdateOneWithoutOpportunitiesNestedInput
taxCode?: Prisma.TaxCodeUpdateOneWithoutOpportunitiesNestedInput
primarySalesRep?: Prisma.UserUpdateOneWithoutOpportunitiesNestedInput
@@ -4253,7 +4253,7 @@ export type OpportunityUncheckedUpdateWithoutStatusInput = {
name?: Prisma.StringFieldUpdateOperationsInput | string
notes?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
oppNarrative?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
typeId?: Prisma.IntFieldUpdateOperationsInput | number
typeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
stageId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
taxCodeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
interest?: Prisma.NullableEnumOpportunityInterestFieldUpdateOperationsInput | $Enums.OpportunityInterest | null
@@ -4288,7 +4288,7 @@ export type OpportunityUncheckedUpdateManyWithoutStatusInput = {
name?: Prisma.StringFieldUpdateOperationsInput | string
notes?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
oppNarrative?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
typeId?: Prisma.IntFieldUpdateOperationsInput | number
typeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
stageId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
taxCodeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
interest?: Prisma.NullableEnumOpportunityInterestFieldUpdateOperationsInput | $Enums.OpportunityInterest | null
@@ -4321,7 +4321,7 @@ export type OpportunityCreateManyTaxCodeInput = {
name: string
notes?: string | null
oppNarrative?: string | null
typeId: number
typeId?: number | null
stageId?: number | null
statusId?: number | null
interest?: $Enums.OpportunityInterest | null
@@ -4370,7 +4370,7 @@ export type OpportunityUpdateWithoutTaxCodeInput = {
createdAt?: Prisma.DateTimeFieldUpdateOperationsInput | Date | string
updatedAt?: Prisma.DateTimeFieldUpdateOperationsInput | Date | string
generatedQuotes?: Prisma.GeneratedQuotesUpdateManyWithoutOpportunityNestedInput
type?: Prisma.OpportunityTypeUpdateOneRequiredWithoutOpportunitiesNestedInput
type?: Prisma.OpportunityTypeUpdateOneWithoutOpportunitiesNestedInput
stage?: Prisma.OpportunityStageUpdateOneWithoutOpportunitiesNestedInput
status?: Prisma.OpportunityStatusUpdateOneWithoutOpportunitiesNestedInput
primarySalesRep?: Prisma.UserUpdateOneWithoutOpportunitiesNestedInput
@@ -4389,7 +4389,7 @@ export type OpportunityUncheckedUpdateWithoutTaxCodeInput = {
name?: Prisma.StringFieldUpdateOperationsInput | string
notes?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
oppNarrative?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
typeId?: Prisma.IntFieldUpdateOperationsInput | number
typeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
stageId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
statusId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
interest?: Prisma.NullableEnumOpportunityInterestFieldUpdateOperationsInput | $Enums.OpportunityInterest | null
@@ -4424,7 +4424,7 @@ export type OpportunityUncheckedUpdateManyWithoutTaxCodeInput = {
name?: Prisma.StringFieldUpdateOperationsInput | string
notes?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
oppNarrative?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
typeId?: Prisma.IntFieldUpdateOperationsInput | number
typeId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
stageId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
statusId?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
interest?: Prisma.NullableEnumOpportunityInterestFieldUpdateOperationsInput | $Enums.OpportunityInterest | null
@@ -4524,7 +4524,7 @@ export type OpportunitySelect<ExtArgs extends runtime.Types.Extensions.InternalA
createdAt?: boolean
updatedAt?: boolean
generatedQuotes?: boolean | Prisma.Opportunity$generatedQuotesArgs<ExtArgs>
type?: boolean | Prisma.OpportunityTypeDefaultArgs<ExtArgs>
type?: boolean | Prisma.Opportunity$typeArgs<ExtArgs>
stage?: boolean | Prisma.Opportunity$stageArgs<ExtArgs>
status?: boolean | Prisma.Opportunity$statusArgs<ExtArgs>
taxCode?: boolean | Prisma.Opportunity$taxCodeArgs<ExtArgs>
@@ -4571,7 +4571,7 @@ export type OpportunitySelectCreateManyAndReturn<ExtArgs extends runtime.Types.E
eneteredBy?: boolean
createdAt?: boolean
updatedAt?: boolean
type?: boolean | Prisma.OpportunityTypeDefaultArgs<ExtArgs>
type?: boolean | Prisma.Opportunity$typeArgs<ExtArgs>
stage?: boolean | Prisma.Opportunity$stageArgs<ExtArgs>
status?: boolean | Prisma.Opportunity$statusArgs<ExtArgs>
taxCode?: boolean | Prisma.Opportunity$taxCodeArgs<ExtArgs>
@@ -4616,7 +4616,7 @@ export type OpportunitySelectUpdateManyAndReturn<ExtArgs extends runtime.Types.E
eneteredBy?: boolean
createdAt?: boolean
updatedAt?: boolean
type?: boolean | Prisma.OpportunityTypeDefaultArgs<ExtArgs>
type?: boolean | Prisma.Opportunity$typeArgs<ExtArgs>
stage?: boolean | Prisma.Opportunity$stageArgs<ExtArgs>
status?: boolean | Prisma.Opportunity$statusArgs<ExtArgs>
taxCode?: boolean | Prisma.Opportunity$taxCodeArgs<ExtArgs>
@@ -4666,7 +4666,7 @@ export type OpportunitySelectScalar = {
export type OpportunityOmit<ExtArgs extends runtime.Types.Extensions.InternalArgs = runtime.Types.Extensions.DefaultArgs> = runtime.Types.Extensions.GetOmit<"id" | "uid" | "name" | "notes" | "oppNarrative" | "typeId" | "stageId" | "statusId" | "taxCodeId" | "interest" | "probability" | "source" | "primarySalesRepId" | "secondarySalesRepId" | "companyId" | "contactId" | "siteId" | "customerPO" | "locationId" | "departmentId" | "expectedCloseDate" | "pipelineChangeDate" | "dateBecameLead" | "closedDate" | "closedFlag" | "closedById" | "productSequence" | "updatedBy" | "eneteredBy" | "createdAt" | "updatedAt", ExtArgs["result"]["opportunity"]>
export type OpportunityInclude<ExtArgs extends runtime.Types.Extensions.InternalArgs = runtime.Types.Extensions.DefaultArgs> = {
generatedQuotes?: boolean | Prisma.Opportunity$generatedQuotesArgs<ExtArgs>
type?: boolean | Prisma.OpportunityTypeDefaultArgs<ExtArgs>
type?: boolean | Prisma.Opportunity$typeArgs<ExtArgs>
stage?: boolean | Prisma.Opportunity$stageArgs<ExtArgs>
status?: boolean | Prisma.Opportunity$statusArgs<ExtArgs>
taxCode?: boolean | Prisma.Opportunity$taxCodeArgs<ExtArgs>
@@ -4681,7 +4681,7 @@ export type OpportunityInclude<ExtArgs extends runtime.Types.Extensions.Internal
_count?: boolean | Prisma.OpportunityCountOutputTypeDefaultArgs<ExtArgs>
}
export type OpportunityIncludeCreateManyAndReturn<ExtArgs extends runtime.Types.Extensions.InternalArgs = runtime.Types.Extensions.DefaultArgs> = {
type?: boolean | Prisma.OpportunityTypeDefaultArgs<ExtArgs>
type?: boolean | Prisma.Opportunity$typeArgs<ExtArgs>
stage?: boolean | Prisma.Opportunity$stageArgs<ExtArgs>
status?: boolean | Prisma.Opportunity$statusArgs<ExtArgs>
taxCode?: boolean | Prisma.Opportunity$taxCodeArgs<ExtArgs>
@@ -4694,7 +4694,7 @@ export type OpportunityIncludeCreateManyAndReturn<ExtArgs extends runtime.Types.
department?: boolean | Prisma.Opportunity$departmentArgs<ExtArgs>
}
export type OpportunityIncludeUpdateManyAndReturn<ExtArgs extends runtime.Types.Extensions.InternalArgs = runtime.Types.Extensions.DefaultArgs> = {
type?: boolean | Prisma.OpportunityTypeDefaultArgs<ExtArgs>
type?: boolean | Prisma.Opportunity$typeArgs<ExtArgs>
stage?: boolean | Prisma.Opportunity$stageArgs<ExtArgs>
status?: boolean | Prisma.Opportunity$statusArgs<ExtArgs>
taxCode?: boolean | Prisma.Opportunity$taxCodeArgs<ExtArgs>
@@ -4711,7 +4711,7 @@ export type $OpportunityPayload<ExtArgs extends runtime.Types.Extensions.Interna
name: "Opportunity"
objects: {
generatedQuotes: Prisma.$GeneratedQuotesPayload<ExtArgs>[]
type: Prisma.$OpportunityTypePayload<ExtArgs>
type: Prisma.$OpportunityTypePayload<ExtArgs> | null
stage: Prisma.$OpportunityStagePayload<ExtArgs> | null
status: Prisma.$OpportunityStatusPayload<ExtArgs> | null
taxCode: Prisma.$TaxCodePayload<ExtArgs> | null
@@ -4730,7 +4730,7 @@ export type $OpportunityPayload<ExtArgs extends runtime.Types.Extensions.Interna
name: string
notes: string | null
oppNarrative: string | null
typeId: number
typeId: number | null
stageId: number | null
statusId: number | null
taxCodeId: number | null
@@ -5151,7 +5151,7 @@ readonly fields: OpportunityFieldRefs;
export interface Prisma__OpportunityClient<T, Null = never, ExtArgs extends runtime.Types.Extensions.InternalArgs = runtime.Types.Extensions.DefaultArgs, GlobalOmitOptions = {}> extends Prisma.PrismaPromise<T> {
readonly [Symbol.toStringTag]: "PrismaPromise"
generatedQuotes<T extends Prisma.Opportunity$generatedQuotesArgs<ExtArgs> = {}>(args?: Prisma.Subset<T, Prisma.Opportunity$generatedQuotesArgs<ExtArgs>>): Prisma.PrismaPromise<runtime.Types.Result.GetResult<Prisma.$GeneratedQuotesPayload<ExtArgs>, T, "findMany", GlobalOmitOptions> | Null>
type<T extends Prisma.OpportunityTypeDefaultArgs<ExtArgs> = {}>(args?: Prisma.Subset<T, Prisma.OpportunityTypeDefaultArgs<ExtArgs>>): Prisma.Prisma__OpportunityTypeClient<runtime.Types.Result.GetResult<Prisma.$OpportunityTypePayload<ExtArgs>, T, "findUniqueOrThrow", GlobalOmitOptions> | Null, Null, ExtArgs, GlobalOmitOptions>
type<T extends Prisma.Opportunity$typeArgs<ExtArgs> = {}>(args?: Prisma.Subset<T, Prisma.Opportunity$typeArgs<ExtArgs>>): Prisma.Prisma__OpportunityTypeClient<runtime.Types.Result.GetResult<Prisma.$OpportunityTypePayload<ExtArgs>, T, "findUniqueOrThrow", GlobalOmitOptions> | null, null, ExtArgs, GlobalOmitOptions>
stage<T extends Prisma.Opportunity$stageArgs<ExtArgs> = {}>(args?: Prisma.Subset<T, Prisma.Opportunity$stageArgs<ExtArgs>>): Prisma.Prisma__OpportunityStageClient<runtime.Types.Result.GetResult<Prisma.$OpportunityStagePayload<ExtArgs>, T, "findUniqueOrThrow", GlobalOmitOptions> | null, null, ExtArgs, GlobalOmitOptions>
status<T extends Prisma.Opportunity$statusArgs<ExtArgs> = {}>(args?: Prisma.Subset<T, Prisma.Opportunity$statusArgs<ExtArgs>>): Prisma.Prisma__OpportunityStatusClient<runtime.Types.Result.GetResult<Prisma.$OpportunityStatusPayload<ExtArgs>, T, "findUniqueOrThrow", GlobalOmitOptions> | null, null, ExtArgs, GlobalOmitOptions>
taxCode<T extends Prisma.Opportunity$taxCodeArgs<ExtArgs> = {}>(args?: Prisma.Subset<T, Prisma.Opportunity$taxCodeArgs<ExtArgs>>): Prisma.Prisma__TaxCodeClient<runtime.Types.Result.GetResult<Prisma.$TaxCodePayload<ExtArgs>, T, "findUniqueOrThrow", GlobalOmitOptions> | null, null, ExtArgs, GlobalOmitOptions>
@@ -5647,6 +5647,25 @@ export type Opportunity$generatedQuotesArgs<ExtArgs extends runtime.Types.Extens
distinct?: Prisma.GeneratedQuotesScalarFieldEnum | Prisma.GeneratedQuotesScalarFieldEnum[]
}
/**
* Opportunity.type
*/
export type Opportunity$typeArgs<ExtArgs extends runtime.Types.Extensions.InternalArgs = runtime.Types.Extensions.DefaultArgs> = {
/**
* Select specific fields to fetch from the OpportunityType
*/
select?: Prisma.OpportunityTypeSelect<ExtArgs> | null
/**
* Omit specific fields from the OpportunityType
*/
omit?: Prisma.OpportunityTypeOmit<ExtArgs> | null
/**
* Choose, which related nodes to fetch as well
*/
include?: Prisma.OpportunityTypeInclude<ExtArgs> | null
where?: Prisma.OpportunityTypeWhereInput
}
/**
* Opportunity.stage
*/
+5
View File
@@ -20,6 +20,11 @@ spec:
env:
- name: MANAGER_SOCKET_URL
value: "http://optima-api.optima.svc.cluster.local:8671"
- name: API_DATABASE_URL
valueFrom:
secretKeyRef:
name: api-env-secret
key: DATABASE_URL
envFrom:
- secretRef:
name: api-env-secret
+3 -5
View File
@@ -1,11 +1,9 @@
import { defineConfig, env } from 'prisma/config'
export default defineConfig({
export default {
schema: 'prisma/schema.prisma',
migrations: {
path: 'prisma/migrations',
},
datasource: {
url: env('DATABASE_URL'),
url: process.env.DATABASE_URL,
},
})
}
+5 -2
View File
@@ -26,8 +26,11 @@ while [ $ATTEMPT -lt $MAX_RETRIES ]; do
# P3009: a previously-failed migration is blocking deploy.
# The error message contains the migration name in backticks:
# The `20260402000000_fix_severity_typo` migration started at ... failed
if echo "$DEPLOY_OUTPUT" | grep -q "P3009"; then
FAILED=$(echo "$DEPLOY_OUTPUT" | grep -oE '\`[0-9]{14}(_[a-zA-Z_]+)?\`' | tr -d '\`' | head -1)
# Strip ANSI escape codes first (Prisma may colorize output even without TTY),
# then use a simple backtick-content regex rather than a rigid format match.
CLEAN_OUTPUT=$(printf '%s\n' "$DEPLOY_OUTPUT" | sed 's/\x1b\[[0-9;]*[mGKHFJr]//g')
if printf '%s\n' "$CLEAN_OUTPUT" | grep -q "P3009"; then
FAILED=$(printf '%s\n' "$CLEAN_OUTPUT" | grep -o '`[^`]*`' | grep '[0-9]' | tr -d '`' | head -1)
if [ -n "$FAILED" ]; then
echo "[migrate] Resolving failed migration as rolled-back: $FAILED"
RESOLVE_OUTPUT=""
@@ -210,6 +210,8 @@ CREATE UNIQUE INDEX IF NOT EXISTS "CatalogItem_id_key" ON "CatalogItem"("id");
-- =============================================================================
-- SECTION 4: Company — change id TEXT→INTEGER, add uid PK, add columns
-- Production has ~4500 rows with CUID text PKs and cw_CompanyId integers
-- that must be preserved as uid and id respectively.
-- =============================================================================
-- Drop FKs that reference Company by old id
@@ -229,12 +231,18 @@ DROP INDEX IF EXISTS "Company_cw_CompanyId_key";
DROP INDEX IF EXISTS "Company_cw_Identifier_key";
DO $$ BEGIN
-- Add uid PK column if missing
-- Step 1: Add uid as NULLABLE (no default) so existing rows stay NULL temporarily
IF NOT EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'Company' AND column_name = 'uid') THEN
ALTER TABLE "Company" ADD COLUMN "uid" TEXT NOT NULL DEFAULT '';
ALTER TABLE "Company" ADD COLUMN "uid" TEXT;
END IF;
-- Swap PK from id to uid
-- Step 2: Populate uid from the old text PK (old id was a CUID — it becomes uid)
UPDATE "Company" SET "uid" = "id" WHERE "uid" IS NULL;
-- Step 3: Now make uid NOT NULL (all rows are populated)
ALTER TABLE "Company" ALTER COLUMN "uid" SET NOT NULL;
-- Step 4: Swap PK from id (text) to uid (text)
IF EXISTS (
SELECT 1 FROM information_schema.table_constraints tc
JOIN information_schema.key_column_usage kcu ON tc.constraint_name = kcu.constraint_name
@@ -244,7 +252,8 @@ DO $$ BEGIN
ALTER TABLE "Company" ADD CONSTRAINT "Company_pkey" PRIMARY KEY ("uid");
END IF;
-- Change id from TEXT to INTEGER
-- Step 5: Change id from TEXT to INTEGER
-- NOTE: do this BEFORE dropping cw_CompanyId so we can populate from it below
IF EXISTS (
SELECT 1 FROM information_schema.columns
WHERE table_name = 'Company' AND column_name = 'id' AND data_type = 'text'
@@ -253,7 +262,12 @@ DO $$ BEGIN
ALTER TABLE "Company" ADD COLUMN "id" INTEGER;
END IF;
-- Drop old CW-specific columns
-- Step 6: Populate new integer id from cw_CompanyId (CW integer company id)
IF EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'Company' AND column_name = 'cw_CompanyId') THEN
UPDATE "Company" SET "id" = "cw_CompanyId" WHERE "id" IS NULL;
END IF;
-- Step 7: Drop old CW-specific columns (data now in id and uid)
IF EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'Company' AND column_name = 'cw_CompanyId') THEN
ALTER TABLE "Company" DROP COLUMN "cw_CompanyId";
END IF;
@@ -261,7 +275,7 @@ DO $$ BEGIN
ALTER TABLE "Company" DROP COLUMN "cw_Identifier";
END IF;
-- Add new columns
-- Step 8: Add new columns
IF NOT EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'Company' AND column_name = 'dateDeleted') THEN
ALTER TABLE "Company" ADD COLUMN "dateDeleted" TIMESTAMP(3);
END IF;
@@ -291,10 +305,15 @@ DO $$ BEGIN
END IF;
END $$;
-- Make Company.id NOT NULL (all rows were populated from cw_CompanyId above)
ALTER TABLE "Company" ALTER COLUMN "id" SET NOT NULL;
CREATE UNIQUE INDEX IF NOT EXISTS "Company_id_key" ON "Company"("id");
-- =============================================================================
-- SECTION 5: UnifiSite — change companyId from TEXT to INTEGER
-- SECTION 5: UnifiSite — change companyId from TEXT to INTEGER (data migration)
-- Production has ~180 rows where companyId (text) = Company.uid (the old text
-- PK that was copied into uid in Section 4). We join on Company.uid to get
-- the new integer Company.id and preserve the relationship.
-- =============================================================================
DO $$ BEGIN
@@ -302,8 +321,24 @@ DO $$ BEGIN
SELECT 1 FROM information_schema.columns
WHERE table_name = 'UnifiSite' AND column_name = 'companyId' AND data_type = 'text'
) THEN
-- Add a temporary integer column to hold the mapped value
IF NOT EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'UnifiSite' AND column_name = 'companyId_int') THEN
ALTER TABLE "UnifiSite" ADD COLUMN "companyId_int" INTEGER;
END IF;
-- Map old text companyId (= Company.uid) → new integer Company.id
UPDATE "UnifiSite" us
SET "companyId_int" = c."id"
FROM "Company" c
WHERE c."uid" = us."companyId";
-- Replace old text column with the populated integer column
ALTER TABLE "UnifiSite" DROP COLUMN "companyId";
ALTER TABLE "UnifiSite" ADD COLUMN "companyId" INTEGER;
ALTER TABLE "UnifiSite" RENAME COLUMN "companyId_int" TO "companyId";
ELSIF EXISTS (
SELECT 1 FROM information_schema.columns
WHERE table_name = 'UnifiSite' AND column_name = 'companyId_int'
) THEN
-- Edge case: int column added but not renamed (interrupted previous run)
ALTER TABLE "UnifiSite" RENAME COLUMN "companyId_int" TO "companyId";
END IF;
END $$;
@@ -1467,7 +1502,8 @@ ALTER TABLE "CatalogItem" ALTER COLUMN "uid" DROP DEFAULT;
ALTER TABLE "CatalogItem" ALTER COLUMN "id" SET NOT NULL;
ALTER TABLE "CatalogItem" ALTER COLUMN "subcategoryId" DROP DEFAULT;
-- Company: drop defaults, enforce NOT NULL
-- Company: uid was added nullable (no default), id was made NOT NULL in Section 4.
-- These are no-ops but kept for safety on fresh DBs.
ALTER TABLE "Company" ALTER COLUMN "uid" DROP DEFAULT;
ALTER TABLE "Company" ALTER COLUMN "id" SET NOT NULL;
+2 -2
View File
@@ -1032,8 +1032,8 @@ model Opportunity {
generatedQuotes GeneratedQuotes[]
typeId Int
type OpportunityType @relation(fields: [typeId], references: [id])
typeId Int?
type OpportunityType? @relation(fields: [typeId], references: [id])
stageId Int?
stage OpportunityStage? @relation(fields: [stageId], references: [id])
+30 -17
View File
@@ -4,6 +4,7 @@ import { companies } from "../../../managers/companies";
import { apiResponse } from "../../../modules/api-utils/apiResponse";
import { ContentfulStatusCode } from "hono/utils/http-status";
import { authMiddleware } from "../../middleware/authorization";
import GenericError from "../../../Errors/GenericError";
import { processObjectValuePerms } from "../../../modules/permission-utils/processObjectPermissions";
/* /v1/company/companies/[id] */
@@ -12,18 +13,37 @@ export default createRoute(
["/companies/:identifier"],
async (c) => {
const company = await companies.fetch(c.req.param("identifier"));
const user = c.get("user");
const includeAddress =
c.req.query("includeAddress") === "true" &&
!!user &&
(await user.hasPermission("company.fetch.address"));
const company = await companies.fetch(c.req.param("identifier") as string);
const includeAddress = c.req.query("includeAddress") === "true";
const includePrimaryContact =
c.req.query("includePrimaryContact") === "true";
const includeAllContacts =
c.req.query("includeAllContacts") === "true" &&
!!user &&
(await user.hasPermission("company.fetch.contacts"));
const includeAllContacts = c.req.query("includeAllContacts") === "true";
console.log(company.toJson({ includeAddress, includePrimaryContact, includeAllContacts }));
// Check for address-specific permission if includeAddress is requested
if (includeAddress) {
const user = c.get("user");
if (!user || !(await user.hasPermission("company.fetch.address"))) {
throw new GenericError({
name: "InsufficientPermission",
message: "You do not have permission to view company addresses.",
status: 403,
});
}
}
// Check for contacts permission if includeAllContacts is requested
if (includeAllContacts) {
const user = c.get("user");
if (!user || !(await user.hasPermission("company.fetch.contacts"))) {
throw new GenericError({
name: "InsufficientPermission",
message: "You do not have permission to view company contacts.",
status: 403,
});
}
}
const companyData = company.toJson({
includeAddress,
@@ -36,13 +56,6 @@ export default createRoute(
c.get("user"),
);
// cw_Data fields were already gated by the explicit permission checks above
// (company.fetch.contacts / company.fetch.address). Re-attach them so they
// are not silently dropped by field-level gating on obj.company.cw_Data.
if (companyData.cw_Data && Object.keys(companyData.cw_Data).length > 0) {
(gatedData as any).cw_Data = companyData.cw_Data;
}
const response = apiResponse.successful(
"Company Fetched Successfully!",
gatedData,
+1 -1
View File
@@ -2,7 +2,7 @@ import { createRoute } from "../../modules/api-utils/createRoute";
import { apiResponse } from "../../modules/api-utils/apiResponse";
import { ContentfulStatusCode } from "hono/utils/http-status";
import { authMiddleware } from "../middleware/authorization";
import { getBoss } from "../../workert";
import { getBoss } from "../../boss-instance";
import { WorkerQueue } from "../../modules/workers/queues";
/* POST /v1/cw/sync/full */
+30
View File
@@ -0,0 +1,30 @@
/**
* Shared PgBoss singleton — kept in its own module to break circular imports
* between workert.ts and the worker modules that call getBoss().
*/
import { PgBoss } from "pg-boss";
function makePgBossUrl(rawUrl: string): string {
try {
const u = new URL(rawUrl);
// 30-second statement timeout to prevent individual SQL queries from
// hanging indefinitely if the DB server stops responding mid-query.
u.searchParams.set("options", "-c statement_timeout=30000");
return u.toString();
} catch {
return rawUrl;
}
}
export const boss = new PgBoss({
connectionString: makePgBossUrl(process.env.DATABASE_URL!),
connectionTimeoutMillis: 15_000,
});
boss.on("error", (err) => {
console.error("[worker] PgBoss error", err);
});
export function getBoss(): PgBoss {
return boss;
}
+34 -9
View File
@@ -274,7 +274,7 @@ export class CompanyController {
(ci) => ci.type?.name === "Email"
);
return {
id: contact.id,
cwId: contact.id,
firstName: contact.firstName,
lastName: contact.lastName,
inactive: contact.inactiveFlag ?? false,
@@ -303,16 +303,41 @@ export class CompanyController {
};
}
if (opts?.includePrimaryContact && this.cw_Data?.defaultContact) {
cw_Data.primaryContact = this._serializeContact(
this.cw_Data.defaultContact
);
if (opts?.includePrimaryContact) {
if (this.cw_Data?.defaultContact) {
cw_Data.primaryContact = this._serializeContact(
this.cw_Data.defaultContact
);
} else if (this._defaultContact) {
const c = this._defaultContact;
cw_Data.primaryContact = {
cwId: c.id,
firstName: c.firstName,
lastName: c.lastName,
inactive: !c.active,
title: c.title ?? null,
phone: c.phone ?? null,
email: c.email ?? null,
};
}
}
if (opts?.includeAllContacts && this.cw_Data?.allContacts) {
cw_Data.allContacts = this.cw_Data.allContacts.map((c) =>
this._serializeContact(c)
);
if (opts?.includeAllContacts) {
if (this.cw_Data?.allContacts) {
cw_Data.allContacts = this.cw_Data.allContacts.map((c) =>
this._serializeContact(c)
);
} else if (this._contacts.length > 0) {
cw_Data.allContacts = this._contacts.map((c) => ({
cwId: c.id,
firstName: c.firstName,
lastName: c.lastName,
inactive: !c.active,
title: c.title ?? null,
phone: c.phone ?? null,
email: c.email ?? null,
}));
}
}
return {
+16 -2
View File
@@ -76,6 +76,20 @@ function mapRatingNameToInterest(
return null;
}
function formatOpportunityContactName(
firstName?: string | null,
lastName?: string | null
): string {
const first = (firstName ?? "").trim();
const last = (lastName ?? "").trim();
if (first && last.toLowerCase() === "contact") {
return first;
}
return `${first} ${last}`.trim();
}
/**
* Opportunity Controller
*
@@ -290,7 +304,7 @@ export class OpportunityController {
| null
| undefined;
this.contactName = (data as any).contactName ?? (contactRel
? `${contactRel.firstName} ${contactRel.lastName}`.trim()
? formatOpportunityContactName(contactRel.firstName, contactRel.lastName)
: null);
// Site
@@ -674,7 +688,7 @@ export class OpportunityController {
id: contact.id,
contact: {
id: contact.id,
name: `${contact.firstName} ${contact.lastName}`.trim(),
name: formatOpportunityContactName(contact.firstName, contact.lastName),
},
company: contact.company
? {
+2 -1
View File
@@ -6,7 +6,8 @@ import { events } from "./modules/globalEvents";
import { setupEventDebugger } from "./modules/logging/eventDebugger";
import { signPermissions } from "./modules/permission-utils/signPermissions";
import { RoleController } from "./controllers/RoleController";
import { initializeWorkerSystem, getBoss } from "./workert";
import { initializeWorkerSystem } from "./workert";
import { getBoss } from "./boss-instance";
import { WorkerQueue } from "./modules/workers/queues";
import { enqueueIncrementalSync } from "./modules/workers/incremental-sync";
import { startCommsServer } from "./modules/workers/coms";
+6 -1
View File
@@ -46,7 +46,7 @@ export const opportunities = {
// Resolve optional local FKs — nullify any that don't exist locally yet
// (the sync may be behind; these are all nullable in the schema)
const [companyExists, contactExists, siteExists] = await Promise.all([
const [companyExists, contactExists, siteExists, typeExists] = await Promise.all([
cwData.company?.id
? prisma.company.findFirst({ where: { id: cwData.company.id }, select: { id: true } })
: null,
@@ -56,16 +56,21 @@ export const opportunities = {
mapped.siteId != null
? prisma.companyAddress.findFirst({ where: { id: mapped.siteId }, select: { id: true } })
: null,
mapped.typeId != null
? prisma.opportunityType.findFirst({ where: { id: mapped.typeId }, select: { id: true } })
: null,
]);
const companyId = companyExists?.id ?? null;
const contactId = contactExists?.id ?? null;
const siteId = siteExists?.id ?? null;
const typeId = typeExists?.id ?? null;
const record = await prisma.opportunity.create({
data: {
id: cwData.id,
...mapped,
typeId,
companyId,
contactId,
siteId,
+21 -2
View File
@@ -1,5 +1,5 @@
import PdfPrinter from "pdfmake/src/Printer";
import { readFileSync } from "node:fs";
import { existsSync, readFileSync } from "node:fs";
import { join } from "node:path";
export interface QuoteLineItem {
@@ -110,7 +110,26 @@ const COMPANY = {
const DEFAULT_LOGO_PATH = join(process.cwd(), "logo.png");
const fontDir = join(process.cwd(), "node_modules/pdfmake/build/fonts/Roboto");
function resolveRobotoFontDir(): string {
const candidates = [
join(process.cwd(), "node_modules/pdfmake/build/fonts/Roboto"),
join(import.meta.dir, "../../../node_modules/pdfmake/build/fonts/Roboto"),
join("/app/node_modules/pdfmake/build/fonts/Roboto"),
join("/app/api/node_modules/pdfmake/build/fonts/Roboto"),
];
for (const dir of candidates) {
if (existsSync(join(dir, "Roboto-Medium.ttf"))) {
return dir;
}
}
throw new Error(
`[pdf] Could not locate pdfmake Roboto fonts. Checked: ${candidates.join(", ")}`
);
}
const fontDir = resolveRobotoFontDir();
const fonts = {
Roboto: {
normal: join(fontDir, "Roboto-Regular.ttf"),
+1
View File
@@ -1,6 +1,7 @@
import { Server } from "socket.io";
import { events, EventTypes } from "../globalEvents";
import { WorkerQueue } from "./queues";
import { reserveWorkerId } from "../../workert";
function emitGlobalEvent<K extends keyof EventTypes>(
name: K,
+50 -1
View File
@@ -1,5 +1,6 @@
import { Socket } from "socket.io-client";
import { executeFullDalpuriSync, executeForcedIncrementalDalpuriSync } from "dalpuri";
import { prisma } from "../../constants";
/**
* Execute a full sync from Dalpuri (ConnectWise) to the API database.
@@ -14,5 +15,53 @@ export async function executeFullSync(_workerSocket: Socket): Promise<void> {
* Called every 5 seconds via PgBoss from the API process interval.
*/
export async function executeIncrementalSync(): Promise<void> {
return executeForcedIncrementalDalpuriSync();
let jobRunId: string | undefined;
try {
const run = await prisma.syncJobRun.create({
data: {
jobType: "INCREMENTAL_SYNC",
status: "RUNNING",
triggeredBy: "worker",
startedAt: new Date(),
},
select: { id: true },
});
jobRunId = run.id;
} catch (err) {
// Sync should still run even if tracking insert fails.
console.error("[sync] Failed to create incremental SyncJobRun", err);
}
try {
await executeForcedIncrementalDalpuriSync({ jobRunId });
if (jobRunId) {
await prisma.syncJobRun.update({
where: { id: jobRunId },
data: {
status: "COMPLETED",
completedAt: new Date(),
},
});
}
} catch (err) {
if (jobRunId) {
const errorSummary = err instanceof Error ? err.message : String(err);
await prisma.syncJobRun
.update({
where: { id: jobRunId },
data: {
status: "FAILED",
completedAt: new Date(),
errorSummary: errorSummary.slice(0, 2000),
},
})
.catch(() => {
// Best-effort update only.
});
}
throw err;
}
}
+14 -2
View File
@@ -1,4 +1,4 @@
import { getBoss } from "../../workert";
import { getBoss } from "../../boss-instance";
import { WorkerQueue } from "./queues";
/**
@@ -6,5 +6,17 @@ import { WorkerQueue } from "./queues";
* Called on an interval from the main API process so it survives worker restarts.
*/
export async function enqueueIncrementalSync(): Promise<void> {
await getBoss().send(WorkerQueue.DALPURI_INCREMENTAL_SYNC, {});
const jobId = await getBoss().send(
WorkerQueue.DALPURI_INCREMENTAL_SYNC,
{
enqueuedAt: new Date().toISOString(),
},
{
singletonKey: "dalpuri-incremental-sync",
}
);
if (!jobId) {
console.debug("[interval] DALPURI_INCREMENTAL_SYNC already pending or active");
}
}
+3 -1
View File
@@ -32,6 +32,8 @@ export async function createWorkerJob<T>(
queueType: WorkerQueue,
workFn: (workerSocket: Socket) => Promise<T>,
): Promise<T> {
const managerUrl = process.env.MANAGER_SOCKET_URL ?? "http://localhost:8671";
return new Promise((resolve, reject) => {
// Request a worker ID and namespace from the manager
socket.emit(
@@ -53,7 +55,7 @@ export async function createWorkerJob<T>(
}
// Connect to the worker-specific namespace
const workerSocket = io(`http://localhost:8671/worker-${workerId}`, {
const workerSocket = io(`${managerUrl}/worker-${workerId}`, {
reconnection: false,
});
+62 -24
View File
@@ -1,13 +1,7 @@
import { PgBoss } from "pg-boss";
import { io, Socket } from "socket.io-client";
import { WorkerQueue } from "./modules/workers/queues";
import { setupEventDebugger } from "./modules/logging/eventDebugger";
const boss = new PgBoss(process.env.DATABASE_URL!);
boss.on("error", (err) => {
console.error("[worker] PgBoss error", err);
});
import { boss, getBoss } from "./boss-instance";
let bossStartPromise: Promise<void> | null = null;
let reservationQueueReady = false;
@@ -111,19 +105,25 @@ export async function reserveWorkerId(queueType: WorkerQueue): Promise<string> {
async function ensureDalpuriSyncQueue(): Promise<void> {
try {
console.log("[worker] Creating DALPURI_FULL_SYNC queue...");
await boss.createQueue(WorkerQueue.DALPURI_FULL_SYNC);
} catch {
// Queue may already exist; ignore to keep this idempotent.
console.log("[worker] DALPURI_FULL_SYNC queue ready");
} catch (err) {
console.log("[worker] DALPURI_FULL_SYNC queue already exists (or error):", (err as Error).message);
}
try {
console.log("[worker] Creating DALPURI_INCREMENTAL_SYNC queue...");
await boss.createQueue(WorkerQueue.DALPURI_INCREMENTAL_SYNC);
} catch {
// Queue may already exist; ignore to keep this idempotent.
console.log("[worker] DALPURI_INCREMENTAL_SYNC queue ready");
} catch (err) {
console.log("[worker] DALPURI_INCREMENTAL_SYNC queue already exists (or error):", (err as Error).message);
}
try {
console.log("[worker] Creating REFRESH_SALES_METRICS queue...");
await boss.createQueue(WorkerQueue.REFRESH_SALES_METRICS);
} catch {
// Queue may already exist; ignore to keep this idempotent.
console.log("[worker] REFRESH_SALES_METRICS queue ready");
} catch (err) {
console.log("[worker] REFRESH_SALES_METRICS queue already exists (or error):", (err as Error).message);
}
}
@@ -138,14 +138,6 @@ export async function initializeWorkerSystem(): Promise<void> {
console.log("[worker] Worker system initialized - ready for job enqueueing");
}
/**
* Get the PgBoss instance for direct job enqueueing.
* Must call initializeWorkerSystem() first.
*/
export function getBoss(): PgBoss {
return boss;
}
if (import.meta.main) {
// if (Bun.env.NODE_ENV === "development") {
// setupEventDebugger({ processLabel: "WORKER" });
@@ -155,25 +147,71 @@ if (import.meta.main) {
console.log(
`[worker] Connecting to PgBoss on DATABASE_URL and SocketIO on ${process.env.MANAGER_SOCKET_URL ?? "http://localhost:8671"}`
);
console.log(`[worker] DATABASE_URL set: ${!!process.env.DATABASE_URL}`);
// Ensure PgBoss is connected and queues exist
await ensureBossStarted();
console.log("[worker] Starting PgBoss...");
try {
await Promise.race([
ensureBossStarted(),
new Promise<never>((_, reject) =>
setTimeout(() => reject(new Error("boss.start() timed out after 30s")), 30_000)
),
]);
} catch (err) {
console.error("[worker] FATAL: PgBoss failed to start:", err);
process.exit(1);
}
console.log("[worker] PgBoss started successfully");
console.log("[worker] Ensuring sync queues...");
await ensureDalpuriSyncQueue();
console.log("[worker] Sync queues ready");
// Register job handler for DALPURI_FULL_SYNC
console.log("[worker] Importing sync-manager...");
const { enqueueDalpuriFullSync } = await import("./modules/workers/sync-manager");
console.log("[worker] Importing dalpuri-sync...");
const { executeIncrementalSync } = await import("./modules/workers/dalpuri-sync");
console.log("[worker] Importing incremental-sync...");
const { enqueueIncrementalSync } = await import("./modules/workers/incremental-sync");
await boss.work(WorkerQueue.DALPURI_FULL_SYNC, async () => {
const socket = await ensureManagerSocketReady();
await enqueueDalpuriFullSync();
await enqueueDalpuriFullSync(socket);
});
console.log("[worker] Registered DALPURI_FULL_SYNC job handler");
await boss.work(WorkerQueue.DALPURI_INCREMENTAL_SYNC, async () => {
await executeIncrementalSync();
const startedAt = Date.now();
console.log("[worker] DALPURI_INCREMENTAL_SYNC started");
try {
await executeIncrementalSync();
console.log(
`[worker] DALPURI_INCREMENTAL_SYNC completed in ${Date.now() - startedAt}ms`
);
} catch (err) {
console.error(
`[worker] DALPURI_INCREMENTAL_SYNC failed in ${Date.now() - startedAt}ms`,
err
);
throw err;
}
});
console.log("[worker] Registered DALPURI_INCREMENTAL_SYNC job handler");
const enqueueIncrementalWithLogging = () => {
enqueueIncrementalSync().catch((err) => {
console.error(
`[worker] interval enqueueIncrementalSync failed: ${err?.message ?? err}`
);
});
};
// Keep a worker-local 5s scheduler so incremental sync continues even when
// API interval scheduling is unavailable.
enqueueIncrementalWithLogging();
setInterval(enqueueIncrementalWithLogging, 5_000);
console.log("[worker] Started 5-second incremental enqueue interval");
// Register job handler for REFRESH_SALES_METRICS
const { executeSalesMetricsRefresh } = await import("./modules/workers/sales-metrics");
await boss.work(WorkerQueue.REFRESH_SALES_METRICS, async (jobs) => {
-71
View File
@@ -1,71 +0,0 @@
import axios from "axios";
const connectWiseApi = axios.create({
baseURL: `https://ttscw.totaltech.net/v4_6_release/apis/3.0/`,
headers: {
Authorization: `Basic ${process.env.CW_BASIC_TOKEN}`,
clientId: `${process.env.CW_CLIENT_ID}`,
"Content-Type": "application/json",
},
});
/**
 * Audit: list ConnectWise catalog items that are flagged inactive but still
 * report on-hand inventory.
 *
 * Flow:
 *   1. Page through /procurement/catalog with conditions=inactiveFlag=true.
 *   2. For each item (parallel batches of 50) sum `onHand` across the item's
 *      inventory rows.
 *   3. Print a JSON report of items whose total on-hand quantity is > 0.
 *
 * Fix over the previous revision: failed inventory lookups were swallowed by
 * an empty catch, so an item with stock whose lookup errored silently vanished
 * from the report. Lookups stay best-effort, but failures are now counted and
 * surfaced so missing data is visible.
 */
async function main() {
  // Fetch inactive catalog items one page at a time until an empty page.
  const pageSize = 1000;
  let page = 1;
  const inactiveItems: any[] = [];
  while (true) {
    const response = await connectWiseApi.get(
      `/procurement/catalog?page=${page}&pageSize=${pageSize}&conditions=inactiveFlag=true&fields=id,identifier,description,_info`,
    );
    if (response.data.length === 0) break;
    inactiveItems.push(...response.data);
    page++;
  }
  console.log(`Found ${inactiveItems.length} inactive catalog items`);
  console.log(`Checking inventory for each (batches of 50)...\n`);
  const withStock: any[] = [];
  const batchSize = 50;
  // Lookups that errored; reported at the end so "no stock" is
  // distinguishable from "inventory call failed".
  let failedLookups = 0;
  for (let i = 0; i < inactiveItems.length; i += batchSize) {
    const batch = inactiveItems.slice(i, i + batchSize);
    await Promise.all(
      batch.map(async (item) => {
        try {
          const res = await connectWiseApi.get(
            `/procurement/catalog/${item.id}/inventory?fields=onHand`,
          );
          // Sum on-hand quantity across all warehouse rows for the item.
          const totalOnHand = (res.data as { onHand: number }[]).reduce(
            (sum, e) => sum + (e.onHand || 0),
            0,
          );
          if (totalOnHand > 0) {
            withStock.push({
              id: item.id,
              identifier: item.identifier,
              description: item.description,
              totalOnHand,
            });
          }
        } catch {
          // Best-effort: a single failed lookup must not abort the audit,
          // but it is counted so the gap shows up in the final report.
          failedLookups++;
        }
      }),
    );
    const done = Math.min(i + batchSize, inactiveItems.length);
    if (done % 500 === 0 || done === inactiveItems.length) {
      console.log(`  ${done}/${inactiveItems.length} checked`);
    }
  }
  console.log(
    `\nInactive items with inventory: ${withStock.length}/${inactiveItems.length}\n`,
  );
  if (failedLookups > 0) {
    console.log(
      `⚠ ${failedLookups} inventory lookups failed and were skipped\n`,
    );
  }
  if (withStock.length > 0) {
    console.log(JSON.stringify(withStock, null, 2));
  }
}
main().catch(console.error);
-600
View File
@@ -1,600 +0,0 @@
/**
* Test Script: CW Forecast Item Edit & Partial Cancellation
*
* This script performs read-write operations against the ConnectWise API:
*
* 1. Search all open opportunities for a forecast item with description
* matching "labor Special Order" (case-insensitive).
* 2. Report the current state of that item (price, cost, qty, etc.).
* 3. PATCH the item: revenue → 72,000 | cost → 8,500 | quantity → 67
* 4. Verify the update by re-fetching the forecast.
* 5. Cancel 13 units via the linked procurement product
* (partial cancellation: quantityCancelled = 13).
* 6. Verify the cancellation by re-fetching procurement data.
* 7. Report on every step.
*
* Usage: bun run test-cw-edit-item.ts
*/
import axios from "axios";
// Pre-authenticated ConnectWise Manage REST client. Credentials come from the
// CW_BASIC_TOKEN / CW_CLIENT_ID environment variables; every request is capped
// at a 30-second timeout.
const cw = axios.create({
  baseURL: "https://ttscw.totaltech.net/v4_6_release/apis/3.0/",
  headers: {
    Authorization: `Basic ${process.env.CW_BASIC_TOKEN}`,
    clientId: `${process.env.CW_CLIENT_ID}`,
    "Content-Type": "application/json",
  },
  timeout: 30_000,
});
// ── Helpers ───────────────────────────────────────────────────────────────────

/** Prefixed console logger: prints a blank line followed by "[label] ...". */
function log(label: string, ...rest: unknown[]): void {
  console.log(`\n[${label}]`, ...rest);
}

/** Prints a 72-character horizontal rule. */
function divider(): void {
  console.log("─".repeat(72));
}

/** Resolves after the given number of milliseconds. */
function sleep(ms: number): Promise<unknown> {
  return new Promise((resolve) => setTimeout(resolve, ms));
}

/** Formats a number US-style with exactly two decimal places, e.g. "1,234.50". */
function fmt(n: number): string {
  return n.toLocaleString("en-US", {
    minimumFractionDigits: 2,
    maximumFractionDigits: 2,
  });
}
// ── Types (minimal, for this script) ──────────────────────────────────────────
// One line of an opportunity forecast as returned by
// GET /sales/opportunities/{id}/forecast. Only the fields this script touches
// are modelled; everything else flows through the index signature.
interface ForecastItem {
  id: number;
  forecastDescription: string;
  productDescription: string;
  quantity: number;
  // Line totals (the script patches these with unit value × quantity).
  revenue: number;
  cost: number;
  margin: number;
  forecastType: string;
  sequenceNumber: number;
  catalogItem?: { id: number; identifier: string };
  status?: { id: number; name: string };
  opportunity?: { id: number; name: string };
  [key: string]: unknown;
}
// The forecast document that contains the items for one opportunity.
interface Forecast {
  id: number;
  forecastItems: ForecastItem[];
  [key: string]: unknown;
}
// A procurement product row, linked back to a forecast item via
// forecastDetailId. Cancellation state is patched here, not on the item.
interface ProcurementProduct {
  id: number;
  forecastDetailId: number;
  description: string;
  quantity: number;
  price: number;
  cost: number;
  cancelledFlag: boolean;
  quantityCancelled: number;
  cancelledReason: string | null;
  cancelledBy: string | null;
  cancelledDate: string | null;
  opportunity?: { id: number };
  [key: string]: unknown;
}
// ── Main ──────────────────────────────────────────────────────────────────────
/**
 * Runs the full read-write probe against opportunity 5150:
 * search for the "special order" forecast item → report its state → PATCH
 * revenue/cost/quantity → verify → partially cancel 13 units via the linked
 * procurement product (creating one if none is linked) → final re-fetch and
 * pass/fail summary.
 *
 * NOTE(review): this mutates live ConnectWise data and does not restore the
 * previous values afterwards — confirm it only ever runs against test data.
 */
async function main() {
  divider();
  log("START", "CW Forecast Item Edit & Cancellation Test");
  log("START", `Timestamp: ${new Date().toISOString()}`);
  divider();
  // ── Step 1: Find the "labor Special Order" forecast item ────────────────
  const OPP_ID = 5150;
  log(
    "SEARCH",
    `Looking for forecast item matching "labor Special Order" on opportunity ${OPP_ID}...`,
  );
  // Fetch the forecast for opportunity 5150 directly
  let targetOppId: number = OPP_ID;
  let targetItem: ForecastItem | null = null;
  let targetForecast: Forecast | null = null;
  const forecastRes = await cw.get(`/sales/opportunities/${OPP_ID}/forecast`);
  targetForecast = forecastRes.data as Forecast;
  // Case-insensitive match on either description field.
  const match = (targetForecast.forecastItems ?? []).find(
    (fi: ForecastItem) =>
      fi.forecastDescription?.toLowerCase().includes("special order") ||
      fi.productDescription?.toLowerCase().includes("special order"),
  );
  if (match) {
    targetItem = match;
    log("SEARCH", `✓ FOUND forecast item on opportunity ${OPP_ID}`);
  }
  // No match: dump every item on the opportunity to aid debugging, then abort.
  if (!targetItem || !targetForecast) {
    log(
      "SEARCH",
      `✗ No "labor Special Order" item found on opportunity ${OPP_ID}.`,
    );
    log("SEARCH", "All forecast items on this opportunity:");
    for (const fi of targetForecast.forecastItems ?? []) {
      console.log(
        ` id=${fi.id} "${fi.forecastDescription}" / "${fi.productDescription}"`,
      );
    }
    log("SEARCH", "Aborting.");
    process.exit(1);
  }
  // ── Step 2: Report current state ────────────────────────────────────────
  divider();
  log("CURRENT STATE", "Forecast item details BEFORE edit:");
  console.log(` Opportunity ID: ${targetOppId}`);
  console.log(` Forecast Item ID: ${targetItem.id}`);
  console.log(` Forecast Description: ${targetItem.forecastDescription}`);
  console.log(` Product Description: ${targetItem.productDescription}`);
  console.log(
    ` Catalog Item: ${targetItem.catalogItem?.identifier ?? "(none)"} (cwId=${targetItem.catalogItem?.id ?? "N/A"})`,
  );
  console.log(` Forecast Type: ${targetItem.forecastType}`);
  console.log(
    ` Status: ${targetItem.status?.name ?? "?"} (id=${targetItem.status?.id ?? "?"})`,
  );
  console.log(` Sequence Number: ${targetItem.sequenceNumber}`);
  console.log(` ──────────────────────────────────`);
  console.log(` Quantity: ${targetItem.quantity}`);
  console.log(` Revenue (Price): $${fmt(targetItem.revenue)}`);
  console.log(` Cost: $${fmt(targetItem.cost)}`);
  console.log(` Margin: $${fmt(targetItem.margin)}`);
  // Also report all items on this opportunity for context
  const allItems = targetForecast.forecastItems ?? [];
  log(
    "CONTEXT",
    `Total forecast items on this opportunity: ${allItems.length}`,
  );
  for (const fi of allItems) {
    const marker = fi.id === targetItem.id ? " ◀ TARGET" : "";
    console.log(
      ` [${fi.sequenceNumber}] id=${fi.id} "${fi.forecastDescription}" ` +
        `qty=${fi.quantity} rev=$${fmt(fi.revenue)} cost=$${fmt(fi.cost)}${marker}`,
    );
  }
  // ── Step 3: PATCH the forecast item ─────────────────────────────────────
  divider();
  const UNIT_PRICE = 72_000;
  const UNIT_COST = 8_500;
  const QTY = 67;
  const TOTAL_REVENUE = UNIT_PRICE * QTY; // $4,824,000
  const TOTAL_COST = UNIT_COST * QTY; // $569,500
  log("EDIT", "Patching forecast item...");
  log(
    "EDIT",
    ` Unit price: $${fmt(UNIT_PRICE)} × ${QTY} = $${fmt(TOTAL_REVENUE)} (revenue)`,
  );
  log(
    "EDIT",
    ` Unit cost: $${fmt(UNIT_COST)} × ${QTY} = $${fmt(TOTAL_COST)} (cost)`,
  );
  log("EDIT", ` Quantity: ${QTY}`);
  // Find the index of our target item in the forecast array
  // (the PATCH paths below address items by array position, not by id).
  const forecastItems = targetForecast.forecastItems ?? [];
  const targetIdx = forecastItems.findIndex((fi) => fi.id === targetItem!.id);
  if (targetIdx === -1) {
    log(
      "EDIT",
      "✗ Could not find target item index in forecast array. Aborting.",
    );
    process.exit(1);
  }
  log("EDIT", `Target item is at index ${targetIdx} in forecastItems array.`);
  const patchOps = [
    {
      op: "replace",
      path: `/forecastItems/${targetIdx}/revenue`,
      value: TOTAL_REVENUE,
    },
    {
      op: "replace",
      path: `/forecastItems/${targetIdx}/cost`,
      value: TOTAL_COST,
    },
    { op: "replace", path: `/forecastItems/${targetIdx}/quantity`, value: QTY },
  ];
  log("EDIT", "Patch operations:");
  for (const op of patchOps) {
    console.log(` ${op.op} ${op.path}${op.value}`);
  }
  try {
    const patchRes = await cw.patch(
      `/sales/opportunities/${targetOppId}/forecast`,
      patchOps,
    );
    const updatedForecast: Forecast = patchRes.data;
    const updatedItem = (updatedForecast.forecastItems ?? [])[targetIdx];
    if (!updatedItem) {
      log("EDIT", "✗ Item not found at expected index after PATCH.");
    } else {
      log("EDIT", "✓ PATCH successful. Updated item:");
      console.log(` Forecast Item ID: ${updatedItem.id}`);
      console.log(` Forecast Description: ${updatedItem.forecastDescription}`);
      console.log(` Quantity: ${updatedItem.quantity}`);
      console.log(` Revenue (Price): $${fmt(updatedItem.revenue)}`);
      console.log(` Cost: $${fmt(updatedItem.cost)}`);
      console.log(` Margin: $${fmt(updatedItem.margin)}`);
      // Verify values match what we set
      const checks = [
        {
          field: "revenue",
          expected: TOTAL_REVENUE,
          actual: updatedItem.revenue,
        },
        { field: "cost", expected: TOTAL_COST, actual: updatedItem.cost },
        { field: "quantity", expected: QTY, actual: updatedItem.quantity },
      ];
      log("VERIFY EDIT", "Checking values match requested:");
      for (const check of checks) {
        const ok = check.actual === check.expected;
        console.log(
          ` ${ok ? "✓" : "✗"} ${check.field}: expected=${check.expected}, actual=${check.actual}`,
        );
      }
      // Update our reference for the cancellation step
      targetItem = updatedItem;
    }
  } catch (err: any) {
    log("EDIT", `✗ PATCH failed: ${err.response?.status ?? err.message}`);
    if (err.response?.data) {
      console.log(" Response:", JSON.stringify(err.response.data, null, 2));
    }
    // If quantity PATCH failed (read-only), try without quantity
    if (err.response?.status === 400 || err.response?.status === 422) {
      log(
        "EDIT",
        "Retrying without quantity (may be read-only on forecast items)...",
      );
      const retryOps = patchOps.filter((op) => !op.path.endsWith("/quantity"));
      try {
        const retryRes = await cw.patch(
          `/sales/opportunities/${targetOppId}/forecast`,
          retryOps,
        );
        const retryForecast: Forecast = retryRes.data;
        const retryItem = (retryForecast.forecastItems ?? [])[targetIdx];
        if (retryItem) {
          log(
            "EDIT",
            "✓ Retry PATCH successful (without quantity). Updated item:",
          );
          console.log(
            ` Quantity: ${retryItem.quantity} (unchanged — read-only)`,
          );
          console.log(` Revenue (Price): $${fmt(retryItem.revenue)}`);
          console.log(` Cost: $${fmt(retryItem.cost)}`);
          console.log(` Margin: $${fmt(retryItem.margin)}`);
          targetItem = retryItem;
        }
      } catch (retryErr: any) {
        log(
          "EDIT",
          `✗ Retry also failed: ${retryErr.response?.status ?? retryErr.message}`,
        );
        if (retryErr.response?.data) {
          console.log(
            " Response:",
            JSON.stringify(retryErr.response.data, null, 2),
          );
        }
      }
    }
  }
  // ── Step 4: Re-fetch and confirm final forecast state ───────────────────
  divider();
  log("RE-FETCH", "Fetching forecast to confirm final state...");
  await sleep(500); // brief pause before re-reading
  const confirmRes = await cw.get(
    `/sales/opportunities/${targetOppId}/forecast`,
  );
  const confirmedForecast: Forecast = confirmRes.data;
  const confirmedItem = (confirmedForecast.forecastItems ?? []).find(
    (fi) => fi.id === targetItem!.id,
  );
  if (confirmedItem) {
    log("CONFIRMED STATE", "Forecast item after edit:");
    console.log(` Forecast Item ID: ${confirmedItem.id}`);
    console.log(` Forecast Description: ${confirmedItem.forecastDescription}`);
    console.log(` Quantity: ${confirmedItem.quantity}`);
    console.log(` Revenue (Price): $${fmt(confirmedItem.revenue)}`);
    console.log(` Cost: $${fmt(confirmedItem.cost)}`);
    console.log(` Margin: $${fmt(confirmedItem.margin)}`);
  } else {
    log(
      "CONFIRMED STATE",
      "⚠ Could not find item by original ID — it may have been regenerated.",
    );
    log("CONFIRMED STATE", "All current forecast items:");
    for (const fi of confirmedForecast.forecastItems ?? []) {
      console.log(
        ` id=${fi.id} "${fi.forecastDescription}" qty=${fi.quantity} rev=$${fmt(fi.revenue)} cost=$${fmt(fi.cost)}`,
      );
    }
  }
  // ── Step 5: Cancel 13 items via procurement product ─────────────────────
  divider();
  log("CANCEL", "Cancelling 13 units on this item via procurement product...");
  // First, find existing procurement products linked to this opportunity
  const procRes = await cw.get(
    `/procurement/products?conditions=${encodeURIComponent(`opportunity/id=${targetOppId}`)}&pageSize=1000`,
  );
  const procProducts: ProcurementProduct[] = procRes.data;
  log(
    "CANCEL",
    `Found ${procProducts.length} procurement product(s) on this opportunity.`,
  );
  if (procProducts.length > 0) {
    for (const pp of procProducts) {
      console.log(
        ` Proc id=${pp.id} forecastDetailId=${pp.forecastDetailId} ` +
          `"${pp.description}" qty=${pp.quantity} price=$${fmt(pp.price ?? 0)} ` +
          `cancelled=${pp.cancelledFlag} qtyCancelled=${pp.quantityCancelled}`,
      );
    }
  }
  // Find the procurement product linked to our forecast item
  const linkedProc = procProducts.find(
    (pp) => pp.forecastDetailId === targetItem!.id,
  );
  if (linkedProc) {
    log("CANCEL", `Found linked procurement product: id=${linkedProc.id}`);
    log(
      "CANCEL",
      `Current state: cancelled=${linkedProc.cancelledFlag}, quantityCancelled=${linkedProc.quantityCancelled}`,
    );
    log("CANCEL", "Patching: quantityCancelled → 13, cancelledFlag → true");
    try {
      const cancelRes = await cw.patch(
        `/procurement/products/${linkedProc.id}`,
        [
          { op: "replace", path: "cancelledFlag", value: true },
          { op: "replace", path: "quantityCancelled", value: 13 },
          {
            op: "replace",
            path: "cancelledReason",
            value: "Test cancellation — 13 units",
          },
        ],
      );
      log("CANCEL", "✓ Cancellation PATCH successful.");
      console.log(` cancelledFlag: ${cancelRes.data.cancelledFlag}`);
      console.log(` quantityCancelled: ${cancelRes.data.quantityCancelled}`);
      console.log(` cancelledReason: ${cancelRes.data.cancelledReason}`);
      console.log(
        ` cancelledBy: ${cancelRes.data.cancelledBy ?? "N/A"}`,
      );
      console.log(
        ` cancelledDate: ${cancelRes.data.cancelledDate ?? "N/A"}`,
      );
    } catch (err: any) {
      log(
        "CANCEL",
        `✗ Cancellation PATCH failed: ${err.response?.status ?? err.message}`,
      );
      if (err.response?.data) {
        console.log(" Response:", JSON.stringify(err.response.data, null, 2));
      }
    }
  } else {
    // No linked product — create one first, then cancel part of it.
    log(
      "CANCEL",
      `No procurement product linked to forecast item id=${targetItem!.id}.`,
    );
    log(
      "CANCEL",
      "Creating a procurement product first, then cancelling 13...",
    );
    try {
      // Create a procurement product linked to this forecast item
      const createProcRes = await cw.post("/procurement/products", {
        catalogItem: targetItem!.catalogItem?.id
          ? { id: targetItem!.catalogItem.id }
          : undefined,
        description:
          targetItem!.forecastDescription || targetItem!.productDescription,
        quantity: targetItem!.quantity || 67,
        price: targetItem!.revenue || 72_000,
        cost: targetItem!.cost || 8_500,
        billableOption: "Billable",
        opportunity: { id: targetOppId },
        forecastDetailId: targetItem!.id,
      });
      const newProc = createProcRes.data;
      log("CANCEL", `✓ Created procurement product id=${newProc.id}`);
      console.log(` forecastDetailId: ${newProc.forecastDetailId}`);
      console.log(` description: ${newProc.description}`);
      console.log(` quantity: ${newProc.quantity}`);
      console.log(` price: $${fmt(newProc.price ?? 0)}`);
      console.log(` cost: $${fmt(newProc.cost ?? 0)}`);
      // Now cancel 13 units
      log("CANCEL", "Patching procurement product: quantityCancelled → 13...");
      const cancelRes = await cw.patch(`/procurement/products/${newProc.id}`, [
        { op: "replace", path: "cancelledFlag", value: true },
        { op: "replace", path: "quantityCancelled", value: 13 },
        {
          op: "replace",
          path: "cancelledReason",
          value: "Test cancellation — 13 units",
        },
      ]);
      log("CANCEL", "✓ Cancellation PATCH successful.");
      console.log(` cancelledFlag: ${cancelRes.data.cancelledFlag}`);
      console.log(` quantityCancelled: ${cancelRes.data.quantityCancelled}`);
      console.log(` cancelledReason: ${cancelRes.data.cancelledReason}`);
      console.log(
        ` cancelledBy: ${cancelRes.data.cancelledBy ?? "N/A"}`,
      );
      console.log(
        ` cancelledDate: ${cancelRes.data.cancelledDate ?? "N/A"}`,
      );
    } catch (err: any) {
      log("CANCEL", `✗ Failed: ${err.response?.status ?? err.message}`);
      if (err.response?.data) {
        console.log(" Response:", JSON.stringify(err.response.data, null, 2));
      }
    }
  }
  // ── Step 6: Final verification ──────────────────────────────────────────
  divider();
  log("FINAL VERIFY", "Re-fetching all data for final report...");
  await sleep(500);
  // Re-fetch forecast; prefer lookup by id, fall back to the description
  // match in case the edit regenerated item ids.
  const finalForecastRes = await cw.get(
    `/sales/opportunities/${targetOppId}/forecast`,
  );
  const finalForecast: Forecast = finalForecastRes.data;
  const finalItem =
    (finalForecast.forecastItems ?? []).find(
      (fi) => fi.id === targetItem!.id,
    ) ??
    (finalForecast.forecastItems ?? []).find(
      (fi) =>
        fi.forecastDescription?.toLowerCase().includes("special order") ||
        fi.productDescription?.toLowerCase().includes("special order"),
    );
  // Re-fetch procurement
  const finalProcRes = await cw.get(
    `/procurement/products?conditions=${encodeURIComponent(`opportunity/id=${targetOppId}`)}&pageSize=1000`,
  );
  const finalProcs: ProcurementProduct[] = finalProcRes.data;
  log("FINAL STATE — FORECAST ITEM", "");
  if (finalItem) {
    console.log(` Forecast Item ID: ${finalItem.id}`);
    console.log(` Forecast Description: ${finalItem.forecastDescription}`);
    console.log(` Quantity: ${finalItem.quantity}`);
    console.log(` Revenue (Price): $${fmt(finalItem.revenue)}`);
    console.log(` Cost: $${fmt(finalItem.cost)}`);
    console.log(` Margin: $${fmt(finalItem.margin)}`);
  } else {
    console.log(" ⚠ Target item not found in final forecast.");
  }
  log("FINAL STATE — PROCUREMENT", `${finalProcs.length} product(s):`);
  for (const pp of finalProcs) {
    console.log(
      ` id=${pp.id} forecastDetailId=${pp.forecastDetailId} ` +
        `"${pp.description}" qty=${pp.quantity} cancelled=${pp.cancelledFlag} ` +
        `qtyCancelled=${pp.quantityCancelled} reason="${pp.cancelledReason ?? ""}"`,
    );
  }
  // ── Summary ─────────────────────────────────────────────────────────────
  divider();
  log("SUMMARY", "");
  // After cancelling 13 of 67, CW recalculates totals for remaining 54 units
  const expectedFinalRevenue = Math.round(UNIT_PRICE * (QTY - 13) * 100) / 100;
  const expectedFinalCost = Math.round(UNIT_COST * (QTY - 13) * 100) / 100;
  // Sub-dollar tolerance absorbs rounding drift in the returned totals.
  const editOk = finalItem
    ? Math.abs(finalItem.revenue - expectedFinalRevenue) < 1 &&
      Math.abs(finalItem.cost - expectedFinalCost) < 1
    : false;
  const qtyOk = finalItem ? finalItem.quantity === QTY : false;
  if (finalItem) {
    console.log(
      ` Expected final revenue ($${fmt(UNIT_PRICE)} × ${QTY - 13}): $${fmt(expectedFinalRevenue)}`,
    );
    console.log(
      ` Actual final revenue: $${fmt(finalItem.revenue)}`,
    );
    console.log(
      ` Expected final cost ($${fmt(UNIT_COST)} × ${QTY - 13}): $${fmt(expectedFinalCost)}`,
    );
    console.log(
      ` Actual final cost: $${fmt(finalItem.cost)}`,
    );
  }
  const cancelOk = finalProcs.some(
    (pp) =>
      pp.forecastDetailId === targetItem!.id &&
      pp.cancelledFlag === true &&
      pp.quantityCancelled === 13,
  );
  console.log(
    ` Unit price $${fmt(UNIT_PRICE)}/ea: `,
    editOk ? "✓ PASS" : "✗ FAIL",
  );
  console.log(
    ` Unit cost $${fmt(UNIT_COST)}/ea: `,
    editOk ? "✓ PASS" : "✗ FAIL",
  );
  console.log(
    ` Quantity set to ${QTY}: `,
    qtyOk ? "✓ PASS" : "✗ FAIL (may be read-only)",
  );
  console.log(
    " 13 units cancelled: ",
    cancelOk ? "✓ PASS" : "✗ FAIL",
  );
  const allPass = editOk && qtyOk && cancelOk;
  divider();
  log(
    "RESULT",
    allPass
      ? "✓ ALL CHECKS PASSED"
      : "⚠ SOME CHECKS DID NOT PASS — review output above",
  );
  divider();
}
main().catch((err) => {
  console.error("\n[FATAL]", err.response?.data ?? err.message);
  process.exit(1);
});
-442
View File
@@ -1,442 +0,0 @@
/**
* Test Script: Forecast Item Resequencing & Procurement Linkage
*
* Validates the CW forecast API behaviour discovered via probing:
* - `sequenceNumber` is read-only — display order = array position
* - PUT always regenerates all forecast item IDs
* - Revenue & cost are preserved through PUT
* - PATCH on /forecast with `/forecastItems/{idx}/field` paths works
* for some fields (e.g. forecastDescription) and preserves IDs
*
* Test flow:
* 1. Create opportunity under XYZ Test Company
* 2. Add 4 products via POST
* 3. Create procurement products (linked by forecastDetailId)
* 4. Cancel one procurement product
* 5. Reorder forecast items via PUT (reverse order)
* 6. Remap procurement forecastDetailId to new IDs
* 7. Verify: order correct, prices preserved, cancellation data intact
* 8. Clean up
*
* Usage: bun run test-forecast-resequence.ts
*/
import axios from "axios";
// Pre-authenticated ConnectWise Manage REST client. Credentials are read from
// the CW_BASIC_TOKEN / CW_CLIENT_ID environment variables.
// NOTE(review): unlike the edit-item test script, no `timeout` is set here, so
// a stalled request will hang this test indefinitely.
const cw = axios.create({
  baseURL: "https://ttscw.totaltech.net/v4_6_release/apis/3.0/",
  headers: {
    Authorization: `Basic ${process.env.CW_BASIC_TOKEN}`,
    clientId: `${process.env.CW_CLIENT_ID}`,
    "Content-Type": "application/json",
  },
});
/** Prefixed console logger: prints a blank line followed by "[label] ...". */
function log(label: string, ...rest: unknown[]): void {
  console.log(`\n[${label}]`, ...rest);
}

/** Resolves after the given number of milliseconds. */
function sleep(ms: number): Promise<unknown> {
  return new Promise((resolve) => setTimeout(resolve, ms));
}
/**
 * Exercises forecast reordering via PUT and verifies linked procurement data
 * survives it: creates a throwaway opportunity with 4 forecast items, links
 * procurement products by forecastDetailId, cancels one, reverses the item
 * order via PUT (which regenerates forecast item ids), remaps the procurement
 * links to the new ids, verifies order/prices/cancellation, then cleans up
 * everything it created in a `finally` block.
 */
async function main() {
  // ── 1. Find company ─────────────────────────────────────────────────────
  log("SETUP", "Finding XYZ Test Company...");
  const compRes = await cw.get(
    `/company/companies?conditions=${encodeURIComponent("name like 'XYZ Test%'")}&fields=id,identifier,name`,
  );
  if (compRes.data.length === 0) {
    console.error("ERROR: 'XYZ Test Company' not found.");
    process.exit(1);
  }
  const company = compRes.data[0];
  log("SETUP", `Company: ${company.name} (id=${company.id})`);
  // ── 2. Create opportunity ───────────────────────────────────────────────
  log("SETUP", "Creating test opportunity...");
  // NOTE(review): contact id 1 and sales-rep id 153 are hardcoded for this
  // environment — confirm they exist wherever this script runs.
  const oppRes = await cw.post("/sales/opportunities", {
    name: `[TEST] Resequence ${new Date().toISOString().slice(0, 16)}`,
    company: { id: company.id },
    contact: { id: 1 },
    primarySalesRep: { id: 153 },
    expectedCloseDate: new Date(Date.now() + 30 * 86_400_000)
      .toISOString()
      .replace(/\.\d{3}Z$/, "Z"),
  });
  const oppId = oppRes.data.id;
  log("SETUP", `Created opportunity id=${oppId}`);
  const forecastUrl = `/sales/opportunities/${oppId}/forecast`;
  // Track IDs for cleanup
  const procIdsToClean: number[] = [];
  try {
    // ── 3. Add 4 products ───────────────────────────────────────────────────
    log("PRODUCTS", "Adding 4 products...");
    const postRes = await cw.post(forecastUrl, {
      forecastItems: [
        {
          opportunity: { id: oppId },
          status: { id: 1 },
          forecastDescription: "Alpha",
          revenue: 100,
          cost: 50,
          forecastType: "Product",
        },
        {
          opportunity: { id: oppId },
          status: { id: 1 },
          forecastDescription: "Bravo",
          revenue: 250,
          cost: 125,
          forecastType: "Product",
        },
        {
          opportunity: { id: oppId },
          status: { id: 1 },
          forecastDescription: "Charlie",
          revenue: 30,
          cost: 10,
          forecastType: "Product",
        },
        {
          opportunity: { id: oppId },
          status: { id: 1 },
          forecastDescription: "Delta",
          revenue: 75,
          cost: 40,
          forecastType: "Product",
        },
      ],
    });
    const items: any[] = postRes.data.forecastItems ?? [];
    log("PRODUCTS", `Created ${items.length} items:`);
    for (const it of items) {
      console.log(
        ` id=${it.id} desc="${it.forecastDescription}" rev=${it.revenue} cost=${it.cost}`,
      );
    }
    // Snapshot prices (keyed by description — item ids change across PUT)
    const priceSnap = new Map<string, { rev: number; cost: number }>(
      items.map((i) => [
        i.forecastDescription,
        { rev: i.revenue, cost: i.cost },
      ]),
    );
    // ── 4. Create procurement products ──────────────────────────────────────
    log("PROCUREMENT", "Creating procurement products...");
    const procProducts: any[] = [];
    for (const item of items) {
      try {
        const pr = await cw.post("/procurement/products", {
          catalogItem: { id: 87 },
          description: item.forecastDescription,
          quantity: 1,
          price: item.revenue,
          cost: item.cost,
          billableOption: "Billable",
          opportunity: { id: oppId },
          forecastDetailId: item.id,
        });
        procProducts.push(pr.data);
        procIdsToClean.push(pr.data.id);
        console.log(
          ` ✓ Proc ${pr.data.id} → forecastDetailId=${pr.data.forecastDetailId} "${item.forecastDescription}"`,
        );
      } catch (e: any) {
        console.log(
          ` ✗ Failed: ${e.response?.status} ${JSON.stringify(e.response?.data)}`,
        );
      }
    }
    if (procProducts.length === 0) {
      log(
        "PROCUREMENT",
        "Could not create procurement products (permission issue?).",
      );
      log(
        "PROCUREMENT",
        "Will run reorder test without cancellation verification.",
      );
    }
    // ── 5. Cancel "Bravo" procurement product ───────────────────────────────
    const bravoProc = procProducts.find((p: any) => p.description === "Bravo");
    if (bravoProc) {
      log("CANCEL", `Cancelling Bravo (proc id=${bravoProc.id})...`);
      try {
        await cw.patch(`/procurement/products/${bravoProc.id}`, [
          { op: "replace", path: "cancelledFlag", value: true },
          { op: "replace", path: "quantityCancelled", value: 1 },
          {
            op: "replace",
            path: "cancelledReason",
            value: "Test cancellation",
          },
        ]);
        log("CANCEL", "✓ Cancelled.");
      } catch (e: any) {
        log(
          "CANCEL",
          `${e.response?.status} ${JSON.stringify(e.response?.data)}`,
        );
      }
    }
    // ── 5b. Check for auto-created forecast items ─────────────────────────
    await sleep(300);
    const midForecast = await cw.get(forecastUrl);
    const midItems = midForecast.data.forecastItems ?? [];
    log(
      "OBSERVE",
      `Forecast items after procurement creation: ${midItems.length} (was ${items.length})`,
    );
    if (midItems.length !== items.length) {
      log(
        "OBSERVE",
        "⚠ Creating procurement products auto-created additional forecast items!",
      );
      for (const mi of midItems) {
        const isOriginal = items.some((i: any) => i.id === mi.id);
        console.log(
          ` id=${mi.id} desc="${mi.forecastDescription}" ${isOriginal ? "(original)" : "(AUTO-CREATED by procurement)"}`,
        );
      }
    }
    // Snapshot procurement state before reorder
    const beforeProc = await cw.get(
      `/procurement/products?conditions=${encodeURIComponent(`opportunity/id=${oppId}`)}&fields=id,forecastDetailId,cancelledFlag,quantityCancelled,cancelledReason,description`,
    );
    // Build map by description for cross-PUT comparison (IDs will change)
    const beforeByDesc = new Map<string, any>();
    log(
      "SNAPSHOT",
      `${beforeProc.data.length} procurement products before reorder:`,
    );
    for (const p of beforeProc.data) {
      beforeByDesc.set(p.description, p);
      console.log(
        ` Proc ${p.id}: forecastDetailId=${p.forecastDetailId} cancelled=${p.cancelledFlag} qty=${p.quantityCancelled} reason="${p.cancelledReason ?? ""}" "${p.description}"`,
      );
    }
    // Record old procurement IDs for later comparison
    const oldProcIds = new Set(beforeProc.data.map((p: any) => p.id));
    // ── 6. Reorder: reverse ONLY the original 4 forecast items ──────────────
    log("REORDER", "Reversing forecast item order via PUT...");
    // Only reorder the original items; keep any auto-created ones in place
    const originalDescs = new Set(items.map((i: any) => i.forecastDescription));
    const originals = midItems.filter(
      (i: any) =>
        originalDescs.has(i.forecastDescription) &&
        items.some((o: any) => o.id === i.id),
    );
    const extras = midItems.filter(
      (i: any) => !originals.some((o: any) => o.id === i.id),
    );
    const reversedOriginals = [...originals].reverse();
    const reorderedAll = [...reversedOriginals, ...extras];
    // Deep-clone the fetched forecast so the PUT payload is detached from
    // the axios response object.
    const clone = JSON.parse(JSON.stringify(midForecast.data));
    clone.forecastItems = JSON.parse(JSON.stringify(reorderedAll));
    const putRes = await cw.put(forecastUrl, clone);
    const newItems: any[] = putRes.data.forecastItems ?? [];
    log("REORDER", `After PUT (${newItems.length} items):`);
    for (const it of newItems) {
      console.log(
        ` id=${it.id} desc="${it.forecastDescription}" rev=${it.revenue} cost=${it.cost}`,
      );
    }
    // Build old→new ID map by position (for original items only)
    const idMap = new Map<number, number>();
    for (let i = 0; i < reversedOriginals.length && i < newItems.length; i++) {
      idMap.set(reversedOriginals[i].id, newItems[i].id);
    }
    log("ID MAP", "Forecast item Old → New:");
    for (const [oldId, newId] of idMap) {
      console.log(` ${oldId}${newId}`);
    }
    // ── 7. Check if procurement products survived PUT ───────────────────────
    await sleep(300);
    const afterProc = await cw.get(
      `/procurement/products?conditions=${encodeURIComponent(`opportunity/id=${oppId}`)}&fields=id,forecastDetailId,cancelledFlag,quantityCancelled,cancelledReason,description`,
    );
    const newProcIds = new Set(afterProc.data.map((p: any) => p.id));
    log(
      "PROCUREMENT SURVIVAL",
      "Checking if procurement product IDs survived PUT...",
    );
    const procSurvived = [...oldProcIds].every((id) => newProcIds.has(id));
    if (procSurvived) {
      console.log(" ✓ All original procurement product IDs survived PUT.");
    } else {
      console.log(" ✗ PUT REGENERATED procurement product IDs!");
      console.log(` Before: [${[...oldProcIds].join(", ")}]`);
      console.log(` After: [${[...newProcIds].join(", ")}]`);
    }
    // Try remap if old IDs still exist
    let remapOk = true;
    if (procSurvived) {
      log("REMAP", "Updating procurement products forecastDetailId...");
      for (const pp of beforeProc.data) {
        const oldFdId = pp.forecastDetailId as number;
        const newFdId = idMap.get(oldFdId);
        if (!newFdId || newFdId === oldFdId) continue;
        try {
          await cw.patch(`/procurement/products/${pp.id}`, [
            { op: "replace", path: "forecastDetailId", value: newFdId },
          ]);
          console.log(
            ` ✓ Proc ${pp.id}: forecastDetailId ${oldFdId}${newFdId}`,
          );
        } catch (e: any) {
          remapOk = false;
          console.log(
            ` ✗ Proc ${pp.id} remap failed: ${e.response?.status} ${JSON.stringify(e.response?.data)}`,
          );
        }
      }
    } else {
      remapOk = false;
      log(
        "REMAP",
        "⚠ SKIPPED — procurement products were regenerated by PUT; old IDs no longer exist.",
      );
    }
    // ── 8. Verify ───────────────────────────────────────────────────────────
    await sleep(300);
    // 8a. Verify order (first 4 items)
    log("VERIFY ORDER", "Expected reverse: Delta, Charlie, Bravo, Alpha");
    const expectedOrder = ["Delta", "Charlie", "Bravo", "Alpha"];
    let orderOk = true;
    for (let i = 0; i < expectedOrder.length; i++) {
      const actual = newItems[i]?.forecastDescription;
      const ok = actual === expectedOrder[i];
      if (!ok) orderOk = false;
      console.log(
        ` Position ${i}: ${ok ? "✓" : "✗"} expected "${expectedOrder[i]}", got "${actual}"`,
      );
    }
    // 8b. Verify prices (by description)
    log("VERIFY PRICES", "");
    let pricesOk = true;
    for (const item of newItems) {
      const orig = priceSnap.get(item.forecastDescription);
      if (!orig) continue;
      if (item.revenue !== orig.rev || item.cost !== orig.cost) {
        pricesOk = false;
        console.log(
          ` ✗ "${item.forecastDescription}": rev ${orig.rev}${item.revenue}, cost ${orig.cost}${item.cost}`,
        );
      }
    }
    if (pricesOk) console.log(" ✓ All prices preserved.");
    // 8c. Verify cancellation data — match by description since IDs may have changed
    let cancelOk = true;
    if (procProducts.length > 0) {
      log(
        "VERIFY CANCELLATION",
        "Checking cancellation data on procurement products after PUT...",
      );
      const finalProc = await cw.get(
        `/procurement/products?conditions=${encodeURIComponent(`opportunity/id=${oppId}`)}&fields=id,forecastDetailId,cancelledFlag,quantityCancelled,cancelledReason,description`,
      );
      // Track by procIdsToClean for cleanup
      for (const p of finalProc.data) {
        if (!procIdsToClean.includes(p.id)) procIdsToClean.push(p.id);
      }
      for (const pp of finalProc.data) {
        const orig = beforeByDesc.get(pp.description);
        if (!orig) {
          console.log(
            ` ? Proc ${pp.id} "${pp.description}" — no matching pre-PUT record`,
          );
          continue;
        }
        const cancelledMatch =
          pp.cancelledFlag === orig.cancelledFlag &&
          pp.quantityCancelled === orig.quantityCancelled &&
          (pp.cancelledReason ?? "") === (orig.cancelledReason ?? "");
        if (!cancelledMatch) {
          cancelOk = false;
          console.log(
            ` ✗ Proc ${pp.id} "${pp.description}": CANCELLATION DATA CHANGED\n` +
              ` Before: cancelled=${orig.cancelledFlag} qty=${orig.quantityCancelled} reason="${orig.cancelledReason ?? ""}"\n` +
              ` After: cancelled=${pp.cancelledFlag} qty=${pp.quantityCancelled} reason="${pp.cancelledReason ?? ""}"`,
          );
        } else {
          console.log(
            ` ✓ Proc ${pp.id} "${pp.description}": cancelled=${pp.cancelledFlag} qty=${pp.quantityCancelled} reason="${pp.cancelledReason ?? ""}"`,
          );
        }
      }
    }
    // ── Summary ─────────────────────────────────────────────────────────────
    log("SUMMARY", "");
    console.log(
      " Order correct: ",
      orderOk ? "✓ PASS" : "✗ FAIL",
    );
    console.log(
      " Prices preserved: ",
      pricesOk ? "✓ PASS" : "✗ FAIL",
    );
    console.log(
      " Proc IDs survived PUT: ",
      procSurvived ? "✓ PASS" : "✗ FAIL",
    );
    console.log(
      " Procurement remap: ",
      remapOk ? "✓ PASS" : "✗ FAIL (skipped or failed)",
    );
    console.log(
      " Cancellation data preserved:",
      cancelOk ? "✓ PASS" : "✗ FAIL",
    );
    const allPass = orderOk && pricesOk && procSurvived && remapOk && cancelOk;
    log("RESULT", allPass ? "✓ ALL TESTS PASSED" : "✗ SOME TESTS FAILED");
  } finally {
    // ── Cleanup ─────────────────────────────────────────────────────────────
    // Always delete what we created, even when a verification step threw.
    log("CLEANUP", "Deleting procurement products...");
    for (const id of procIdsToClean) {
      try {
        await cw.delete(`/procurement/products/${id}`);
      } catch {}
    }
    log("CLEANUP", `Deleted ${procIdsToClean.length} procurement products.`);
    log("CLEANUP", `Deleting opportunity ${oppId}...`);
    try {
      await cw.delete(`/sales/opportunities/${oppId}`);
      log("CLEANUP", "✓ Done.");
    } catch (e: any) {
      log("CLEANUP", `${e.response?.status ?? e.message}`);
    }
  }
}
main().catch((err) => {
  console.error("\n[FATAL]", err.response?.data ?? err.message);
  process.exit(1);
});
-133
View File
@@ -1,133 +0,0 @@
// Test script to probe UniFi API endpoints for response shapes
import axios, { AxiosInstance } from "axios";
// SECURITY(review): disables TLS certificate verification for the entire
// process — tolerable only for a throwaway probe against a controller with a
// self-signed cert; never copy this into production code.
process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
// Controller connection details for the probe.
const controllerBaseUrl = "https://unifi.totaltech.net";
const site = "km9b1v8i";
const username = "admin";
// SECURITY(review): hardcoded admin credential committed to source control —
// move to an environment variable and rotate this password.
const password = "Tt$Un1fiIZth3B3$t26";
/**
 * Minimal UniFi controller client for endpoint probing: logs in (UniFi OS
 * first, legacy fallback), persists the session cookie + CSRF token, and
 * fetches the first endpoint that responds from a list of candidate paths.
 */
class TestClient {
  private client: AxiosInstance;

  constructor(baseURL: string) {
    this.client = axios.create({
      baseURL,
      // Treat redirects as success so login responses are not thrown.
      validateStatus: (s) => s >= 200 && s < 400,
    });
  }

  /** Copies session cookies and the CSRF token from a response onto the client defaults. */
  private persistSession(res: { headers: Record<string, unknown> }): void {
    const rawCookies = res.headers["set-cookie"];
    if (rawCookies) {
      const cookieList = (
        Array.isArray(rawCookies) ? rawCookies : [rawCookies]
      ) as string[];
      // Keep only the name=value part of each Set-Cookie header.
      const joined = cookieList.map((c) => c.split(";")[0]).join("; ");
      this.client.defaults.headers.common["Cookie"] = joined;
    }
    const csrfToken = res.headers["x-csrf-token"];
    if (typeof csrfToken === "string") {
      this.client.defaults.headers.common["X-CSRF-Token"] = csrfToken;
    }
  }

  /** Tries the UniFi OS login endpoint, falling back to the legacy controller endpoint. */
  async login(): Promise<void> {
    try {
      const res = await this.client.post("/api/auth/login", {
        username,
        password,
      });
      console.log("Login OK (UniFi OS)", res.status);
      this.persistSession(res);
    } catch (e) {
      // UniFi OS endpoint unavailable — attempt the legacy login path.
      const res = await this.client.post("/api/login", { username, password });
      console.log("Login OK (legacy)", res.status);
      this.persistSession(res);
    }
  }

  /** GETs the first of `paths` that succeeds, logging the unwrapped payload; null if all fail. */
  async tryGet(label: string, paths: string[]): Promise<any> {
    for (const candidate of paths) {
      try {
        const res = await this.client.get(candidate);
        // UniFi responses usually wrap the payload in a `data` envelope.
        const payload = res.data?.data ?? res.data;
        console.log(`\n=== ${label} (${candidate}) ===`);
        console.log(JSON.stringify(payload, null, 2));
        return payload;
      } catch (e: any) {
        console.log(` Failed ${candidate}: ${e.response?.status ?? e.message}`);
      }
    }
    console.log(` Could not fetch ${label} from any path`);
    return null;
  }
}
// Probe a fixed set of UniFi endpoints and dump each response shape so sync
// code can be written against known payloads. Output goes to stdout only.
async function main() {
  const client = new TestClient(controllerBaseUrl);
  await client.login();
  // 1. WLAN Groups (AP groups in UniFi)
  await client.tryGet("WLAN Groups", [
    `/proxy/network/api/s/${site}/rest/wlangroup`,
    `/api/s/${site}/rest/wlangroup`,
  ]);
  // 2. User Groups (bandwidth/speed limit profiles)
  await client.tryGet("User Groups (Speed Profiles)", [
    `/proxy/network/api/s/${site}/rest/usergroup`,
    `/api/s/${site}/rest/usergroup`,
  ]);
  // 3. Devices - APs only (compact)
  const devices = await client.tryGet("Devices", [
    `/proxy/network/api/s/${site}/stat/device`,
  ]);
  if (devices) {
    // NOTE(review): assumes the device payload is an array — tryGet returns
    // whatever the endpoint produced; confirm before reusing this pattern.
    const aps = devices.filter((d: any) => d.type === "uap");
    console.log(`\n=== APs (${aps.length}) - compact ===`);
    aps.forEach((ap: any) => {
      console.log(
        JSON.stringify({
          _id: ap._id,
          name: ap.name,
          mac: ap.mac,
          model: ap.model,
          radio_table: ap.radio_table?.map((r: any) => ({
            radio: r.radio,
            name: r.name,
          })),
          wlangroup_id_ng: ap.wlangroup_id_ng,
          wlangroup_id_na: ap.wlangroup_id_na,
          vap_table_count: ap.vap_table?.length,
        }),
      );
    });
  }
  // 4. One full WLAN to see private_preshared_keys structure
  const wlans = await client.tryGet("WLANs", [
    `/proxy/network/api/s/${site}/rest/wlanconf`,
  ]);
  if (wlans) {
    // Log just the PPSK-related fields from each WLAN
    console.log("\n=== PPSK fields per WLAN ===");
    wlans.forEach((w: any) => {
      console.log(
        JSON.stringify({
          name: w.name,
          _id: w._id,
          private_preshared_keys_enabled: w.private_preshared_keys_enabled,
          private_preshared_keys: w.private_preshared_keys,
          ap_group_ids: w.ap_group_ids,
          ap_group_mode: w.ap_group_mode,
          wlan_band: w.wlan_band,
          wlan_bands: w.wlan_bands,
          usergroup_id: w.usergroup_id,
        }),
      );
    });
  }
}
// Errors are logged but the process still exits 0 — acceptable for a probe.
main().catch((e) => console.error(e));
-123
View File
@@ -1,123 +0,0 @@
// unifi-wifi-list.ts
import axios, { AxiosInstance } from "axios";

// WARNING(review): disables TLS certificate validation for the whole process;
// acceptable only for this throwaway script against a self-signed controller.
process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";

const controllerBaseUrl = "https://unifi.totaltech.net";
const site = "km9b1v8i";
// SECURITY(review): live admin credentials committed in plaintext — rotate
// this password and source credentials from the environment instead.
const username = "admin";
const password = "Tt$Un1fiIZth3B3$t26";

// Raw wlanconf row as returned by the controller; extra fields preserved via
// the index signature.
interface WlanConfRaw {
  _id: string;
  name?: string;
  ssid?: string;
  x_passphrase?: string;
  [key: string]: unknown;
}

// Simplified WLAN shape exposed to callers of getWlanConf().
interface WlanConf {
  id: string;
  ssid: string;
  password: string | null; // null when the controller returns no passphrase
}
/**
 * Thin UniFi controller client: authenticates (UniFi OS with a legacy
 * fallback), persists the session cookie / CSRF token, and exposes the WLAN
 * configuration in a simplified { id, ssid, password } shape.
 */
class UnifiClient {
  private client: AxiosInstance;

  constructor(baseURL: string) {
    this.client = axios.create({
      baseURL,
      validateStatus: (status) => status >= 200 && status < 400,
    });
  }

  /** Store cookies and the UniFi OS CSRF token from a login response. */
  private persistSession(res: { headers: Record<string, unknown> }): void {
    // Cookies
    const setCookie = res.headers["set-cookie"];
    if (setCookie) {
      const entries = (Array.isArray(setCookie) ? setCookie : [setCookie]) as string[];
      this.client.defaults.headers.common["Cookie"] = entries
        .map((entry) => entry.split(";")[0])
        .join("; ");
    }
    // CSRF token (UniFi OS)
    const token = res.headers["x-csrf-token"];
    if (typeof token === "string") {
      this.client.defaults.headers.common["X-CSRF-Token"] = token;
    }
  }

  /** Authenticate, preferring the UniFi OS endpoint over the legacy one. */
  async login(username: string, password: string): Promise<void> {
    const body = { username, password };
    try {
      // UniFi OS
      const res = await this.client.post("/api/auth/login", body);
      console.log("Login OK (UniFi OS)", res.status);
      this.persistSession(res);
    } catch (e) {
      // Legacy controller
      console.log("UniFi OS login failed, trying legacy...");
      const res = await this.client.post("/api/login", body);
      console.log("Login OK (legacy)", res.status);
      this.persistSession(res);
    }
  }

  /** Fetch raw wlanconf rows from the first endpoint variant that works. */
  private async fetchWlanConfRaw(site: string): Promise<WlanConfRaw[]> {
    const candidates = [
      `/proxy/network/api/s/${site}/rest/wlanconf`,
      `/api/s/${site}/rest/wlanconf`,
    ];
    for (const path of candidates) {
      try {
        const res = await this.client.get(path);
        const rows = (res.data?.data ?? res.data) as WlanConfRaw[];
        console.log(`Fetched wlan from ${path}`);
        return rows;
      } catch (e) {
        console.log(
          `Failed ${path}:`,
          axios.isAxiosError(e) ? e.response?.status : e,
        );
      }
    }
    throw new Error("Could not fetch WLAN config from any known path");
  }

  /** Map raw controller rows into the compact WlanConf shape. */
  async getWlanConf(site: string): Promise<WlanConf[]> {
    const rows = await this.fetchWlanConfRaw(site);
    return rows.map((row): WlanConf => {
      const ssid = (row.name || row.ssid || "").toString();
      const secret =
        typeof row.x_passphrase === "string" ? row.x_passphrase : null;
      return { id: row._id, ssid, password: secret };
    });
  }
}
// List every WLAN's SSID and passphrase on stdout, with distinct reporting
// for HTTP (Axios) errors vs. everything else.
async function main() {
  const unifi = new UnifiClient(controllerBaseUrl);
  try {
    await unifi.login(username, password);
    const wlans = await unifi.getWlanConf(site);
    wlans.forEach((wlan) => {
      console.log(`${wlan.ssid}: ${wlan.password ?? "<no password>"}`);
    });
  } catch (err) {
    if (axios.isAxiosError(err)) {
      console.error("HTTP error", err.response?.status, err.response?.data);
    } else {
      console.error("Error", err);
    }
  }
}
// main() already catches its own errors; this guard is belt-and-braces.
main().catch((e) => console.error(e));
-123
View File
@@ -1,123 +0,0 @@
// unifi-wifi-list.ts
// NOTE(review): this file is byte-identical to the other unifi-wifi-list.ts
// in this change set — deduplicate rather than maintaining two copies.
import axios, { AxiosInstance } from "axios";

// WARNING(review): disables TLS certificate validation for the whole process;
// acceptable only for this throwaway script against a self-signed controller.
process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";

const controllerBaseUrl = "https://unifi.totaltech.net";
const site = "km9b1v8i";
// SECURITY(review): live admin credentials committed in plaintext — rotate
// this password and source credentials from the environment instead.
const username = "admin";
const password = "Tt$Un1fiIZth3B3$t26";

// Raw wlanconf row as returned by the controller; extra fields preserved via
// the index signature.
interface WlanConfRaw {
  _id: string;
  name?: string;
  ssid?: string;
  x_passphrase?: string;
  [key: string]: unknown;
}

// Simplified WLAN shape exposed to callers of getWlanConf().
interface WlanConf {
  id: string;
  ssid: string;
  password: string | null; // null when the controller returns no passphrase
}
/**
 * Thin UniFi controller client: authenticates (UniFi OS with a legacy
 * fallback), persists the session cookie / CSRF token, and exposes the WLAN
 * configuration in a simplified { id, ssid, password } shape.
 */
class UnifiClient {
  private client: AxiosInstance;

  constructor(baseURL: string) {
    this.client = axios.create({
      baseURL,
      validateStatus: (status) => status >= 200 && status < 400,
    });
  }

  /** Store cookies and the UniFi OS CSRF token from a login response. */
  private persistSession(res: { headers: Record<string, unknown> }): void {
    // Cookies
    const setCookie = res.headers["set-cookie"];
    if (setCookie) {
      const entries = (Array.isArray(setCookie) ? setCookie : [setCookie]) as string[];
      this.client.defaults.headers.common["Cookie"] = entries
        .map((entry) => entry.split(";")[0])
        .join("; ");
    }
    // CSRF token (UniFi OS)
    const token = res.headers["x-csrf-token"];
    if (typeof token === "string") {
      this.client.defaults.headers.common["X-CSRF-Token"] = token;
    }
  }

  /** Authenticate, preferring the UniFi OS endpoint over the legacy one. */
  async login(username: string, password: string): Promise<void> {
    const body = { username, password };
    try {
      // UniFi OS
      const res = await this.client.post("/api/auth/login", body);
      console.log("Login OK (UniFi OS)", res.status);
      this.persistSession(res);
    } catch (e) {
      // Legacy controller
      console.log("UniFi OS login failed, trying legacy...");
      const res = await this.client.post("/api/login", body);
      console.log("Login OK (legacy)", res.status);
      this.persistSession(res);
    }
  }

  /** Fetch raw wlanconf rows from the first endpoint variant that works. */
  private async fetchWlanConfRaw(site: string): Promise<WlanConfRaw[]> {
    const candidates = [
      `/proxy/network/api/s/${site}/rest/wlanconf`,
      `/api/s/${site}/rest/wlanconf`,
    ];
    for (const path of candidates) {
      try {
        const res = await this.client.get(path);
        const rows = (res.data?.data ?? res.data) as WlanConfRaw[];
        console.log(`Fetched wlan from ${path}`);
        return rows;
      } catch (e) {
        console.log(
          `Failed ${path}:`,
          axios.isAxiosError(e) ? e.response?.status : e,
        );
      }
    }
    throw new Error("Could not fetch WLAN config from any known path");
  }

  /** Map raw controller rows into the compact WlanConf shape. */
  async getWlanConf(site: string): Promise<WlanConf[]> {
    const rows = await this.fetchWlanConfRaw(site);
    return rows.map((row): WlanConf => {
      const ssid = (row.name || row.ssid || "").toString();
      const secret =
        typeof row.x_passphrase === "string" ? row.x_passphrase : null;
      return { id: row._id, ssid, password: secret };
    });
  }
}
// List every WLAN's SSID and passphrase on stdout, with distinct reporting
// for HTTP (Axios) errors vs. everything else.
async function main() {
  const unifi = new UnifiClient(controllerBaseUrl);
  try {
    await unifi.login(username, password);
    const wlans = await unifi.getWlanConf(site);
    wlans.forEach((wlan) => {
      console.log(`${wlan.ssid}: ${wlan.password ?? "<no password>"}`);
    });
  } catch (err) {
    if (axios.isAxiosError(err)) {
      console.error("HTTP error", err.response?.status, err.response?.data);
    } else {
      console.error("Error", err);
    }
  }
}
// main() already catches its own errors; this guard is belt-and-braces.
main().catch((e) => console.error(e));
-15
View File
@@ -1,15 +0,0 @@
// One-off diagnostic: list the public-schema tables of the API Postgres DB.
import { PrismaPg } from "@prisma/adapter-pg";
import { PrismaClient } from "./generated/prisma/client";
import fs from "node:fs";

// Parse DATABASE_URL out of .env by hand (no dotenv dependency here).
// FIX: split on \r?\n so a CRLF-encoded .env does not leave a stray \r glued
// to the connection URL, and trim lines so indented entries still match.
const env = fs.readFileSync(".env", "utf8");
const line = env
  .split(/\r?\n/)
  .map((l) => l.trim())
  .find((l) => l.startsWith("DATABASE_URL="));
const url = line?.slice("DATABASE_URL=".length)?.replace(/^"|"$/g, "");
if (!url) {
  // FIX: fail loudly instead of handing PrismaPg an undefined connection
  // string, which previously produced a confusing downstream error.
  throw new Error("DATABASE_URL not found in .env");
}
const prisma = new PrismaClient({
  adapter: new PrismaPg({ connectionString: url }),
});
const rows = await prisma.$queryRaw`select tablename from pg_tables where schemaname = 'public' order by tablename`;
console.log(rows);
await prisma.$disconnect();
-24
View File
@@ -1,24 +0,0 @@
// One-off audit: decode each role's permission JWT and print any grants that
// could cover sales opportunity products.
import { prisma } from "./src/constants";

const roles = await prisma.role.findMany({ select: { title: true, moniker: true, permissions: true } });
for (const role of roles) {
  // `permissions` holds a JWT whose payload carries a `permissions` array.
  const token = role.permissions;
  if (!token) { console.log(role.title, "- NO PERMISSIONS"); continue; }
  try {
    // Decode the JWT payload without signature verification (read-only audit;
    // Node's base64 decoder also accepts the base64url alphabet).
    const payload = JSON.parse(Buffer.from(token.split(".")[1], "base64").toString());
    const perms: string[] = payload.permissions || [];
    // Keep only grants that could reach sales.opportunity.product.
    const relevant = perms.filter((p: string) =>
      p.includes("sales.opportunity.product") ||
      p.includes("sales.opportunity.*") ||
      p.includes("sales.*") ||
      p === "*"
    );
    if (relevant.length > 0) {
      console.log("=== " + role.title + " (" + role.moniker + ") ===");
      relevant.forEach((p: string) => console.log(" " + p));
    }
  } catch(e: any) { console.log(role.title, "- parse error:", e.message); }
}
await prisma.$disconnect();
-24
View File
@@ -1,24 +0,0 @@
// One-off audit: decode each role's permission JWT and report whether it
// carries any grant covering obj.catalogItem.
import { prisma } from "./src/constants";

const roles = await prisma.role.findMany({ select: { title: true, moniker: true, permissions: true } });
for (const role of roles) {
  // `permissions` holds a JWT whose payload carries a `permissions` array.
  const token = role.permissions;
  if (!token) { console.log(role.title, "- NO PERMISSIONS"); continue; }
  try {
    // Decode the JWT payload without signature verification (read-only audit).
    const payload = JSON.parse(Buffer.from(token.split(".")[1], "base64").toString());
    const perms: string[] = payload.permissions || [];
    // Check for obj.catalogItem or wildcard that would cover it
    const relevant = perms.filter((p: string) =>
      p.includes("obj.catalogItem") ||
      p.includes("obj.*") ||
      p === "*"
    );
    console.log("=== " + role.title + " (" + role.moniker + ") ===");
    console.log(" obj.catalogItem-related perms:", JSON.stringify(relevant));
    // Also show all perms for debugging
    console.log(" Total perms:", perms.length);
  } catch(e: any) { console.log(role.title, "- parse error:", e.message); }
}
await prisma.$disconnect();
+2 -2
View File
@@ -621,7 +621,7 @@
"@types/aria-query": ["@types/aria-query@5.0.4", "", {}, "sha512-rfT93uj5s0PRL7EzccGMs3brplhcrghnDoV26NqKhCAS1hVo+WdNsPvE/yb6ilfr5hi2MEk6d5EWJTKdxg8jVw=="],
"@types/bun": ["@types/bun@1.3.11", "", { "dependencies": { "bun-types": "1.3.11" } }, "sha512-5vPne5QvtpjGpsGYXiFyycfpDF2ECyPcTSsFBMa0fraoxiQyMJ3SmuQIGhzPg2WJuWxVBoxWJ2kClYTcw/4fAg=="],
"@types/bun": ["@types/bun@1.3.12", "", { "dependencies": { "bun-types": "1.3.12" } }, "sha512-DBv81elK+/VSwXHDlnH3Qduw+KxkTIWi7TXkAeh24zpi5l0B2kUg9Ga3tb4nJaPcOFswflgi/yAvMVBPrxMB+A=="],
"@types/cacheable-request": ["@types/cacheable-request@6.0.3", "", { "dependencies": { "@types/http-cache-semantics": "*", "@types/keyv": "^3.1.4", "@types/node": "*", "@types/responselike": "^1.0.0" } }, "sha512-IQ3EbTzGxIigb1I3qPZc1rWJnH0BmSKv5QYTalEwweFvyBDLSAe24zP0le/hyi7ecGfZVlIVAg4BZqb8WBwKqw=="],
@@ -841,7 +841,7 @@
"buffer-from": ["buffer-from@1.1.2", "", {}, "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ=="],
"bun-types": ["bun-types@1.3.11", "", { "dependencies": { "@types/node": "*" } }, "sha512-1KGPpoxQWl9f6wcZh57LvrPIInQMn2TQ7jsgxqpRzg+l0QPOFvJVH7HmvHo/AiPgwXy+/Thf6Ov3EdVn1vOabg=="],
"bun-types": ["bun-types@1.3.12", "", { "dependencies": { "@types/node": "*" } }, "sha512-HqOLj5PoFajAQciOMRiIZGNoKxDJSr6qigAttOX40vJuSp6DN/CxWp9s3C1Xwm4oH7ybueITwiaOcWXoYVoRkA=="],
"bundle-name": ["bundle-name@4.1.0", "", { "dependencies": { "run-applescript": "^7.0.0" } }, "sha512-tjwM5exMg6BGRI+kNmTntNsvdZS1X8BFYS6tnJ2hdH0kVxM6/eVZ2xy+FqStSWvYmtfFMDLIxurorHwDKfDz5Q=="],
+33
View File
@@ -0,0 +1,33 @@
# One-shot Dalpuri sync Job. RELEASE_TAG is substituted at deploy time, so
# the Job name and the image tag stay in lockstep per release.
apiVersion: batch/v1
kind: Job
metadata:
  name: dalpuri-sync-RELEASE_TAG
  namespace: optima
  labels:
    app: dalpuri-sync
spec:
  backoffLimit: 0                  # never retry a failed sync run
  ttlSecondsAfterFinished: 86400   # keep the finished Job for 24h, then GC it
  activeDeadlineSeconds: 7200      # hard-kill syncs running longer than 2h
  template:
    metadata:
      labels:
        app: dalpuri-sync
    spec:
      containers:
        - name: sync
          image: ghcr.io/horizonstacksoftware/optima-dalpuri-sync:RELEASE_TAG
          env:
            # Source (ConnectWise) and target (API) database URLs both come
            # from the shared api-env-secret; note API_DATABASE_URL maps to
            # the secret's DATABASE_URL key.
            - name: CW_DATABASE_URL
              valueFrom:
                secretKeyRef:
                  name: api-env-secret
                  key: CW_DATABASE_URL
            - name: API_DATABASE_URL
              valueFrom:
                secretKeyRef:
                  name: api-env-secret
                  key: DATABASE_URL
      restartPolicy: Never
      imagePullSecrets:
        - name: github-container-registry
+27
View File
@@ -294,6 +294,22 @@ const refreshContextFromApi = async (
}
}
const cwMembers = await apiPrisma.cwMember.findMany({
select: { cwMemberId: true, identifier: true },
});
for (const member of cwMembers) {
if (
member.cwMemberId != null &&
member.identifier &&
!context.userIdentifiersByMemberRecId.has(member.cwMemberId)
) {
context.userIdentifiersByMemberRecId.set(
member.cwMemberId,
member.identifier
);
}
}
for (const board of boards) {
context.serviceTicketBoardUidsById.set(board.id, board.uid);
}
@@ -426,6 +442,12 @@ const sanitizeModelData = (
) {
sanitized.statusId = null;
}
if (
sanitized.locationId != null &&
!context.corporateLocationIds.has(sanitized.locationId as number)
) {
sanitized.locationId = null;
}
}
if (targetModel === "schedule") {
@@ -734,6 +756,11 @@ const getConfigForTable = (table: string): SyncTableConfig | null => {
secondarySalesFlag: true,
},
},
soOppStatus: {
select: {
closedFlag: true,
},
},
},
},
},
+216 -19
View File
@@ -75,6 +75,136 @@ type DeleteResult = {
let incrementalDeleteStepIndex = 0;
// Tables whose incremental syncs must periodically be reconciled with a full
// pass; drift in these is most damaging downstream.
const CRITICAL_INCREMENTAL_RECONCILE_TABLES = new Set([
  "Companies",
  "Company Addresses",
  "Contacts",
  "Opportunities",
]);
// Tables that additionally track a CW-side updated-at watermark so forced
// incremental runs pick up rows changed since the previous pass.
const CRITICAL_CW_WATERMARK_TABLES = new Set([
  "Companies",
  "Company Addresses",
  "Contacts",
  "Opportunities",
]);
// Minutes between forced full reconciliations of critical tables
// (env-overridable; minimum 1; NaN/empty falls back to 15).
const criticalFullSyncIntervalMinutes = Math.max(
  1,
  Number.parseInt(
    process.env.DALPURI_CRITICAL_FULL_SYNC_INTERVAL_MINUTES ?? "15",
    10
  ) || 15
);
const CRITICAL_FULL_SYNC_INTERVAL_MS =
  criticalFullSyncIntervalMinutes * 60 * 1000;
// Overlap window subtracted from the stored watermark so rows updated right
// around the previous cutoff are not missed (minimum 5s, default 60s).
const criticalCwWatermarkOverlapSeconds = Math.max(
  5,
  Number.parseInt(
    process.env.DALPURI_CRITICAL_CW_WATERMARK_OVERLAP_SECONDS ?? "60",
    10
  ) || 60
);
const CRITICAL_CW_WATERMARK_OVERLAP_MS =
  criticalCwWatermarkOverlapSeconds * 1000;
// Cap on rows fetched per watermark delta query; reaching the cap triggers a
// full sync instead (minimum 100, default 5000).
const criticalCwDeltaLimit = Math.max(
  100,
  Number.parseInt(process.env.DALPURI_CRITICAL_CW_DELTA_LIMIT ?? "5000", 10) ||
    5000
);
// Per-step bookkeeping, held in process memory only (resets on restart).
const lastCriticalFullSyncByStep = new Map<string, number>();
const lastCriticalCwWatermarkByStep = new Map<string, Date>();

// Decide whether this forced-incremental run should instead fully sync a
// critical table. Returns true at most once per interval per step and, as a
// side effect, records the decision time — so call it at most once per step
// per run.
const shouldForceCriticalFullSync = (
  step: Step,
  forceIncremental: boolean
): boolean => {
  if (!forceIncremental) return false;
  if (!CRITICAL_INCREMENTAL_RECONCILE_TABLES.has(step.name)) return false;
  const now = Date.now();
  const last = lastCriticalFullSyncByStep.get(step.name) ?? 0;
  if (now - last < CRITICAL_FULL_SYNC_INTERVAL_MS) {
    return false;
  }
  lastCriticalFullSyncByStep.set(step.name, now);
  return true;
};

// For a critical table during a forced-incremental run, query CW for rows
// updated since (last watermark - overlap) and return an incremental
// decision with just those ids. Returns null when not applicable, or a full
// decision when the delta hits the fetch limit. Advances the in-memory
// watermark as a side effect.
const computeCriticalCwWatermarkDecision = async (
  cwPrisma: CwPrismaClient,
  step: Step,
  forceIncremental: boolean
): Promise<SmartSyncDecision | null> => {
  if (!forceIncremental) return null;
  if (!CRITICAL_CW_WATERMARK_TABLES.has(step.name)) return null;
  // Look up the Prisma delegate for this step's source model dynamically.
  const cwDelegate = (
    cwPrisma as unknown as Record<string, { findMany: Function } | undefined>
  )[step.sourceModel];
  if (!cwDelegate) {
    return null;
  }
  // Preserve the step's own where-clause and AND it with the time filter.
  const existingWhere =
    (step.sourceArgs as Record<string, unknown> | undefined)?.where ?? {};
  const lastWatermark = lastCriticalCwWatermarkByStep.get(step.name);
  // First run has no watermark: look back only one overlap window.
  const lowerBound = lastWatermark
    ? new Date(lastWatermark.getTime() - CRITICAL_CW_WATERMARK_OVERLAP_MS)
    : new Date(Date.now() - CRITICAL_CW_WATERMARK_OVERLAP_MS);
  const rows = (await cwDelegate.findMany({
    select: {
      [step.sourceIdField]: true,
      [step.sourceUpdatedField]: true,
    },
    where: {
      ...(existingWhere as Record<string, unknown>),
      [step.sourceUpdatedField]: {
        gte: lowerBound,
      },
    },
    orderBy: { [step.sourceUpdatedField]: "asc" },
    take: criticalCwDeltaLimit,
  })) as Row[];
  if (rows.length >= criticalCwDeltaLimit) {
    // Delta may be truncated — fall back to a full sync to be safe.
    console.warn(
      ` [smart-sync][critical-watermark] ${step.name}: delta reached limit (${criticalCwDeltaLimit}), forcing full sync`
    );
    return { mode: "full", differences: [] };
  }
  if (rows.length > 0) {
    // Rows are sorted ascending, so the last row carries the new watermark.
    const latest = rows[rows.length - 1][step.sourceUpdatedField] as Date | null;
    if (latest) {
      lastCriticalCwWatermarkByStep.set(step.name, latest);
    }
  } else if (!lastWatermark) {
    // Nothing changed and no watermark yet: anchor one now so the next run
    // has a stable lower bound.
    lastCriticalCwWatermarkByStep.set(step.name, new Date());
  }
  const sourceIds = rows.map((r) => r[step.sourceIdField] as number);
  console.log(
    ` [smart-sync][critical-watermark] ${step.name}: ${sourceIds.length} ids since ${lowerBound.toISOString()}`
  );
  return {
    mode: "incremental",
    sourceIds,
    differences: [],
  };
};
const parseEnvFile = (path: string): Record<string, string> => {
const envData = readFileSync(path, "utf8");
const out: Record<string, string> = {};
@@ -107,6 +237,20 @@ const resolveApiDatabaseUrl = (): string => {
if (process.env.OPTIMA_API_DATABASE_URL)
return process.env.OPTIMA_API_DATABASE_URL;
// Worker/runtime fallback:
// In Kubernetes we often provide CW via CW_DATABASE_URL and API Postgres via
// DATABASE_URL. Only use DATABASE_URL as API when we can safely infer that.
if (process.env.CW_DATABASE_URL && process.env.DATABASE_URL) {
return process.env.DATABASE_URL;
}
if (
process.env.DATABASE_URL &&
/^(postgres|postgresql):\/\//i.test(process.env.DATABASE_URL)
) {
return process.env.DATABASE_URL;
}
const candidates = [
resolve(import.meta.dir, "../../api/.env"),
resolve(process.cwd(), "../api/.env"),
@@ -323,6 +467,22 @@ const refreshContextFromApi = async (
}
}
const cwMembers = await apiPrisma.cwMember.findMany({
select: { cwMemberId: true, identifier: true },
});
for (const member of cwMembers) {
if (
member.cwMemberId != null &&
member.identifier &&
!context.userIdentifiersByMemberRecId.has(member.cwMemberId)
) {
context.userIdentifiersByMemberRecId.set(
member.cwMemberId,
member.identifier
);
}
}
for (const board of boards) {
context.serviceTicketBoardUidsById.set(board.id, board.uid);
}
@@ -636,6 +796,13 @@ const sanitizeModelData = (
) {
sanitized.stageId = null;
}
// Nullify locationId if the corporate location doesn't exist
if (
sanitized.locationId != null &&
!context.corporateLocationIds.has(sanitized.locationId as number)
) {
sanitized.locationId = null;
}
// Nullify taxCodeId if the tax code hasn't synced yet
if (
sanitized.taxCodeId != null &&
@@ -1328,6 +1495,15 @@ export const executeFullDalpuriSync = async (options?: {
const isTimedOut = () => Date.now() - syncStartTime > timeoutMs;
const steps: Step[] = [
{
name: "CW Members",
sourceModel: "member",
targetModel: "cwMember",
translation: cwMemberTranslation as unknown as AnyTranslation,
uniqueField: "cwMemberId",
sourceIdField: "memberRecId",
sourceUpdatedField: "lastUpdatedUtc",
},
{
name: "Users",
sourceModel: "member",
@@ -1342,15 +1518,6 @@ export const executeFullDalpuriSync = async (options?: {
},
},
},
{
name: "CW Members",
sourceModel: "member",
targetModel: "cwMember",
translation: cwMemberTranslation as unknown as AnyTranslation,
uniqueField: "cwMemberId",
sourceIdField: "memberRecId",
sourceUpdatedField: "lastUpdatedUtc",
},
{
name: "Companies",
sourceModel: "company",
@@ -1585,6 +1752,11 @@ export const executeFullDalpuriSync = async (options?: {
secondarySalesFlag: true,
},
},
soOppStatus: {
select: {
closedFlag: true,
},
},
},
},
},
@@ -1729,19 +1901,40 @@ export const executeFullDalpuriSync = async (options?: {
step,
forceIncremental
);
const criticalWatermarkDecision = await computeCriticalCwWatermarkDecision(
cwPrisma,
step,
forceIncremental
);
const forceCriticalFullSync = shouldForceCriticalFullSync(
step,
forceIncremental
);
const effectiveDecision = forceCriticalFullSync
? ({ mode: "full", differences: decision.differences } as SmartSyncDecision)
: criticalWatermarkDecision ?? decision;
if (forceCriticalFullSync) {
console.log(
` [smart-sync][forced-full] ${step.name}: forcing periodic full reconciliation every ${criticalFullSyncIntervalMinutes}m`
);
}
const sourceIdsFilter =
decision.mode === "incremental" ? decision.sourceIds : undefined;
effectiveDecision.mode === "incremental"
? effectiveDecision.sourceIds
: undefined;
console.log(
` [smart-sync]${forceIncremental ? "[forced]" : ""} mode=${
decision.mode
effectiveDecision.mode
}${
decision.mode === "incremental"
? ` (${decision.sourceIds.length} ids)`
effectiveDecision.mode === "incremental"
? ` (${effectiveDecision.sourceIds.length} ids)`
: ""
}`
);
if (logAllDifferences) {
logAllSmartSyncDifferences(step, decision.differences);
logAllSmartSyncDifferences(step, effectiveDecision.differences);
}
const result = await syncStep(
cwPrisma,
@@ -1763,7 +1956,7 @@ export const executeFullDalpuriSync = async (options?: {
await writeStepLog(
step.name,
decision.mode,
effectiveDecision.mode,
result,
{ deleted: 0, failed: 0 },
Date.now() - stepStart
@@ -1860,8 +2053,12 @@ export const executeForcedIncrementalDalpuriSync = async (options?: {
};
if (import.meta.main) {
executeFullDalpuriSync().catch((error) => {
console.error("CW -> API sync failed:", error);
process.exit(1);
});
executeFullDalpuriSync()
.then(() => {
process.exit(0);
})
.catch((error) => {
console.error("CW -> API sync failed:", error);
process.exit(1);
});
}
+1 -1
View File
@@ -46,7 +46,7 @@ export const contactTranslation: Translation<CwContact, ApiContact> = {
{
from: "lastName",
to: "lastName",
process: (value) => (value ? value : "Contact"),
process: (value) => (value ? value : ""),
},
{ from: "nickName", to: "nickname" },
{ from: "title", to: "title" },
+10 -1
View File
@@ -1,6 +1,7 @@
import {
Opportunity as CwOpportunity,
OpportunityMember as CwOpportunityMember,
SoOppStatus as CwSoOppStatus,
} from "../../generated/prisma/client";
import { OpportunityInterest } from "../../../api/generated/prisma/client";
import { Translation, skipRow } from "./types";
@@ -30,6 +31,7 @@ type ApiOpportunityRecord = {
dateBecameLead?: Date | null;
closedDate?: Date | null;
closedFlag: boolean;
locationId?: number | null;
closedById?: string | null;
updatedBy: string;
eneteredBy: string;
@@ -42,6 +44,7 @@ type CwOpportunityWithMembers = CwOpportunity & {
CwOpportunityMember,
"memberRecId" | "primarySalesFlag" | "secondarySalesFlag"
>[];
soOppStatus?: Pick<CwSoOppStatus, "closedFlag"> | null;
};
const toInterest = (value: number | null): OpportunityInterest | null => {
@@ -119,13 +122,19 @@ export const opportunityTranslation: Translation<
},
{ from: "companyRecId", to: "companyId" },
{ from: "contactRecId", to: "contactId" },
{ from: "ownerLevelRecId", to: "locationId" },
{ from: "companyAddressRecId", to: "siteId" },
{ from: "poNumber", to: "customerPO" },
{ from: "dateCloseExpected", to: "expectedCloseDate" },
{ from: "datePipelineChange", to: "pipelineChangeDate" },
{ from: "dateBecameLead", to: "dateBecameLead" },
{ from: "dateClosed", to: "closedDate" },
{ from: "oldCloseFlag", to: "closedFlag" },
{
from: "oldCloseFlag",
to: "closedFlag",
process: (_value, _context, row) =>
row.soOppStatus?.closedFlag ?? row.oldCloseFlag ?? false,
},
{ from: "closedBy", to: "closedById" },
{
from: "updatedBy",
-76
View File
@@ -1,76 +0,0 @@
// One-off MSSQL probe: measure how Config user-defined field values are
// populated and sample Custom_Field payloads, to scope sync requirements.
import { PrismaMssql } from "@prisma/adapter-mssql";
import { PrismaClient } from "./generated/prisma/client";

const connectionString = process.env.DATABASE_URL;
if (!connectionString) {
  throw new Error("DATABASE_URL is not set.");
}
const prisma = new PrismaClient({
  adapter: new PrismaMssql(connectionString),
});
try {
  // Totals for the UDF value table and how many distinct configs use it.
  const rowSummary = await prisma.$queryRawUnsafe<
    Array<{ total_rows: number; distinct_configs: number }>
  >(`
    SELECT
      COUNT(*) AS total_rows,
      COUNT(DISTINCT Config_RecID) AS distinct_configs
    FROM dbo.Config_User_Defined_Field_Value;
  `);
  // Row counts of related tables, plus configs with a non-empty Custom_Field.
  const relatedRowCounts = await prisma.$queryRawUnsafe<
    Array<{
      config_rows: number;
      cs_result_detail_rows: number;
      config_custom_field_nonempty: number;
    }>
  >(`
    SELECT
      (SELECT COUNT(*) FROM dbo.Config) AS config_rows,
      (SELECT COUNT(*) FROM dbo.CS_Result_Detail) AS cs_result_detail_rows,
      (SELECT COUNT(*)
       FROM dbo.Config
       WHERE Custom_Field IS NOT NULL
         AND LEN(LTRIM(RTRIM(CONVERT(nvarchar(max), Custom_Field)))) > 0) AS config_custom_field_nonempty;
  `);
  // The ten configs with the most UDF values.
  const topConfigs = await prisma.$queryRawUnsafe<
    Array<{ config_recid: number; field_count: number }>
  >(`
    SELECT TOP 10
      Config_RecID AS config_recid,
      COUNT(*) AS field_count
    FROM dbo.Config_User_Defined_Field_Value
    GROUP BY Config_RecID
    ORDER BY field_count DESC, config_recid ASC;
  `);
  // First 250 chars of a few Custom_Field blobs, to eyeball the format.
  const customFieldSamples = await prisma.$queryRawUnsafe<
    Array<{ config_recid: number; custom_field_prefix: string }>
  >(`
    SELECT TOP 5
      Config_RecID AS config_recid,
      LEFT(CONVERT(nvarchar(max), Custom_Field), 250) AS custom_field_prefix
    FROM dbo.Config
    WHERE Custom_Field IS NOT NULL
      AND LEN(LTRIM(RTRIM(CONVERT(nvarchar(max), Custom_Field)))) > 0
    ORDER BY Config_RecID ASC;
  `);
  console.log(
    JSON.stringify(
      {
        rowSummary: rowSummary[0] ?? null,
        relatedRowCounts: relatedRowCounts[0] ?? null,
        topConfigs,
        customFieldSamples,
      },
      null,
      2,
    ),
  );
} finally {
  // Always release the connection pool, even when a query throws.
  await prisma.$disconnect();
}
-43
View File
@@ -1,43 +0,0 @@
// Export the first 10 configurations (with status, audits, and audit values)
// to a JSON file for offline inspection.
import { PrismaMssql } from "@prisma/adapter-mssql";
import { PrismaClient } from "./generated/prisma/client";
import { writeFileSync } from "node:fs";

// Output path: CLI arg > CONFIG_OUTPUT_FILE env var > default filename.
const outputPath =
  process.argv[2] ??
  process.env.CONFIG_OUTPUT_FILE ??
  "configurations-first-10-with-relations.json";
const connectionString = process.env.DATABASE_URL;
if (!connectionString) {
  throw new Error("DATABASE_URL is not set.");
}
const adapter = new PrismaMssql(connectionString);
const prisma = new PrismaClient({ adapter });
try {
  const configurations = await prisma.configuration.findMany({
    take: 10,
    orderBy: { configRecId: "asc" },
    include: {
      configStatus: true,
      // Newest audit first, each with its values in insertion order.
      configurationAudits: {
        orderBy: { lastUpdatedUtc: "desc" },
        include: {
          configurationValues: {
            orderBy: { configurationAuditValueRecId: "asc" },
          },
        },
      },
    },
  });
  if (configurations.length === 0) {
    console.error("No configurations found.");
    process.exit(1);
  }
  writeFileSync(outputPath, JSON.stringify(configurations, null, 2));
} finally {
  // Always release the connection pool, even on failure.
  await prisma.$disconnect();
}
-19
View File
@@ -1,19 +0,0 @@
// Export opportunities (with related records) to a JSON file.
import fetchOpportunities from "./old-src/collectors/fetchOpportunities";
// FIX: this file is an ES module (it uses `import`), where CommonJS
// `require` is not defined at runtime — the old `require("fs")` inside the
// callback would throw ReferenceError. Import fs statically instead.
import { writeFileSync } from "node:fs";

fetchOpportunities({
  include: [
    "company",
    "activities",
    "opportunityNotes",
    "forecastItems",
    "contacts",
  ],
})
  .then((opportunities) => {
    const jsonData = JSON.stringify(opportunities, null, 2);
    writeFileSync("examples/opportunity-with-relations.json", jsonData);
    console.log(
      `Exported ${opportunities.length} opportunities to examples/opportunity-with-relations.json`
    );
    process.exit(0);
  })
  .catch((error) => {
    // FIX: a rejected fetch was previously an unhandled rejection; report it
    // and exit non-zero so callers can detect the failure.
    console.error("Failed to export opportunities:", error);
    process.exit(1);
  });
-30
View File
@@ -1,30 +0,0 @@
// Export the first 100 active catalog products (with category, manufacturer,
// inventory and vendor relations) to a JSON file for offline inspection.
import { PrismaClient } from "./generated/prisma/client";
import { PrismaMssql } from "@prisma/adapter-mssql";
import { writeFileSync } from "fs";

const connectionString = process.env.DATABASE_URL;
if (!connectionString) {
  throw new Error("DATABASE_URL is not set.");
}
const adapter = new PrismaMssql(connectionString);
const prisma = new PrismaClient({ adapter });
const products = await prisma.productCatalog.findMany({
  where: { inactiveFlag: false },
  include: {
    subcategory: { include: { category: true } },
    manufacturer: true,
    inventory: true,
    itemVendors: true,
  },
  take: 100,
});
writeFileSync(
  "products-with-relations.json",
  JSON.stringify(products, null, 2),
);
console.log(`Exported ${products.length} products`);
await prisma.$disconnect();
@@ -1,150 +0,0 @@
// One-off schema/data explorer: use Prisma's DMMF metadata to find config-
// related tables and value-like columns, then pull row counts, audit-token
// frequencies, and the first configurations with their audit trails.
import { PrismaMssql } from "@prisma/adapter-mssql";
import { Prisma, PrismaClient } from "./generated/prisma/client";

const connectionString = process.env.DATABASE_URL;
if (!connectionString) {
  throw new Error("DATABASE_URL is not set.");
}
const adapter = new PrismaMssql(connectionString);
const prisma = new PrismaClient({ adapter });

// Output row shapes.
type CandidateTable = { table_name: string };
type CandidateColumn = { table_name: string; column_name: string };
// Minimal slices of Prisma's DMMF model metadata that we actually read.
type DmmfField = {
  name: string;
  dbName: string | null;
};
type DmmfModel = {
  name: string;
  dbName: string | null;
  fields: DmmfField[];
};

// Heuristics: tables whose name looks config-related, and columns whose name
// suggests they hold a value/answer/token.
const TABLE_PATTERN = /config|configur/i;
const VALUE_COLUMN_PATTERN = /value|field|question|token/i;
const TOP_CONFIG_LIMIT = 150;
// Column names that act as a foreign key back to a configuration row.
const CONFIG_KEY_COLUMNS = new Set([
  "Config_RecID",
  "Configuration_RecID",
  "Configuration_RecId",
]);

// Locale-aware comparator used for stable, human-friendly sorting.
function byName(a: string, b: string) {
  return a.localeCompare(b);
}

try {
  // Prefer the physical table name (dbName) when Prisma renamed the model.
  const models = Prisma.dmmf.datamodel.models as unknown as DmmfModel[];
  const configModels = models
    .map((model) => ({
      model,
      tableName: model.dbName ?? model.name,
    }))
    .filter(({ tableName }) => TABLE_PATTERN.test(tableName));
  // All config-looking tables, sorted by name.
  const candidateTables: CandidateTable[] = configModels
    .map(({ tableName }) => ({ table_name: tableName }))
    .sort((a, b) => byName(a.table_name, b.table_name));
  // All value-looking columns across those tables.
  const candidateColumns: CandidateColumn[] = configModels
    .flatMap(({ model, tableName }) =>
      model.fields
        .map((field) => field.dbName ?? field.name)
        .filter((columnName) => VALUE_COLUMN_PATTERN.test(columnName))
        .map((columnName) => ({
          table_name: tableName,
          column_name: columnName,
        }))
    )
    .sort(
      (a, b) =>
        byName(a.table_name, b.table_name) ||
        byName(a.column_name, b.column_name)
    );
  // Tables that both point back at a configuration AND carry a value column —
  // the most likely places actual config answers are stored.
  const valueTablesWithConfigKey: CandidateTable[] = configModels
    .filter(({ model }) => {
      const columnNames = model.fields.map(
        (field) => field.dbName ?? field.name
      );
      const hasConfigKey = columnNames.some((column) =>
        CONFIG_KEY_COLUMNS.has(column)
      );
      const hasValueLikeColumn = columnNames.some((column) =>
        VALUE_COLUMN_PATTERN.test(column)
      );
      return hasConfigKey && hasValueLikeColumn;
    })
    .map(({ tableName }) => ({ table_name: tableName }))
    .sort((a, b) => byName(a.table_name, b.table_name));
  // Batch all DB reads into one interactive transaction.
  const [
    configRows,
    auditRows,
    auditValueRows,
    nonNullCustomFields,
    groupedAuditTokens,
    topConfigs,
  ] = await prisma.$transaction([
    prisma.configuration.count(),
    prisma.configurationAudit.count(),
    prisma.configurationAuditValue.count(),
    prisma.configuration.findMany({
      where: { customField: { not: null } },
      select: { customField: true },
    }),
    prisma.configurationAuditValue.groupBy({
      by: ["auditToken"],
      _count: true,
      orderBy: [{ _count: { auditToken: "desc" } }, { auditToken: "asc" }],
      take: 20,
    }),
    prisma.configuration.findMany({
      take: TOP_CONFIG_LIMIT,
      orderBy: { configRecId: "asc" },
      include: {
        configurationAudits: {
          orderBy: { configurationAuditRecId: "asc" },
          include: {
            configurationValues: {
              orderBy: { configurationAuditValueRecId: "asc" },
            },
          },
        },
      },
    }),
  ]);
  // Count only custom fields that are non-empty after trimming.
  const configCustomFieldNonempty = nonNullCustomFields.reduce((count, row) => {
    return row.customField?.trim() ? count + 1 : count;
  }, 0);
  const rowStats = {
    config_rows: configRows,
    config_custom_field_nonempty: configCustomFieldNonempty,
    audit_rows: auditRows,
    audit_value_rows: auditValueRows,
  };
  const topAuditTokens = groupedAuditTokens.map(({ auditToken, _count }) => ({
    audit_token: auditToken,
    row_count: _count,
  }));
  const output = {
    candidateTables,
    candidateColumns,
    valueTablesWithConfigKey,
    rowStats,
    topAuditTokens,
    topConfigs,
  };
  console.log(JSON.stringify(output, null, 2));
} finally {
  // Always release the connection pool, even when a query throws.
  await prisma.$disconnect();
}
@@ -1,334 +0,0 @@
import { PrismaMssql } from "@prisma/adapter-mssql";
import { PrismaPg } from "@prisma/adapter-pg";
import { readFileSync } from "node:fs";
import { resolve } from "node:path";
import { PrismaClient as CwPrismaClient } from "./generated/prisma/client";
import { PrismaClient as ApiPrismaClient } from "../api/generated/prisma/client";
// Key/value pairs parsed from a dotenv-style file.
type EnvMap = Record<string, string>;
// JSON-serializable report printed by main(): overall counters describing the
// CW -> API TicketNote sync gap, capped sample ID lists for spot-checking, and
// a parallel breakdown restricted to "automateapi"-authored notes.
type Summary = {
  cwTotal: number; // total TicketNote rows in the CW (source) DB
  apiTotal: number; // total serviceTicketNote rows already in the API DB
  eligibleForSync: number; // rows whose parent ticket and author both resolve in the API DB
  missingSrServiceRecId: number; // rows with a falsy srServiceRecId (no parent reference)
  missingMemberRecId: number; // rows with a falsy memberRecId (no author reference)
  missingParentTicketInApi: number; // parent referenced but absent from the API DB
  missingAuthorMappingInApi: number; // author referenced but no API user carries that cwMemberId
  eligibleButMissingInApi: number; // syncable rows that still have no API counterpart
  sampleMissingParentTicketIds: number[]; // up to 20 example srServiceRecIds
  sampleMissingAuthorMemberRecIds: number[]; // up to 20 example memberRecIds
  sampleEligibleButMissingNoteIds: number[]; // up to 50 example note IDs
  topMissingAuthorMemberRecIds: Array<{ memberRecId: number; count: number }>; // top 20 by count, descending
  topMissingParentTicketIds: Array<{ srServiceRecId: number; count: number }>; // top 20 by count, descending
  // Same counters, restricted to notes whose author fields mention "automateapi".
  automateApi: {
    total: number;
    eligibleForSync: number;
    missingSrServiceRecId: number;
    missingMemberRecId: number;
    missingParentTicketInApi: number;
    missingAuthorMappingInApi: number;
    eligibleButMissingInApi: number;
    sampleMemberRecIds: number[]; // up to 20 distinct memberRecIds seen on automate notes
    sampleNoteIdsMissingInApi: number[]; // up to 20 note IDs absent from the API DB
  };
};
/**
 * True when either author field mentions "automateapi" (case-insensitive,
 * ignoring surrounding whitespace). Null/undefined fields never match.
 */
const isAutomateApiAuthor = (
  createdBy: string | null,
  originalAuthor: string | null
): boolean => {
  const mentionsAutomate = (field: string | null): boolean =>
    (field ?? "").trim().toLowerCase().includes("automateapi");
  return mentionsAutomate(createdBy) || mentionsAutomate(originalAuthor);
};
/**
 * Minimal dotenv parser. Reads `path` as UTF-8, skipping blank lines and
 * `#` comments; each remaining `KEY=VALUE` line is split on the first `=`,
 * both sides trimmed, and one matching pair of surrounding single or double
 * quotes is stripped from the value. Throws if the file cannot be read.
 */
const parseEnvFile = (path: string): Record<string, string> => {
  const parsed: Record<string, string> = {};
  for (const rawLine of readFileSync(path, "utf8").split(/\r?\n/)) {
    const line = rawLine.trim();
    if (line === "" || line.startsWith("#")) continue;
    const eq = line.indexOf("=");
    // Skip lines with no "=" or with an empty key (e.g. "=value").
    if (eq <= 0) continue;
    const key = line.slice(0, eq).trim();
    let value = line.slice(eq + 1).trim();
    const doubleQuoted = value.startsWith('"') && value.endsWith('"');
    const singleQuoted = value.startsWith("'") && value.endsWith("'");
    if (doubleQuoted || singleQuoted) {
      value = value.slice(1, -1);
    }
    parsed[key] = value;
  }
  return parsed;
};
/**
 * Locate and parse api/.env, probing a few likely locations relative to this
 * script (Bun's import.meta.dir) and the working directory. The first file
 * that parses wins; if none can be read, an empty map is returned.
 */
const readApiEnv = (): EnvMap => {
  const searchPaths = [
    resolve(import.meta.dir, "../api/.env"),
    resolve(process.cwd(), "../api/.env"),
    resolve(process.cwd(), "api/.env"),
  ];
  for (const path of searchPaths) {
    try {
      return parseEnvFile(path);
    } catch {
      // Missing/unreadable — fall through to the next candidate.
    }
  }
  return {};
};
/**
 * Diagnose the TicketNote sync gap between the ConnectWise source DB (MSSQL,
 * via PrismaMssql) and the API DB (Postgres, via PrismaPg).
 *
 * Loads the full API-side ID sets into memory, then scans every CW TicketNote
 * in keyset-paginated batches, classifying each row by what (if anything)
 * blocks it from syncing, and finally prints a JSON `Summary` to stdout.
 *
 * @throws Error when neither process env nor api/.env yields both DB URLs.
 */
const main = async (): Promise<void> => {
  const apiEnv = readApiEnv();
  // Resolve connection strings: process env takes precedence over api/.env.
  const cwDatabaseUrl =
    process.env.CW_DATABASE_URL ||
    process.env.DATABASE_URL ||
    apiEnv.CW_DATABASE_URL;
  const apiDatabaseUrl =
    process.env.API_DATABASE_URL ||
    process.env.OPTIMA_API_DATABASE_URL ||
    apiEnv.API_DATABASE_URL ||
    apiEnv.OPTIMA_API_DATABASE_URL ||
    apiEnv.DATABASE_URL;
  if (!cwDatabaseUrl) {
    throw new Error("Missing CW DB URL. Set CW_DATABASE_URL or DATABASE_URL.");
  }
  if (!apiDatabaseUrl) {
    throw new Error(
      "Missing API DB URL. Set API_DATABASE_URL/OPTIMA_API_DATABASE_URL or provide api/.env DATABASE_URL."
    );
  }
  const cwPrisma = new CwPrismaClient({
    adapter: new PrismaMssql(cwDatabaseUrl),
  });
  const apiPrisma = new ApiPrismaClient({
    adapter: new PrismaPg({ connectionString: apiDatabaseUrl }),
  });
  try {
    // Pull all API-side IDs up front so the per-row checks below are O(1)
    // Set lookups. NOTE(review): this assumes the three tables fit in memory
    // comfortably — confirm row counts stay manageable as the DBs grow.
    console.log("[diag] Loading API reference sets...");
    const [apiNotes, apiTickets, apiUsers] = await Promise.all([
      apiPrisma.serviceTicketNote.findMany({ select: { id: true } }),
      apiPrisma.serviceTicket.findMany({ select: { id: true } }),
      apiPrisma.user.findMany({ select: { cwMemberId: true } }),
    ]);
    const apiNoteIds = new Set<number>(apiNotes.map((r) => r.id));
    const apiTicketIds = new Set<number>(apiTickets.map((r) => r.id));
    // Only users with an integer cwMemberId can be matched as note authors.
    const apiUserMemberIds = new Set<number>(
      apiUsers
        .map((r) => r.cwMemberId)
        .filter((v): v is number => Number.isInteger(v))
    );
    console.log(
      `[diag] API sets: notes=${apiNoteIds.size} tickets=${apiTicketIds.size} usersWithCwMemberId=${apiUserMemberIds.size}`
    );
    const cwTotal = await cwPrisma.ticketNote.count();
    const apiTotal = apiNoteIds.size;
    // Overall counters; a single row may increment more than one "missing"
    // counter (parent and author checks run independently).
    let missingSrServiceRecId = 0;
    let missingMemberRecId = 0;
    let missingParentTicketInApi = 0;
    let missingAuthorMappingInApi = 0;
    let eligibleForSync = 0;
    let eligibleButMissingInApi = 0;
    // Parallel counters restricted to "automateapi"-authored notes.
    let automateTotal = 0;
    let automateEligibleForSync = 0;
    let automateMissingSrServiceRecId = 0;
    let automateMissingMemberRecId = 0;
    let automateMissingParentTicketInApi = 0;
    let automateMissingAuthorMappingInApi = 0;
    let automateEligibleButMissingInApi = 0;
    // Capped sample lists for spot-checking specific IDs by hand.
    const sampleMissingParentTicketIds: number[] = [];
    const sampleMissingAuthorMemberRecIds: number[] = [];
    const sampleEligibleButMissingNoteIds: number[] = [];
    const automateSampleMemberRecIds: number[] = [];
    const automateSampleNoteIdsMissingInApi: number[] = [];
    // Occurrence counts used to surface the worst offenders in the summary.
    const missingAuthorCounts = new Map<number, number>();
    const missingParentCounts = new Map<number, number>();
    let cursor = 0;
    const batchSize = 5000;
    console.log(
      `[diag] Scanning CW TicketNote rows in batches of ${batchSize}...`
    );
    // Keyset pagination on ticketNoteRecId: each batch fetches rows strictly
    // greater than the last seen rec ID, in ascending order.
    while (true) {
      const batch = await cwPrisma.ticketNote.findMany({
        where: {
          ticketNoteRecId: {
            gt: cursor,
          },
        },
        orderBy: {
          ticketNoteRecId: "asc",
        },
        select: {
          ticketNoteRecId: true,
          srServiceRecId: true,
          memberRecId: true,
          createdBy: true,
          originalAuthor: true,
        },
        take: batchSize,
      });
      if (batch.length === 0) break;
      for (const row of batch) {
        const noteId = row.ticketNoteRecId;
        const srServiceRecId = row.srServiceRecId;
        const memberRecId = row.memberRecId;
        const isAutomate = isAutomateApiAuthor(
          row.createdBy,
          row.originalAuthor
        );
        if (isAutomate) {
          automateTotal++;
          // Collect up to 20 distinct member rec IDs seen on automate notes.
          if (
            memberRecId &&
            automateSampleMemberRecIds.length < 20 &&
            !automateSampleMemberRecIds.includes(memberRecId)
          ) {
            automateSampleMemberRecIds.push(memberRecId);
          }
        }
        // "blocked" means at least one precondition for syncing failed.
        let blocked = false;
        // NOTE(review): falsy check also treats srServiceRecId/memberRecId of
        // 0 as missing — presumably rec IDs start at 1; confirm.
        if (!srServiceRecId) {
          missingSrServiceRecId++;
          if (isAutomate) {
            automateMissingSrServiceRecId++;
          }
          blocked = true;
        } else if (!apiTicketIds.has(srServiceRecId)) {
          // Parent ticket referenced by the note does not exist in the API DB.
          missingParentTicketInApi++;
          if (isAutomate) {
            automateMissingParentTicketInApi++;
          }
          blocked = true;
          missingParentCounts.set(
            srServiceRecId,
            (missingParentCounts.get(srServiceRecId) ?? 0) + 1
          );
          if (sampleMissingParentTicketIds.length < 20) {
            sampleMissingParentTicketIds.push(srServiceRecId);
          }
        }
        if (!memberRecId) {
          missingMemberRecId++;
          if (isAutomate) {
            automateMissingMemberRecId++;
          }
          blocked = true;
        } else if (!apiUserMemberIds.has(memberRecId)) {
          // Author exists in CW but no API user maps to that cwMemberId.
          missingAuthorMappingInApi++;
          if (isAutomate) {
            automateMissingAuthorMappingInApi++;
          }
          blocked = true;
          missingAuthorCounts.set(
            memberRecId,
            (missingAuthorCounts.get(memberRecId) ?? 0) + 1
          );
          if (sampleMissingAuthorMemberRecIds.length < 20) {
            sampleMissingAuthorMemberRecIds.push(memberRecId);
          }
        }
        if (!blocked) {
          eligibleForSync++;
          if (isAutomate) {
            automateEligibleForSync++;
          }
          // Syncable, yet still absent from the API DB — the actual gap.
          if (!apiNoteIds.has(noteId)) {
            eligibleButMissingInApi++;
            if (isAutomate) {
              automateEligibleButMissingInApi++;
              if (automateSampleNoteIdsMissingInApi.length < 20) {
                automateSampleNoteIdsMissingInApi.push(noteId);
              }
            }
            if (sampleEligibleButMissingNoteIds.length < 50) {
              sampleEligibleButMissingNoteIds.push(noteId);
            }
          }
        }
      }
      // batch is non-empty here (checked above), so the assertion is safe.
      cursor = batch[batch.length - 1]!.ticketNoteRecId;
      // Progress heartbeat: fires when the cursor crosses a 50k boundary.
      // (Cursor is a rec ID, not a row count, so cadence is approximate.)
      if (cursor % 50000 < batchSize) {
        console.log(`[diag] Progress cursor=${cursor}`);
      }
    }
    const summary: Summary = {
      cwTotal,
      apiTotal,
      eligibleForSync,
      missingSrServiceRecId,
      missingMemberRecId,
      missingParentTicketInApi,
      missingAuthorMappingInApi,
      eligibleButMissingInApi,
      sampleMissingParentTicketIds,
      sampleMissingAuthorMemberRecIds,
      sampleEligibleButMissingNoteIds,
      // Top 20 offenders by occurrence count, descending.
      topMissingAuthorMemberRecIds: [...missingAuthorCounts.entries()]
        .sort((a, b) => b[1] - a[1])
        .slice(0, 20)
        .map(([memberRecId, count]) => ({ memberRecId, count })),
      topMissingParentTicketIds: [...missingParentCounts.entries()]
        .sort((a, b) => b[1] - a[1])
        .slice(0, 20)
        .map(([srServiceRecId, count]) => ({ srServiceRecId, count })),
      automateApi: {
        total: automateTotal,
        eligibleForSync: automateEligibleForSync,
        missingSrServiceRecId: automateMissingSrServiceRecId,
        missingMemberRecId: automateMissingMemberRecId,
        missingParentTicketInApi: automateMissingParentTicketInApi,
        missingAuthorMappingInApi: automateMissingAuthorMappingInApi,
        eligibleButMissingInApi: automateEligibleButMissingInApi,
        sampleMemberRecIds: automateSampleMemberRecIds,
        sampleNoteIdsMissingInApi: automateSampleNoteIdsMissingInApi,
      },
    };
    console.log("[diag] TicketNote sync gap summary:");
    console.log(JSON.stringify(summary, null, 2));
  } finally {
    // Always release both connections, even when the scan throws.
    await Promise.all([cwPrisma.$disconnect(), apiPrisma.$disconnect()]);
  }
};
// Entry point: run the diagnostic; log any unhandled failure and exit
// non-zero so callers/CI can detect it.
main().catch((error) => {
  console.error("[diag] Failed:", error);
  process.exit(1);
});