Compare commits

...

8 Commits

Author SHA1 Message Date
HoloPanio 32bba31e72 fix(dalpuri): populate locationId and fix closedFlag on opportunities
- Add ownerLevelRecId -> locationId mapping to opportunity translation
- Include soOppStatus in opportunity query and derive closedFlag from
  status.closedFlag (with fallback to legacy oldCloseFlag field)
- Add locationId sanitization guard in both sync.ts and sync-by-table.ts

Note: departmentId is not available in the CW SO_Opportunity table and
remains null for synced records.
2026-04-09 00:22:41 +00:00
HoloPanio 1233535b20 fix(dalpuri): populate userIdentifiersByMemberRecId from CwMember table
When no User accounts have cwMemberId linked, the context map was empty
and all opportunities got primarySalesRepId = null. Now also populate
the map from CwMember rows directly (User-linked entries take precedence),
so rep identifiers resolve correctly regardless of user account linkage.
2026-04-08 23:23:51 +00:00
HoloPanio 2c737b22f1 fix(dalpuri): exit(0) after sync completes to release k8s job
Prisma MSSQL adapter keeps connections open after the sync finishes,
preventing the process from exiting naturally. The k8s job was staying
in Running state indefinitely. Call process.exit(0) on success so the
job completes and the GH workflow step passes.
2026-04-08 21:50:52 +00:00
HoloPanio a3bfe9f374 fix(ci): increase dalpuri sync timeout from 30min to 2h
The full initial sync covers 500k+ rows across all tables and exceeded the
30-minute activeDeadlineSeconds. Bump both the k8s job deadline and
the kubectl wait timeout to 7200s (2 hours).
2026-04-08 21:19:43 +00:00
HoloPanio a106bb15a8 fix(ci): explicit env vars in dalpuri sync job; add CW_DATABASE_URL to secret
envFrom was loading api-env-secret but CW_DATABASE_URL was absent from the
deployed secret, causing sync.ts to fall back to DATABASE_URL (Postgres) as
the MSSQL connection string -> 'Invalid port number: //optima'.

- Replaced envFrom with explicit CW_DATABASE_URL and API_DATABASE_URL env
  entries so the mapping is unambiguous
- Patched api-env-secret in cluster to add CW_DATABASE_URL
2026-04-08 20:41:49 +00:00
HoloPanio d9a431d99a fix(ci): sync-cw-to-api must wait for migrate-api to complete
Migration must finish before sync runs so the schema exists.
2026-04-08 20:27:05 +00:00
HoloPanio 83377a7d0d feat(ci): run dalpuri CW-to-API sync as a k8s Job before deploy
The CW MSSQL and API Postgres addresses are internal to the cluster and
unreachable from GitHub-hosted runners, so the sync must run inside k8s.

- Add dalpuri-sync Docker stage to api/Dockerfile: installs deps,
  generates both Prisma clients, and runs dalpuri/src/sync.ts
- Add dalpuri/kubernetes/sync-job.yaml: mounts api-env-secret (which
  already contains CW_DATABASE_URL) and maps DATABASE_URL -> API_DATABASE_URL
- build-api job now also pushes optima-dalpuri-sync:TAG image
- sync-cw-to-api CI job replaced with kubectl apply/wait pattern,
  needs [build-api, build-worker], blocks deploy-api and deploy-worker
2026-04-08 20:19:06 +00:00
HoloPanio a81618007c fix(worker): pass socket to enqueueDalpuriFullSync
The socket retrieved from ensureManagerSocketReady() was never passed to
enqueueDalpuriFullSync(), so inside createWorkerJob the socket.emit('requestId')
call crashed with 'TypeError: undefined is not an object (evaluating A.emit)'.

This caused every full sync job to fail immediately, leaving the DB empty.
The 5s incremental sync interval then flooded the queue with 4700+ jobs that
all failed too since there was no data.

Also manually cleared the backlog of 4720 failed/pending incremental jobs and
2 failed full sync jobs from the production queue.
2026-04-08 19:34:33 +00:00
7 changed files with 200 additions and 9 deletions
+62 -2
View File
@@ -130,6 +130,17 @@ jobs:
ghcr.io/horizonstacksoftware/optima-api-migrate:latest ghcr.io/horizonstacksoftware/optima-api-migrate:latest
ghcr.io/horizonstacksoftware/optima-api-migrate:${{ github.event.release.tag_name }} ghcr.io/horizonstacksoftware/optima-api-migrate:${{ github.event.release.tag_name }}
- name: Build and push the dalpuri sync image
uses: docker/build-push-action@v6
with:
context: .
file: api/Dockerfile
push: true
target: dalpuri-sync
tags: |
ghcr.io/horizonstacksoftware/optima-dalpuri-sync:latest
ghcr.io/horizonstacksoftware/optima-dalpuri-sync:${{ github.event.release.tag_name }}
build-worker: build-worker:
name: Build - Worker name: Build - Worker
needs: [test-api, test-dalpuri, test-ui] needs: [test-api, test-dalpuri, test-ui]
@@ -276,6 +287,55 @@ jobs:
files: | files: |
ui/out/make/**/*.exe ui/out/make/**/*.exe
# Runs a full CW → API data sync as a Kubernetes Job (the CW MSSQL and
# API Postgres addresses are internal to the cluster and unreachable from
# GitHub-hosted runners). Waits for both images to be built first and
# must succeed before either the API or worker deploys.
sync-cw-to-api:
name: Sync - CW to API
needs: [migrate-api, build-worker]
runs-on: ubuntu-latest
steps:
- name: Set the Kubernetes context
uses: azure/k8s-set-context@v2
with:
method: kubeconfig
kubeconfig: ${{ secrets.KUBECONFIG }}
- name: Checkout source code
uses: actions/checkout@v4
- name: Delete previous sync job if exists
run: kubectl delete job -n optima -l app=dalpuri-sync --ignore-not-found
- name: Apply sync job
run: |
TAG=${{ github.event.release.tag_name }}
sed "s/RELEASE_TAG/${TAG}/g" dalpuri/kubernetes/sync-job.yaml | kubectl apply -f -
- name: Wait for sync to complete
run: |
TAG=${{ github.event.release.tag_name }}
JOB="job/dalpuri-sync-${TAG}"
kubectl wait --for=condition=complete --timeout=7200s -n optima "$JOB" &
WAIT_COMPLETE=$!
kubectl wait --for=condition=failed --timeout=7200s -n optima "$JOB" &
WAIT_FAILED=$!
wait -n $WAIT_COMPLETE $WAIT_FAILED
echo "--- Sync job logs ---"
kubectl logs -n optima "$JOB" --tail=500 || true
if kubectl get -n optima "$JOB" -o jsonpath='{.status.conditions[?(@.type=="Complete")].status}' | grep -q "True"; then
echo "Sync completed successfully."
exit 0
else
echo "Sync FAILED."
exit 1
fi
# ========================================================================== # ==========================================================================
# Deploy jobs # Deploy jobs
# ========================================================================== # ==========================================================================
@@ -332,7 +392,7 @@ jobs:
deploy-api: deploy-api:
name: Deploy - API name: Deploy - API
needs: [migrate-api] needs: [migrate-api, sync-cw-to-api]
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Set the Kubernetes context - name: Set the Kubernetes context
@@ -402,7 +462,7 @@ jobs:
deploy-worker: deploy-worker:
name: Deploy - Worker name: Deploy - Worker
needs: [build-worker] needs: [build-worker, sync-cw-to-api]
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Set the Kubernetes context - name: Set the Kubernetes context
+30
View File
@@ -134,3 +134,33 @@ RUN chmod +x /app/api/prisma/migrate-entrypoint.sh
WORKDIR /app/api WORKDIR /app/api
CMD ["sh", "prisma/migrate-entrypoint.sh"] CMD ["sh", "prisma/migrate-entrypoint.sh"]
# ---- Stage 7: Dalpuri CW-to-API sync runner ----
FROM oven/bun:1.3.11 AS dalpuri-sync
WORKDIR /app
COPY package.json bun.lock ./
COPY api/package.json ./api/package.json
COPY dalpuri/package.json ./dalpuri/package.json
COPY ui/package.json ./ui/package.json
COPY patches ./patches
RUN bun install --frozen-lockfile
COPY dalpuri/src/ ./dalpuri/src/
COPY dalpuri/prisma/ ./dalpuri/prisma/
COPY dalpuri/prisma.config.ts ./dalpuri/prisma.config.ts
COPY api/prisma/ ./api/prisma/
COPY api/prisma.config.ts ./api/prisma.config.ts
WORKDIR /app/dalpuri
RUN DATABASE_URL="sqlserver://localhost:1433;database=dummy;user=dummy;password=dummy;trustServerCertificate=true" \
bunx prisma generate
WORKDIR /app/api
RUN DATABASE_URL="postgresql://dummy:dummy@localhost:5432/dummy" bunx prisma generate
WORKDIR /app/dalpuri
CMD ["bun", "run", "src/sync.ts"]
+1 -1
View File
@@ -165,7 +165,7 @@ if (import.meta.main) {
const { executeIncrementalSync } = await import("./modules/workers/dalpuri-sync"); const { executeIncrementalSync } = await import("./modules/workers/dalpuri-sync");
await boss.work(WorkerQueue.DALPURI_FULL_SYNC, async () => { await boss.work(WorkerQueue.DALPURI_FULL_SYNC, async () => {
const socket = await ensureManagerSocketReady(); const socket = await ensureManagerSocketReady();
await enqueueDalpuriFullSync(); await enqueueDalpuriFullSync(socket);
}); });
console.log("[worker] Registered DALPURI_FULL_SYNC job handler"); console.log("[worker] Registered DALPURI_FULL_SYNC job handler");
+33
View File
@@ -0,0 +1,33 @@
apiVersion: batch/v1
kind: Job
metadata:
name: dalpuri-sync-RELEASE_TAG
namespace: optima
labels:
app: dalpuri-sync
spec:
backoffLimit: 0
ttlSecondsAfterFinished: 86400
activeDeadlineSeconds: 7200
template:
metadata:
labels:
app: dalpuri-sync
spec:
containers:
- name: sync
image: ghcr.io/horizonstacksoftware/optima-dalpuri-sync:RELEASE_TAG
env:
- name: CW_DATABASE_URL
valueFrom:
secretKeyRef:
name: api-env-secret
key: CW_DATABASE_URL
- name: API_DATABASE_URL
valueFrom:
secretKeyRef:
name: api-env-secret
key: DATABASE_URL
restartPolicy: Never
imagePullSecrets:
- name: github-container-registry
+27
View File
@@ -294,6 +294,22 @@ const refreshContextFromApi = async (
} }
} }
const cwMembers = await apiPrisma.cwMember.findMany({
select: { cwMemberId: true, identifier: true },
});
for (const member of cwMembers) {
if (
member.cwMemberId != null &&
member.identifier &&
!context.userIdentifiersByMemberRecId.has(member.cwMemberId)
) {
context.userIdentifiersByMemberRecId.set(
member.cwMemberId,
member.identifier
);
}
}
for (const board of boards) { for (const board of boards) {
context.serviceTicketBoardUidsById.set(board.id, board.uid); context.serviceTicketBoardUidsById.set(board.id, board.uid);
} }
@@ -426,6 +442,12 @@ const sanitizeModelData = (
) { ) {
sanitized.statusId = null; sanitized.statusId = null;
} }
if (
sanitized.locationId != null &&
!context.corporateLocationIds.has(sanitized.locationId as number)
) {
sanitized.locationId = null;
}
} }
if (targetModel === "schedule") { if (targetModel === "schedule") {
@@ -734,6 +756,11 @@ const getConfigForTable = (table: string): SyncTableConfig | null => {
secondarySalesFlag: true, secondarySalesFlag: true,
}, },
}, },
soOppStatus: {
select: {
closedFlag: true,
},
},
}, },
}, },
}, },
+36 -4
View File
@@ -323,6 +323,22 @@ const refreshContextFromApi = async (
} }
} }
const cwMembers = await apiPrisma.cwMember.findMany({
select: { cwMemberId: true, identifier: true },
});
for (const member of cwMembers) {
if (
member.cwMemberId != null &&
member.identifier &&
!context.userIdentifiersByMemberRecId.has(member.cwMemberId)
) {
context.userIdentifiersByMemberRecId.set(
member.cwMemberId,
member.identifier
);
}
}
for (const board of boards) { for (const board of boards) {
context.serviceTicketBoardUidsById.set(board.id, board.uid); context.serviceTicketBoardUidsById.set(board.id, board.uid);
} }
@@ -636,6 +652,13 @@ const sanitizeModelData = (
) { ) {
sanitized.stageId = null; sanitized.stageId = null;
} }
// Nullify locationId if the corporate location doesn't exist
if (
sanitized.locationId != null &&
!context.corporateLocationIds.has(sanitized.locationId as number)
) {
sanitized.locationId = null;
}
// Nullify taxCodeId if the tax code hasn't synced yet // Nullify taxCodeId if the tax code hasn't synced yet
if ( if (
sanitized.taxCodeId != null && sanitized.taxCodeId != null &&
@@ -1585,6 +1608,11 @@ export const executeFullDalpuriSync = async (options?: {
secondarySalesFlag: true, secondarySalesFlag: true,
}, },
}, },
soOppStatus: {
select: {
closedFlag: true,
},
},
}, },
}, },
}, },
@@ -1860,8 +1888,12 @@ export const executeForcedIncrementalDalpuriSync = async (options?: {
}; };
if (import.meta.main) { if (import.meta.main) {
executeFullDalpuriSync().catch((error) => { executeFullDalpuriSync()
console.error("CW -> API sync failed:", error); .then(() => {
process.exit(1); process.exit(0);
}); })
.catch((error) => {
console.error("CW -> API sync failed:", error);
process.exit(1);
});
} }
+10 -1
View File
@@ -1,6 +1,7 @@
import { import {
Opportunity as CwOpportunity, Opportunity as CwOpportunity,
OpportunityMember as CwOpportunityMember, OpportunityMember as CwOpportunityMember,
SoOppStatus as CwSoOppStatus,
} from "../../generated/prisma/client"; } from "../../generated/prisma/client";
import { OpportunityInterest } from "../../../api/generated/prisma/client"; import { OpportunityInterest } from "../../../api/generated/prisma/client";
import { Translation, skipRow } from "./types"; import { Translation, skipRow } from "./types";
@@ -30,6 +31,7 @@ type ApiOpportunityRecord = {
dateBecameLead?: Date | null; dateBecameLead?: Date | null;
closedDate?: Date | null; closedDate?: Date | null;
closedFlag: boolean; closedFlag: boolean;
locationId?: number | null;
closedById?: string | null; closedById?: string | null;
updatedBy: string; updatedBy: string;
eneteredBy: string; eneteredBy: string;
@@ -42,6 +44,7 @@ type CwOpportunityWithMembers = CwOpportunity & {
CwOpportunityMember, CwOpportunityMember,
"memberRecId" | "primarySalesFlag" | "secondarySalesFlag" "memberRecId" | "primarySalesFlag" | "secondarySalesFlag"
>[]; >[];
soOppStatus?: Pick<CwSoOppStatus, "closedFlag"> | null;
}; };
const toInterest = (value: number | null): OpportunityInterest | null => { const toInterest = (value: number | null): OpportunityInterest | null => {
@@ -119,13 +122,19 @@ export const opportunityTranslation: Translation<
}, },
{ from: "companyRecId", to: "companyId" }, { from: "companyRecId", to: "companyId" },
{ from: "contactRecId", to: "contactId" }, { from: "contactRecId", to: "contactId" },
{ from: "ownerLevelRecId", to: "locationId" },
{ from: "companyAddressRecId", to: "siteId" }, { from: "companyAddressRecId", to: "siteId" },
{ from: "poNumber", to: "customerPO" }, { from: "poNumber", to: "customerPO" },
{ from: "dateCloseExpected", to: "expectedCloseDate" }, { from: "dateCloseExpected", to: "expectedCloseDate" },
{ from: "datePipelineChange", to: "pipelineChangeDate" }, { from: "datePipelineChange", to: "pipelineChangeDate" },
{ from: "dateBecameLead", to: "dateBecameLead" }, { from: "dateBecameLead", to: "dateBecameLead" },
{ from: "dateClosed", to: "closedDate" }, { from: "dateClosed", to: "closedDate" },
{ from: "oldCloseFlag", to: "closedFlag" }, {
from: "oldCloseFlag",
to: "closedFlag",
process: (_value, _context, row) =>
row.soOppStatus?.closedFlag ?? row.oldCloseFlag ?? false,
},
{ from: "closedBy", to: "closedById" }, { from: "closedBy", to: "closedById" },
{ {
from: "updatedBy", from: "updatedBy",