fix: refactor paymentStatus to improve error handling and response structure
Deploy Application / deploy (push) Successful in 38s

faisolavolut 2025-11-11 16:00:50 +07:00
parent a19933a06e
commit e09b2517e1
2 changed files with 135 additions and 98 deletions


@ -82,6 +82,119 @@ function mapRowToRvOpenitem(row: any) {
};
}
// Function to process a single database sync
async function processDatabaseSync(database: any) {
const pool = await getPoolForDbId(database.db_id);
const q = "SELECT * FROM rv_openitem";
// Set timeout to 1 hour for sync operations
const client = await pool.connect();
let result;
try {
result = await client.query(q);
} finally {
client.release();
}
const rows = result.rows || [];
console.log(
`Syncing rv_openitem for database ${database.name} (${database.db_id}): fetched ${rows.length} rows`
);
const freshDB = await db.rv_openitem.count({
where: { db_id: database.db_id },
});
let dbInserted = 0;
let dbUpdated = 0;
const batchSize = 200;
// Process in batches to avoid memory issues
for (let i = 0; i < rows.length; i += batchSize) {
const batch = rows.slice(i, i + batchSize);
const ops = batch.map((r: any) => mapRowToRvOpenitem(r));
let transactions = [];
if (freshDB === 0) {
// Fresh DB - just insert all records
transactions = ops.map((op) =>
db.rv_openitem.create({
data: { ...op, db_id: database.db_id },
})
);
dbInserted += ops.length;
} else {
// Check existing records in bulk instead of one by one
const invoiceIds = ops.map((op) => op.c_invoice_id).filter(Boolean);
const existingRecords = await db.rv_openitem.findMany({
where: {
c_invoice_id: { in: invoiceIds },
db_id: database.db_id,
},
select: { id: true, c_invoice_id: true },
});
const existingMap = new Map(
existingRecords.map((record) => [record.c_invoice_id, record.id])
);
for (const op of ops) {
const existingId = existingMap.get(op.c_invoice_id);
if (existingId) {
transactions.push(
db.rv_openitem.update({
where: { id: existingId },
data: op,
})
);
dbUpdated++;
} else {
transactions.push(
db.rv_openitem.create({
data: { ...op, db_id: database.db_id },
})
);
dbInserted++;
}
}
}
// Execute batch transaction
if (transactions.length > 0) {
await db.$transaction(transactions);
}
}
// Cleanup: mark local records as not paid if they no longer exist in source
if (freshDB > 0) {
const existingIds = rows
.map((r: any) => r.c_invoice_id)
.filter((v: any) => v !== undefined && v !== null);
if (existingIds.length > 0) {
const toUpdate = await db.rv_openitem.findMany({
where: {
db_id: database.db_id,
c_invoice_id: { notIn: existingIds },
},
});
if (toUpdate.length > 0) {
const updateTransactions = toUpdate.map((item) =>
db.rv_openitem.update({
where: { id: item.id },
data: { is_pay: "N" },
})
);
await db.$transaction(updateTransactions);
}
}
}
return { inserted: dbInserted, updated: dbUpdated };
}
export default class SyncController {
static async syncRvOpenitem(req: Request, res: Response) {
try {
@ -90,86 +203,21 @@ export default class SyncController {
is_synced: "Y", is_synced: "Y",
}, },
}); });
// sync per db one by one (each database work is wrapped in a transaction)
let totalInserted = 0;
let totalUpdated = 0;
for (const database of databases) {
const pool = await getPoolForDbId(database.db_id);
const q = "SELECT * FROM rv_openitem";
// Set timeout to 1 hour for sync operations
const client = await pool.connect();
let result;
try {
result = await client.query(q);
} finally {
client.release();
}
const rows = result.rows || [];
const freshDB = await db.rv_openitem.count({
where: { db_id: database.db_id },
});
// perform upserts in smaller transactions (per-batch) to avoid long-lived transactions
const batchSize = 200;
const transactions = [];
for (let i = 0; i < rows.length; i += batchSize) {
const batch = rows.slice(i, i + batchSize);
const ops = batch.map((r: any) => mapRowToRvOpenitem(r));
for (const op of ops) {
if (freshDB === 0) {
transactions.push(
db.rv_openitem.create({
data: { ...op, db_id: database.db_id },
})
);
totalInserted++;
} else {
const existing = await db.rv_openitem.findFirst({
where: { c_invoice_id: op.c_invoice_id },
});
if (existing) {
transactions.push(
db.rv_openitem.update({
where: { id: existing.id },
data: op,
})
);
totalUpdated++;
} else {
transactions.push(
db.rv_openitem.create({
data: { ...op, db_id: database.db_id },
})
);
totalInserted++;
}
}
}
}
// cleanup: mark local records as not paid if they no longer exist in source
if (freshDB > 0) {
const existingIds = rows
.map((r: any) => r.c_invoice_id)
.filter((v: any) => v !== undefined && v !== null);
if (existingIds.length > 0) {
const toUpdate = await db.rv_openitem.findMany({
where: {
db_id: database.db_id,
c_invoice_id: { notIn: existingIds },
},
});
for (const item of toUpdate) {
transactions.push(
db.rv_openitem.update({
where: { id: item.id },
data: { is_pay: "N" },
})
);
}
}
}
if (transactions.length > 0) await db.$transaction(transactions);
}
// Process all databases in parallel using Promise.all
const results = await Promise.all(
databases.map((database) => processDatabaseSync(database))
);
// Sum up results from all databases
const totalInserted = results.reduce(
(sum, result) => sum + result.inserted,
0
);
const totalUpdated = results.reduce(
(sum, result) => sum + result.updated,
0
);
return res.json({
status: "success",


@ -446,11 +446,9 @@ export async function paymentStatus(req: Request, res: Response) {
const where: any = {};
if (startdate || enddate) {
if (!startdate || !enddate) {
return res
.status(400)
.json({
error: "startdate and enddate are required when filtering by date",
});
return res.status(400).json({
error: "startdate and enddate are required when filtering by date",
});
}
where.created_at = {} as any;
if (startdate) {
@ -489,21 +487,12 @@ export async function paymentStatus(req: Request, res: Response) {
include: { transactions: true },
orderBy: { created_at: "desc" },
});
if (rows.length === 0) {
return res
.status(404)
.json({ responseCode: "404", responseMessage: "no payment found" });
}
return res.json({ count: rows.length, data: rows });
const result = rows.map((r: any) => ({
id: r.id,
transaction_id: r.transaction_id,
amount: r.amount,
documentno: r.documentno,
c_invoice_id: r.c_invoice_id,
created_at: r.created_at,
tenant_id: r.tenant_id,
transaction_status: r.transactions ? r.transactions.status : null,
transaction_reference: r.transactions ? r.transactions.referenceNo : null,
}));
return res.json({ count: result.length, data: result });
} catch (err) {
console.error("paymentStatus error", err);
return res.status(500).json({