I hope you are doing well! I have developed a Map/Reduce script that sends dataset results by email as a CSV file. When the dataset is large, the script exceeds the usage limit in the reduce stage. I would appreciate any suggestions on this. Thank you!
/**
 * @NApiVersion 2.1
 * @NScriptType MapReduceScript
 */
define(['N/email', 'N/file', 'N/runtime', 'N/dataset', 'N/log', 'N/search', 'N/compress'], function (email, file, runtime, dataset, log, search, compress) {
function getInputData() {
try {
let today = new Date();
let firstWorkingDay = getFirstWorkingDay(today.getFullYear(), today.getMonth() + 1);
// Exit if today is NOT the first working day
if (today.toDateString() !== firstWorkingDay.toDateString()) {
log.audit('Not First Working Day', `Today: ${today.toDateString()}, First Working Day: ${firstWorkingDay.toDateString()}`);
return [];
}
let datasetObj = dataset.load({ id: 'custdataset70' });
let pagedData = datasetObj.runPaged({ pageSize: 1000 });
// Build the column list once instead of attaching a fresh copy to every row
let datasetColumns = datasetObj.columns.map(col => ({ id: col.id, alias: col.alias }));
let allResults = [];
pagedData.pageRanges.forEach(function (pageRange) {
let currentPage = pagedData.fetch({ index: pageRange.index });
currentPage.data.results.forEach(function (result) {
allResults.push({
rowValues: result.values,
datasetColumns: datasetColumns
});
});
});
log.audit("Dataset Results Count", allResults.length);
return allResults;
} catch (error) {
log.error('Error in getInputData', error);
return [];
}
}
function map(context) {
try {
let data = JSON.parse(context.value);
let rowValues = data.rowValues;
let datasetColumns = data.datasetColumns;
let poNumberIndex = -1, poStatusIndex = -1, billStatusIndex = -1, currencyIndex = -1;
let customerIndex = -1, verticalIndex = -1, subproductIndex = -1, subsidiaryIndex = -1;
//Get column indexes
datasetColumns.forEach((col, index) => {
if (col.alias === 'tranid') poNumberIndex = index;
if (col.alias === 'status') poStatusIndex = index;
if (col.alias === 'status_1') billStatusIndex = index;
if (col.alias === 'currency') currencyIndex = index;
if (col.alias === 'entity') customerIndex = index;
if (col.alias === 'cseg_vertical') verticalIndex = index;
if (col.alias === 'custbody_subdepartment') subproductIndex = index;
if (col.alias === 'subsidiary') subsidiaryIndex = index;
});
if (poNumberIndex === -1 || poStatusIndex === -1 || billStatusIndex === -1 || currencyIndex === -1 || customerIndex === -1 ||verticalIndex === -1 || subproductIndex === -1 || subsidiaryIndex === -1) {
log.error('Error', 'One or more required columns were not found in the dataset.');
return;
}
// Replace bill status codes with their display labels
let billStatusLabels = {
A: 'Bill: Open',
B: 'Bill: Paid In Full',
C: 'Item Fulfillment : Shipped',
Y: 'Item Receipt : Undefined'
};
if (billStatusLabels[rowValues[billStatusIndex]]) {
rowValues[billStatusIndex] = billStatusLabels[rowValues[billStatusIndex]];
}
// Replace Purchase Order status codes with their display labels
let poStatusLabels = {
A: 'Purchase Order: Pending Supervisor Approval',
B: 'Purchase Order: Pending Receipt',
C: 'Purchase Order : Rejected by Supervisor',
D: 'Purchase Order : Partially Received',
E: 'Purchase Order : Pending Billing/Partially Received',
F: 'Purchase Order : Pending Bill',
G: 'Purchase Order : Fully Billed',
H: 'Purchase Order : Closed'
};
if (poStatusLabels[rowValues[poStatusIndex]]) {
rowValues[poStatusIndex] = poStatusLabels[rowValues[poStatusIndex]];
}
// Customer lookup: replace the internal id with "entityid altname"
if (rowValues[customerIndex]) {
let customerLookup = search.lookupFields({
type: search.Type.CUSTOMER,
id: rowValues[customerIndex],
columns: ['entityid', 'altname']
});
rowValues[customerIndex] = customerLookup.entityid + ' ' + customerLookup.altname;
}
// Replace remaining internal ids with display names
rowValues[verticalIndex] = lookupField('customrecord_cseg_vertical', rowValues[verticalIndex]);
rowValues[subproductIndex] = lookupField('customrecord_subdepartment', rowValues[subproductIndex]);
rowValues[subsidiaryIndex] = lookupField('subsidiary', rowValues[subsidiaryIndex]);
rowValues[currencyIndex] = lookupField('currency', rowValues[currencyIndex]);
let csvRow = rowValues.map(value => {
// Quote values that contain commas, quotes, or line breaks
if (typeof value === 'string' && /[",\n]/.test(value)) {
return `"${value.replace(/"/g, '""')}"`;
}
return value;
}).join(',');
context.write({ key: csvRow, value: csvRow });
} catch (error) {
log.error('Error in map stage', JSON.stringify(error));
}
}
function reduce(context) {
try {
let uniqueRow = context.values[0]; // Identical rows share the same key, so keep only one copy
context.write({ key: context.key, value: uniqueRow });
} catch (error) {
log.error('Error in reduce stage', JSON.stringify(error));
}
}
function summarize(summary) {
try {
let scriptObj = runtime.getCurrentScript();
let recipientEmailsString = scriptObj.getParameter({ name: 'custscript_recepient_email_address' });
if (!recipientEmailsString) {
log.error('Email Error', 'Recipient emails not provided in script parameter.');
return;
}
let recipientEmailsArray = recipientEmailsString.split(',').map(address => address.trim());
log.audit("recipientEmailsArray",recipientEmailsArray);
let datasetObj = dataset.load({ id: 'custdataset70' });
let headers = datasetObj.columns.map(col => col.label || col.id);
let today = new Date(new Date().toLocaleString("en-US", { timeZone: "Australia/Sydney" }));
let isoString = today.toISOString(); // Get ISO 8601 string
let fileName = 'Vendor Spend Report_' + isoString.replace(/:/g, '_') + '.csv';
let fileObj = file.create({
name: fileName,
fileType: file.Type.CSV,
contents: '',
isOnline: true,
folder: 5081389
});
fileObj.appendLine({
value: headers.join(',')
});
summary.output.iterator().each((key, value) => {
fileObj.appendLine({
value: value
});
return true;
});
let archiver = compress.createArchiver();
archiver.add({
file: fileObj
});
let zipFile = archiver.archive({
name: 'Vendor Spend Report File_' + isoString.replace(/:/g, '_') + '.zip'
});
zipFile.folder = 5081389;
let fileId = zipFile.save();
if (fileId) {
sendEmail(fileId, recipientEmailsArray);
log.audit('Success', 'Email sent with dataset results.');
}
log.audit('Remaining governance units', runtime.getCurrentScript().getRemainingUsage());
} catch (error) {
log.error('Error in summarize stage', error);
}
}
function sendEmail(fileId,recipientEmailsArray) {
let emailBody;
try {
if (!fileId) {
log.audit('Email Skipped', 'No file generated.');
return;
}
let lastDayOfPreviousMonth = getLastDayOfPreviousMonth("Australia/Sydney");
if (lastDayOfPreviousMonth) {
// Format dates for the email body
let startDate = '1 January 2022';
let endDate = lastDayOfPreviousMonth.toLocaleDateString('en-AU', { day: 'numeric', month: 'long', year: 'numeric' });
// Email body with date range
emailBody = `<p>*** This is an automated e-mail. Please do not respond. ****</p>` +
`<p>Dear User,</p>` +
`<p>Please find the attached vendor spend report for the period from ${startDate} to ${endDate}.</p>` +
`<p>Thank you<br>` +
`NetSuite Support.</p>` +
`<p>Please do not reply directly to this e-mail, as we are unable to process it. If you are not the appropriate recipient for this type of communication, contact us via NetSuite support<br>` +
`<a href="mailto:netsuitesupport@team.telstra.com" style="color: blue;">netsuitesupport@team.telstra.com</a>.</p>`;
} else {
log.error('Email Error', 'Could not determine the reporting period end date; email not sent.');
return;
}
email.send({
author: 115293,
recipients: recipientEmailsArray,
subject: 'Monthly Vendor Spend Report',
body:emailBody,
attachments: [file.load({ id: fileId })]
});
} catch (error) {
log.error('Error sending email', JSON.stringify(error));
}
}
function lookupField(recordType, internalId) {
try {
if (!internalId) return null; // Handle null values
let lookup = search.lookupFields({
type: recordType,
id: internalId,
columns: ['name']
});
return lookup.name ? lookup.name : null;
} catch (error) {
log.error(`Error looking up ${recordType} for ID ${internalId}`, JSON.stringify(error));
return null; // keep error results consistent with missing ids
}
}
function getFirstWorkingDay(year, month) {
let date = new Date(year, month - 1, 1);
while (date.getDay() === 0 || date.getDay() === 6) { // 0 = Sunday, 6 = Saturday
date.setDate(date.getDate() + 1);
}
return date;
}
function getLastDayOfPreviousMonth(timezone) {
try {
// Get the current date in the specified timezone
let todayDate = new Date(new Date().toLocaleString("en-US", { timeZone: timezone }));
// Calculate the last day of the previous month
return new Date(todayDate.getFullYear(), todayDate.getMonth(), 0);
} catch (error) {
log.error('Error in getLastDayOfPreviousMonth', JSON.stringify(error));
return null; // Return null in case of error
}
}
return {
getInputData: getInputData,
map: map,
reduce: reduce,
summarize: summarize
};
});
To avoid hitting the governance usage limit, NetSuite lets you gracefully reschedule your script before it reaches that limit by leveraging task.create() and submitting the same script again. Here is how you can implement the rescheduling logic inside your script (note that this requires loading the 'N/task' module alongside 'N/runtime'):
var myscript = runtime.getCurrentScript();
var remainingUsage = myscript.getRemainingUsage();
log.debug("remainingUsage", remainingUsage);
// Reschedule if remaining usage drops below safe threshold (e.g., 2000)
if (remainingUsage < 2000) {
log.debug("Rescheduling Script", "Triggered due to low remaining usage");
var mrTask = task.create({
taskType: task.TaskType.MAP_REDUCE
});
// Use same script and deployment IDs to rerun current script
mrTask.scriptId = myscript.id;
mrTask.deploymentId = myscript.deploymentId;
// Optional: pass parameters if needed using the `params` property
mrTask.params = {
'custscript_param_key': value // placeholder parameter id and value
};
var mrTaskId = mrTask.submit(); // Submits the new task
return; // Stop further execution to avoid exceeding limit
}
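For the script above, a minimal sketch of how this could be wired in is shown below. It assumes 'N/task' is added to the define() list; the helper name rescheduleIfLowUsage, the 2000-unit threshold, and the call site at the top of summarize() are illustrative choices, not part of the original script, so place the check wherever your deployment actually runs low on units.

// Sketch only: define() gains 'N/task', e.g.
// define(['N/email', 'N/file', 'N/runtime', 'N/dataset', 'N/log', 'N/search', 'N/compress', 'N/task'],
//     function (email, file, runtime, dataset, log, search, compress, task) { ... });

// Hypothetical helper: returns true when a new task was submitted and the caller should stop.
function rescheduleIfLowUsage(threshold) {
    var scriptObj = runtime.getCurrentScript();
    var remainingUsage = scriptObj.getRemainingUsage();
    if (remainingUsage >= threshold) {
        return false; // enough units left, keep going
    }
    log.audit('Rescheduling Script', 'Remaining usage ' + remainingUsage + ' is below threshold ' + threshold);
    var mrTask = task.create({ taskType: task.TaskType.MAP_REDUCE });
    mrTask.scriptId = scriptObj.id;               // rerun the same script
    mrTask.deploymentId = scriptObj.deploymentId; // on the same deployment
    try {
        mrTask.submit();
    } catch (e) {
        // Submission can fail, for example if the same deployment is still running
        log.error('Reschedule submit failed', JSON.stringify(e));
    }
    return true;
}

// Example call site, at the top of summarize() before the file and email work:
function summarize(summary) {
    if (rescheduleIfLowUsage(2000)) {
        return; // stop here; the resubmitted task runs the script again from the start
    }
    // ... existing summarize logic ...
}

Keep in mind that resubmitting reruns the whole Map/Reduce from getInputData, so the script needs to be able to pick up (or skip) work it has already completed, for example by passing a marker through mrTask.params as shown in the snippet above.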