I’m running into a problem with my Airtable automation that connects to Plumsail. The script works perfectly when I run it manually, but when the automation triggers automatically after a record gets approved, it fails with a message saying I’ve hit the limit of 30 table queries per script run.
The automation is supposed to generate a PDF document when an item gets approved through the interface. Manual execution works fine and creates the PDF as expected, but the automated version always stops with this quota error.
Here’s my code that handles the document generation:
// --- Airtable automation: generate a PDF via Plumsail and attach it ---
// Input: a `RecordId` automation variable identifying the approved record.
// Flow: build a token payload from the record, POST it to the Plumsail
// process endpoint, attach the returned document to the record, and expose
// the file link as an automation output.
const settings = input.config();
const mainTable = base.getTable('Invoice Data');
const apiEndpoint = 'https://api.plumsail.com/api/v2/processes/abcd1234/efgh5678/start';
const targetRecordId = settings.RecordId;

// Dot-separated token paths selecting which fields (and which linked-record
// sub-fields) are included in the payload sent to Plumsail.
const fieldTokens = [
  'InvoiceDate',
  'InvoiceNumber',
  'ApprovalDate1',
  'ApprovalDate2',
  'Vendor',
  'Vendor.VendorName',
  'Vendor.Street',
  'Vendor.City',
  'Vendor.PostalCode',
  'Vendor.Email',
  'Vendor.Phone',
  'Vendor.Region',
  'Vendor.Region.title',
  'DeliveryAddress',
  'DeliveryAddress.ContactNumber',
  'InvoiceItems',
  'InvoiceItems.ItemDescription',
  'InvoiceItems.Quantity',
  'InvoiceItems.UnitPrice',
  'Approver1',
  'Approver1.FullName',
  'Approver2',
  'Approver2.FullName'
];

const requestData = await buildPayload(targetRecordId, mainTable, fieldTokens);

const apiResponse = await fetch(apiEndpoint, {
  method: 'POST',
  body: JSON.stringify(requestData),
  headers: {
    'Content-Type': 'application/json'
  }
});
// Fail fast with a clear message on an HTTP error instead of attempting to
// parse an error page as JSON further down.
if (!apiResponse.ok) {
  throw new Error(`Plumsail request failed: ${apiResponse.status} ${apiResponse.statusText}`);
}
const resultData = await apiResponse.json();

await attachFileToRecord(resultData, targetRecordId, 'fld9x9YmcXxZgbYbt', mainTable);
output.set('fileLink', resultData.link);
/**
 * Build the Plumsail request payload for one record.
 *
 * Splits the dotted token paths into unique single-level segments, selects
 * only the table fields whose sanitized names match those segments, reads
 * the record, and prunes the extracted data back down to the requested
 * token paths.
 *
 * @param {string} recordId - id of the record to serialize.
 * @param {Table} dataTable - the Airtable table containing the record.
 * @param {string[]} tokenList - dotted token paths to include.
 * @returns {Promise<Object>} the payload object, or {} if the record is missing.
 */
async function buildPayload(recordId, dataTable, tokenList) {
  // De-duplicated flat list of every segment appearing in any token path.
  const segmentSet = new Set();
  for (const token of tokenList) {
    for (const segment of token.split('.')) {
      segmentSet.add(segment);
    }
  }
  const basicTokens = [...segmentSet];

  // Only fetch fields whose sanitized name matches a requested segment.
  const requiredFields = dataTable.fields.filter((field) =>
    basicTokens.includes(cleanTokenName(field.name.replace(/\./g, '')))
  );

  const targetRecord = await dataTable.selectRecordAsync(recordId, { fields: requiredFields });
  if (!targetRecord) {
    return {};
  }

  const recordData = await extractRecordData(requiredFields, targetRecord, [dataTable.id], basicTokens);
  return filterTokenData(recordData, tokenList);
}
/**
 * Recursively read a record's cell values into a plain object keyed by
 * sanitized token names, following linked-record fields one table at a time.
 *
 * @param {Field[]} fieldList - fields to read from `record`.
 * @param {Record} record - the Airtable record being serialized.
 * @param {string[]} connectedTables - table ids already visited on this path,
 *   used to stop infinite recursion through circular links.
 * @param {string[]} tokenList - flattened token segments, threaded through the
 *   recursion (not consumed directly in this function).
 * @returns {Promise<Object>} token-name -> cell-value map.
 *
 * NOTE(review): every `multipleRecordLinks` field issues its own
 * `selectRecordsAsync` call, repeated at each recursion level. With several
 * link fields this is what exhausts Airtable's 30-query-per-run limit in
 * automations (automations enforce the quota; manual script runs are more
 * lenient). Consider batching queries per linked table if the quota is hit.
 */
async function extractRecordData(fieldList, record, connectedTables, tokenList) {
  const usedNames = [];
  const result = {};
  // Sequential loop replaces the original async-reduce; order of processing
  // (and therefore name-collision resolution) is unchanged.
  for (const field of fieldList) {
    // Sanitized names of all *other* fields, plus names already assigned,
    // so generated fallback names never collide.
    const existingFieldNames = fieldList
      .filter((other) => other.name !== field.name)
      .map((other) => cleanTokenName(other.name.replace(/\./g, '')));
    existingFieldNames.push(...usedNames);

    const cleanedName = cleanTokenName(field.name.replace(/\./g, ''));
    let finalFieldName;
    if (field.name.toLowerCase() === cleanedName.toLowerCase()) {
      // Sanitizing changed nothing but case: keep the cleaned name directly.
      finalFieldName = cleanedName;
    } else {
      finalFieldName = generateUniqueName(existingFieldNames, cleanedName);
      usedNames.push(finalFieldName);
    }

    if (field.type === 'multipleRecordLinks') {
      const linkedTableId = field.options.linkedTableId;
      const singleLink = field.options.prefersSingleRecordLink;
      const linkedTable = base.getTable(linkedTableId);

      if (connectedTables.includes(linkedTableId) || !linkedTable) {
        // Circular link (or unresolvable table): emit the raw cell value
        // instead of recursing. BUG FIX: guard against an empty cell — the
        // original `cellData[0].name` threw when the link was blank.
        const cellData = record.getCellValue(field);
        result[finalFieldName] = singleLink ? cellData?.[0]?.name ?? null : cellData;
        continue;
      }

      const availableFields = linkedTable.fields;
      const linkedIds = (record?.getCellValue(field.name) || []).map(({ id }) => id);
      // One table query per link field per record — see the quota note above.
      const linkedQuery = await linkedTable.selectRecordsAsync({
        fields: availableFields,
        recordIds: linkedIds
      });
      const extractedValues = await Promise.all(
        linkedQuery.records.map((linkedRecord) =>
          extractRecordData(availableFields, linkedRecord, connectedTables.concat(linkedTableId), tokenList)
        )
      );

      if (singleLink) {
        // Single-link fields collapse to the first linked record's data.
        result[finalFieldName] = { ...(extractedValues[0] ?? {}) };
      } else {
        result[finalFieldName] = extractedValues;
      }
      continue;
    }

    if (field.type === 'multipleLookupValues') {
      const lookupData = record.getCellValue(field);
      if (Array.isArray(lookupData)) {
        // Unwrap Airtable's `{text: ...}` lookup wrappers into `{value: ...}`.
        result[finalFieldName] = lookupData.map((item) =>
          item && Object.keys(item).length === 1 && item.text
            ? { value: item.text }
            : { value: item }
        );
      } else {
        result[finalFieldName] = lookupData;
      }
      continue;
    }

    if (field.type === 'barcode') {
      // Barcode cells serialize best as their display string.
      result[finalFieldName] = record.getCellValueAsString(field);
      continue;
    }

    if (field.type === 'checkbox') {
      // Unchecked boxes read as null; normalize to false.
      result[finalFieldName] = record.getCellValue(field) || false;
      continue;
    }

    result[finalFieldName] = record.getCellValue(field);
  }
  return result;
}
/**
 * Prune a nested data tree so that only keys whose dotted path appears in
 * `patternList` survive. Arrays are filtered element-by-element under the
 * same path prefix; nested objects are filtered recursively.
 *
 * @param {Object} dataTree - the object to prune.
 * @param {string[]} patternList - allowed dotted key paths.
 * @param {?string} currentPrefix - dotted path accumulated so far.
 * @returns {Object} a new object containing only the allowed entries.
 */
function filterTokenData(dataTree, patternList, currentPrefix = null) {
  const filtered = {};
  for (const key of Object.keys(dataTree)) {
    const fullPath = currentPrefix ? `${String(currentPrefix)}.${String(key)}` : key;
    if (!patternList.includes(fullPath)) {
      continue; // path not requested — drop the entry
    }
    const value = dataTree[key];
    if (Array.isArray(value)) {
      filtered[key] = value.map((element) => filterTokenData(element, patternList, fullPath));
    } else if (value && typeof value === 'object') {
      filtered[key] = filterTokenData(value, patternList, fullPath);
    } else {
      filtered[key] = value;
    }
  }
  return filtered;
}
/**
 * Sanitize a field name into a token identifier: strip everything except
 * letters, digits and dots, capitalize each space-separated word, join the
 * words, then capitalize each dot-separated segment of the result.
 *
 * @param {string} inputName - raw Airtable field name.
 * @returns {string} the sanitized token name (e.g. "Invoice Date" -> "InvoiceDate").
 */
function cleanTokenName(inputName) {
  const capitalize = (text) => (text ? text[0].toUpperCase() + text.slice(1) : '');

  // Pass 1: per space-separated word — strip disallowed chars, capitalize, join.
  const joinedWords = inputName
    .split(' ')
    .map((word) => capitalize(word.replace(/[^A-Za-z0-9.]/g, '')))
    .join('');

  // Pass 2: capitalize each dot-separated segment of the joined result.
  return joinedWords.split('.').map((segment) => capitalize(segment)).join('.');
}
/**
 * Return `baseName` if it is not taken, otherwise append the smallest
 * integer suffix (starting at `counter`) that makes the name unique.
 *
 * @param {string[]} existingNames - names already in use.
 * @param {string} baseName - the preferred name.
 * @param {number} [counter=1] - first suffix to try.
 * @returns {string} a name not contained in `existingNames`.
 */
function generateUniqueName(existingNames, baseName, counter = 1) {
  if (!existingNames.includes(baseName)) {
    return baseName;
  }
  // Iterative search replaces the original recursion; same suffix sequence.
  let suffix = counter;
  while (existingNames.includes(`${String(baseName)}${String(suffix)}`)) {
    suffix += 1;
  }
  return `${String(baseName)}${String(suffix)}`;
}
/**
 * Attach the generated document (responseData.link) to the record's
 * attachment field, replacing any existing attachment that has the same
 * filename. Best-effort: failures are logged, not rethrown.
 *
 * @param {{link: string}} responseData - Plumsail response with the file URL.
 * @param {string} recordId - id of the record to update.
 * @param {string} fieldId - id of the attachment field.
 * @param {Table} dataTable - table containing the record.
 */
async function attachFileToRecord(responseData, recordId, fieldId, dataTable) {
  try {
    const currentRecord = await dataTable.selectRecordAsync(recordId, { fields: [fieldId] });
    // The filename is carried inside the `rscd` (content-disposition style)
    // query parameter, e.g. rscd=attachment;filename="doc.pdf". If parsing
    // fails, fileName is undefined and the attachment is added without one.
    const fileUrl = new URL(decodeURI(responseData.link));
    const fileName = fileUrl?.searchParams?.get('rscd')?.split(';')
      ?.find((part) => part.startsWith('filename'))?.split('"')[1];
    const currentFiles = currentRecord.getCellValue(fieldId);
    // BUG FIX: the update was not awaited before, so the function could
    // return (and the automation finish) before the write completed, and any
    // rejection escaped this try/catch as an unhandled promise rejection.
    await dataTable.updateRecordAsync(currentRecord, {
      [fieldId]: [
        // Drop an existing attachment with the same filename before re-adding.
        ...(currentFiles || []).filter((file) => file?.filename !== fileName),
        { url: responseData?.link, ...(fileName && { filename: fileName }) }
      ]
    });
  } catch (err) {
    // Keep best-effort semantics, but surface the reason instead of swallowing it.
    console.log('File attachment failed', err);
  }
}
Has anyone else run into this table query limit issue? I’m not sure why the manual run works but the automated one doesn’t. Any ideas on how to optimize this code to use fewer table queries would be really helpful.