AirTable Script Error: Table Query Limit of 30 Exceeded Per Execution

I created a script extension in my AirTable base that works with Plumsail integration. The script is supposed to run automatically when a record gets approved through the interface. When I test it manually, everything works fine and my PDF gets generated correctly. But when the automation triggers based on my conditions, it fails with “Error: Exceeded quota of 30 table queries per script invocation”. Has anyone encountered this issue before? I need help figuring out why the automated execution behaves differently than manual execution.

const settings = input.config();
const mainTable = base.getTable('Invoice Header');
const apiEndpoint = 'https://api.plumsail.com/api/v2/processes/aaaaaaa/bbbbbbb/start';
const currentRecordId = settings.RecordId;
const fieldTokens = [
  'InvoiceDate',
  'InvoiceNumber',
  'ApprovalDate1',
  'ApprovalDate2',
  'Vendor',
  'Vendor.VendorName',
  'Vendor.Street',
  'Vendor.City',
  'Vendor.PostalCode',
  'Vendor.Email',
  'Vendor.Phone',
  'Vendor.State',
  'Vendor.State.name',
  'DeliveryAddress',
  'DeliveryAddress.ContactNumber',
  'InvoiceItems',
  'InvoiceItems.ItemDescription',
  'InvoiceItems.Quantity',
  'InvoiceItems.UnitPrice',
  'FirstApprover',
  'FirstApprover.Name',
  'SecondApprover',
  'SecondApprover.Name'
];

const requestData = await buildPayload(currentRecordId, mainTable, fieldTokens);
const apiResponse = await fetch(apiEndpoint, {
  method: 'POST',
  body: JSON.stringify(requestData),
  headers: {
    'Content-Type': 'application/json'
  }
});

const result = await apiResponse.json();
await attachFileToRecord(result, currentRecordId, 'fld4v4XlcWwZfaXas', mainTable);
output.set('fileLink', result.link);

async function buildPayload(recordId, dataTable, tokenList) {
  const uniqueTokens = [...new Set(tokenList.reduce((result, token) => {
    token.split('.').forEach((part) => {
      result.push(part);
    });
    return result;
  }, []))];

  const relevantFields = dataTable.fields.filter((field) => uniqueTokens.includes(cleanTokenName(field.name.replace(/\./g, ''))));
  const currentRecord = await dataTable.selectRecordAsync(recordId, {fields: relevantFields});
  if (currentRecord) {
    const recordData = await extractRecordData(relevantFields, currentRecord, [dataTable.id], uniqueTokens);
    const finalPayload = filterTokenData(recordData, tokenList);
    return finalPayload;
  }
  return {};
}

async function extractRecordData(fieldList, record, processedTables, tokenList) {
  const usedNames = [];
  return fieldList.reduce(async (promise, field) => {
    const result = await promise;

    const existingFieldNames = fieldList.reduce((names, currentField) => {
      if (currentField.name === field.name) return names;
      const cleanName = cleanTokenName(currentField.name.replace(/\./g, ''));
      names.push(cleanName);
      return names;
    }, []);

    existingFieldNames.push(...usedNames);

    const cleanName = cleanTokenName(field.name.replace(/\./g, ''));
    let finalFieldName;

    if (field.name.toLowerCase() === cleanName.toLowerCase()) {
      finalFieldName = cleanName;
    } else {
      finalFieldName = generateUniqueName(existingFieldNames, cleanName);
      usedNames.push(finalFieldName);
    }

    if (field.type === 'multipleRecordLinks') {
      const linkedTableId = field.options.linkedTableId;
      const singleLink = field.options.prefersSingleRecordLink;

      const linkedTable = base.getTable(linkedTableId);
      if (processedTables.includes(linkedTableId) || !linkedTable) {
        const cellValue = record.getCellValue(field);
        if (singleLink) {
          result[finalFieldName] = cellValue[0].name;
        } else {
          result[finalFieldName] = cellValue;
        }
        return result;
      }

      const allFields = linkedTable.fields;
      const linkedIds = (record?.getCellValue(field.name) || []).map(({id}) => id);
      const linkedQuery = await linkedTable.selectRecordsAsync({fields: allFields, recordIds: linkedIds});
      const linkedRecords = linkedQuery.records;
      const linkedValues = await Promise.all(linkedRecords.map(async (linkedRecord) => {
        return await extractRecordData(allFields, linkedRecord, processedTables.concat(linkedTableId), tokenList);
      }));

      if (singleLink) {
        result[finalFieldName] = {};
        if (linkedValues?.length) {
          Object.entries(linkedValues[0]).forEach(([key, value]) => {
            result[finalFieldName][key] = value;
          });
        }
      } else {
        result[finalFieldName] = linkedValues;
      }
      return result;
    }

    if (field.type === 'multipleLookupValues') {
      const lookupValues = record.getCellValue(field);
      if (Array.isArray(lookupValues)) {
        const processedValues = lookupValues.map((val) => {
          if (val && Object.keys(val).length === 1 && val.text) {
            return { value: val.text };
          }
          return { value: val };
        });
        result[finalFieldName] = processedValues;
      } else {
        result[finalFieldName] = lookupValues;
      }
      return result;
    }

    if (field.type === 'barcode') {
      result[finalFieldName] = record.getCellValueAsString(field);
      return result;
    }

    if (field.type === 'checkbox') {
      result[finalFieldName] = record.getCellValue(field) || false;
      return result;
    }
    result[finalFieldName] = record.getCellValue(field);
    return result;
  }, Promise.resolve({}));
}

function filterTokenData(dataTree, patterns, currentPrefix = null) {
  return Object.keys(dataTree).reduce((filtered, key) => {
    const fullPrefix = currentPrefix ? String(currentPrefix) + '.' + String(key) : key;
    if (!patterns.includes(currentPrefix ? String(currentPrefix) + '.' + String(key) : key)) {
      return filtered;
    }
    if (Array.isArray(dataTree[key])) {
      const arrayValue = dataTree[key].map((item) => filterTokenData(item, patterns, fullPrefix));
      filtered[key] = arrayValue;
      return filtered;
    }
    if (dataTree[key] && typeof dataTree[key] === 'object') {
      const objectValue = filterTokenData(dataTree[key], patterns, fullPrefix);
      filtered[key] = objectValue;
      return filtered;
    }
    filtered[key] = dataTree[key];
    return filtered;
  }, {});
}

function cleanTokenName(inputName) {
  return inputName.split(' ')
    .map((word) => {
      const cleaned = word.replace(/[^A-Za-z0-9.]/g, '');
      if (!cleaned) return '';
      return cleaned[0].toUpperCase() + cleaned.slice(1);
    }).join('')
    .split('.').map((word) => {
      if (!word) return '';
      return word[0].toUpperCase() + word.slice(1);
    }).join('.');
}

function generateUniqueName(nameList, baseName, counter = 1) {
  if (!nameList.length || !nameList.includes(baseName)) return baseName;
  const modifiedName = String(baseName) + String(counter);

  if (!nameList.includes(modifiedName)) return modifiedName;

  const nextCounter = counter + 1;
  return generateUniqueName(nameList, baseName, nextCounter);
}

async function attachFileToRecord(responseData, recordId, fieldId, table) {
  try {
    const targetRecord = await table.selectRecordAsync(recordId, { fields: [fieldId] });
    const fileUrl = new URL(decodeURI(responseData.link));
    const fileName = fileUrl?.searchParams?.get('rscd')?.split(';')
      ?.find((element) => element.startsWith('filename'))?.split('"')[1];

    const currentValue = targetRecord.getCellValue(fieldId);
    table.updateRecordAsync(targetRecord, {
      [fieldId]: [
        ...(currentValue || []).filter((value) => value?.filename !== fileName),
        { url: responseData?.link, ...(fileName && { filename: fileName }) }
      ]
    });
  } catch {
    console.log('File attachment failed');
  }
}

hey, check your automation settings - they might be triggering multiple times. add some console.logs to count how many selectRecordsAsync calls you’re getting in manual vs auto runs. probably a timing issue with the linked records.

I had the same issue with a complex linked records setup. Automated runs sometimes pass different input parameters than manual runs, so your script ends up querying way more records than expected. Add a query counter at the top of your script to track exactly how many selectRecordsAsync calls you’re making. In my case, the automation was somehow triggering with multiple record IDs in a batch, even though it looked like single record processing. You’ll want to add a guard clause to limit how many linked records get processed per run, or break this into smaller operations if you’re dealing with large datasets.

Your script’s doing recursive queries on linked records with no safeguards. I hit this exact problem last year building similar invoice automation.

The extractRecordData function is your culprit. Every time it hits a linked record, it calls selectRecordsAsync again. If those records have their own links, it keeps going. AirTable doesn’t cache these queries in automation mode like it does with manual runs.

Here’s how I fixed it:

  1. Track your query count at the top:
let queryCount = 0;
const MAX_QUERIES = 25; // Leave some buffer
  1. Check before each selectRecordsAsync call:
if (queryCount >= MAX_QUERIES) {
  console.log('Query limit approaching, skipping nested records');
  return result;
}
queryCount++;
  1. Add a depth limit to your extractRecordData function:
async function extractRecordData(fieldList, record, processedTables, tokenList, depth = 0) {
  if (depth > 2) return {}; // Prevent deep nesting
  // ... rest of your function
  // When calling recursively:
  return await extractRecordData(allFields, linkedRecord, processedTables.concat(linkedTableId), tokenList, depth + 1);
}

For invoice data, you probably don’t need more than 2-3 levels deep. This keeps you well under the 30 query limit while still getting all your data.

This happens because AirTable handles linked record queries differently in automation vs manual runs. When your script runs through automation, it processes all linked records recursively without the memory optimizations you get from manual execution. Your extractRecordData function creates a chain reaction - each linked record triggers more queries for its own linked fields. Add a depth limit parameter to extractRecordData to stop infinite recursion. Use a Map to cache already-queried records so you don’t hit the same records with selectRecordsAsync multiple times. Also check if your automation condition is accidentally triggering on multiple related records at once instead of just the target record.