Combining multiple async operations to generate single JSON output in Node.js

I’m stuck on this problem. I have multiple files in a directory and need to build one JSON containing file details, processed filenames, and data from HTTP requests.

When I run everything synchronously, it works fine and produces this structure:

[
  {
    "originalName": "document one",
    "processedName": "document one",
    "httpData": "document one"
  },
  {
    "originalName": "document two",
    "processedName": "document two",
    "httpData": "document two"
  }
]

Here’s my working synchronous approach:

fs.readdir(folderPath, function(error, fileList) {
  if (error) throw error;
  let results = [];
  let outputPath = './output/results.json';

  fileList.forEach(function(currentFile){
    let originalName = currentFile;
    let processedName = originalName.replace(/pattern/i, "");
    let httpResponse = fetchData(apiUrl);
    
    let fileInfo = {
      originalName: originalName,
      processedName: processedName,
      httpData: httpResponse
    };
    results.push(fileInfo);
  });
  
  let jsonOutput = JSON.stringify(results, null, 2);
  fs.writeFile(outputPath, jsonOutput, function(err) {
    if (!err) {
      console.log('Success');
    } else {
      console.log('Error:', err);
    }
  });
});

When I try making it asynchronous with promises, the variables get mixed up and I end up with wrong data in my final JSON. The HTTP call needs the processed filename to build the URL, but I also need both the original and processed names in the final output.

How can I properly handle multiple async operations while preserving the correct variable scope for each file?

I ran into something similar last month and found that wrapping each file operation in an immediately invoked function expression (IIFE) works well too. The underlying problem is that JavaScript closures capture variables by reference, not by value: with var in a plain for loop, every async callback reads the loop variable after the loop has already finished, so they all see the last iteration's values. An IIFE sidesteps this by passing the current value in as a parameter, giving each file its own scope.
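Here's a minimal standalone demonstration of that capture behavior (setTimeout stands in for any async call):

// With var there is one shared i; by the time the callbacks run,
// the loop has finished, so every callback logs the final value.
for (var i = 0; i < 3; i++) {
  setTimeout(function() { console.log('var:', i); }, 0); // logs 3, 3, 3
}

// With let (or an IIFE parameter) each iteration gets its own binding.
for (let j = 0; j < 3; j++) {
  setTimeout(function() { console.log('let:', j); }, 0); // logs 0, 1, 2
}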

fs.readdir(folderPath, function(error, fileList) {
  if (error) throw error;
  let results = [];
  let completed = 0;

  fileList.forEach(function(currentFile){
    (function(file) {
      // Each IIFE invocation gets its own originalName and processedName.
      let originalName = file;
      let processedName = originalName.replace(/pattern/i, "");

      fetchData(apiUrl + processedName).then(function(httpResponse) {
        results.push({
          originalName: originalName,
          processedName: processedName,
          httpData: httpResponse
        });
      }).catch(function(fetchErr) {
        // Without a catch, one failed request would stall completed
        // below fileList.length and the output would never be written.
        console.log('Request failed for', processedName, fetchErr);
      }).finally(function() {
        completed++;
        if (completed === fileList.length) {
          let jsonOutput = JSON.stringify(results, null, 2);
          fs.writeFile('./output/results.json', jsonOutput, function(err) {
            console.log(err ? 'Error: ' + err : 'Success');
          });
        }
      });
    })(currentFile);
  });
});

This creates a separate scope for each file operation and tracks completion manually. It's not as elegant as Promise.all, but it's sometimes easier to reason about when debugging scope issues. One caveat: results are pushed in completion order rather than directory order, so sort them afterwards if order matters.

Another approach is using async/await with a regular for loop instead of forEach. forEach doesn't wait for async operations, but a for loop inside an async function does. Try replacing your forEach with for (let i = 0; i < fileList.length; i++) and await your fetchData call. This keeps everything sequential and preserves scope without the Promise.all complexity.
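A rough sketch of that approach, assuming fetchData returns a promise and folderPath/apiUrl are defined as in the question:

const fs = require('fs');

async function buildReport() {
  // fs.promises.readdir resolves with the directory listing.
  const fileList = await fs.promises.readdir(folderPath);
  const results = [];

  // Unlike forEach, a plain for loop lets await pause each iteration,
  // so requests run one at a time and each iteration keeps its own scope.
  for (let i = 0; i < fileList.length; i++) {
    const originalName = fileList[i];
    const processedName = originalName.replace(/pattern/i, "");
    const httpData = await fetchData(apiUrl + processedName);
    results.push({ originalName, processedName, httpData });
  }

  await fs.promises.writeFile('./output/results.json',
                              JSON.stringify(results, null, 2));
  console.log('Success');
}

buildReport().catch(err => console.log('Error:', err));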

The issue you’re encountering is a classic closure problem in asynchronous JavaScript. When you loop through files and make async calls, the variables get overwritten before the async operations complete.

The cleanest solution is to use Promise.all() with map() instead of forEach(). This creates a separate promise for each file and maintains proper variable scope:

fs.readdir(folderPath, async function(error, fileList) {
  if (error) throw error;

  // map() returns one promise per file; each arrow-function invocation
  // gets its own originalName and processedName, so nothing is overwritten.
  const promises = fileList.map(async (currentFile) => {
    const originalName = currentFile;
    const processedName = originalName.replace(/pattern/i, "");
    const httpResponse = await fetchData(apiUrl + processedName);

    return {
      originalName: originalName,
      processedName: processedName,
      httpData: httpResponse
    };
  });

  // Promise.all preserves input order and rejects if any request fails.
  const results = await Promise.all(promises);
  const jsonOutput = JSON.stringify(results, null, 2);

  fs.writeFile('./output/results.json', jsonOutput, function(err) {
    if (!err) {
      console.log('Success');
    } else {
      console.log('Error:', err);
    }
  });
});

This approach ensures each file gets its own execution context and all HTTP requests run concurrently rather than sequentially.
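One caveat: Promise.all rejects as soon as any single request fails, which means the output file never gets written in that case. If partial results are acceptable, Promise.allSettled (available since Node 12.9) collects every outcome instead; a sketch of swapping it in for the Promise.all line:

// Each outcome is { status: 'fulfilled', value } or { status: 'rejected', reason }.
const settled = await Promise.allSettled(promises);

// Keep the files whose request succeeded and log the failures.
const results = settled
  .filter(outcome => outcome.status === 'fulfilled')
  .map(outcome => outcome.value);

settled
  .filter(outcome => outcome.status === 'rejected')
  .forEach(outcome => console.log('Request failed:', outcome.reason));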