In this article, I cover how to write a large array to a CSV file in Node.js. When writing large arrays to CSV files with Node.js, it is crucial to prioritize memory efficiency and performance. The function below has been crafted for this specific purpose, and we'll walk through several practical examples of its usage.
Function for Writing Large Array to CSV
import fs from 'node:fs';
import { Readable } from 'node:stream';
import { pipeline } from 'node:stream/promises';

/**
 * Escape a single CSV field per RFC 4180: wrap the value in double quotes
 * when it contains a comma, quote, or newline, doubling any embedded quotes.
 * @param {*} value - Raw cell value; null/undefined become an empty string.
 * @returns {string} A CSV-safe field.
 */
function toCsvField(value) {
  const s = String(value ?? '');
  return /[",\r\n]/.test(s) ? `"${s.replaceAll('"', '""')}"` : s;
}

/**
 * Stream an array (or any iterable) of flat objects to a CSV file.
 *
 * Rows are produced lazily by a generator and piped to the file with
 * stream.pipeline, so backpressure is respected and only a small window of
 * rows is ever buffered in memory. (The previous forEach + transform.write
 * approach queued the entire array into the stream buffer up front, and
 * never handled errors emitted by the Transform stream.)
 *
 * @param {Iterable<object>} data - Records; each object's values become one row.
 * @param {string} filename - Destination path for the CSV file.
 * @param {string[]} [headers=[]] - Optional column headers written first.
 * @returns {Promise<void>} Resolves when the file is fully written; rejects
 *   on any stream error.
 */
function writeToCsvFile(data, filename, headers = []) {
  function* rows() {
    if (headers.length > 0) {
      yield `${headers.map(toCsvField).join(',')}\n`;
    }
    for (const item of data) {
      yield `${Object.values(item).map(toCsvField).join(',')}\n`;
    }
  }
  // pipeline wires up backpressure and propagates errors from every stage.
  return pipeline(Readable.from(rows()), fs.createWriteStream(filename));
}

/**
 * Demo: generate 100,000 records and export them to largeData.csv.
 */
async function exportToCsv() {
  const largeArray = [...Array(100000)].map((_, i) => ({ id: i, value: `Item ${i}` }));
  try {
    await writeToCsvFile(largeArray, 'largeData.csv', ['ID', 'Value']);
    console.log('The CSV file was written successfully.');
  } catch (e) {
    console.error('An error occurred while writing the CSV file.', e);
  }
}

exportToCsv();
In the code above, the writeToCsvFile
function takes three arguments:
data
: The large array you want to write to the CSV file.
filename
: The path where the CSV file will be created.
headers
(optional): An array of strings representing the column headers.
Finally, we check whether any headers were provided, write them to the CSV first, and then send every item in the array through the stream that is piped to the file stream. This process breaks the file writing down into manageable chunks, preventing an excessive amount of data from being loaded into memory at once. This is especially important when dealing with large datasets.
Now, let’s see the writeToCsvFile
function in action with a different example:
/**
 * Demo: export an array of user records to users.csv with fixed headers.
 * @param {Array<object>} users - User objects whose values become CSV columns.
 */
async function exportUsersToCsv(users) {
  const headers = ['UserID', 'Username', 'Email'];
  try {
    await writeToCsvFile(users, 'users.csv', headers);
    console.log('The users CSV file was written successfully.');
  } catch (e) {
    console.error('An error occurred while writing the users CSV file.', e);
  }
}

// Suppose you have a large array of user objects
const usersArray = [
  { UserID: 1, Username: 'john_doe', Email: '[email protected]' },
  // ... many more user objects
];

exportUsersToCsv(usersArray);