If you want to build an endpoint to download a CSV that could contain a large number of rows, you should use streams, so you don't need to hold all the data in memory before writing it.
If you are already using the pg client, there is a nifty add-on, pg-query-stream, for exactly this purpose:
const { Client } = require('pg');
const QueryStream = require('pg-query-stream');
const csvWriter = require('csv-write-stream');

module.exports = function (connectionString) {
  this.handle = function (req, res) {
    const sql = "SELECT...";
    const args = [...];

    const client = new Client({ connectionString });
    client.connect().then(() => {
      // Stream rows out of Postgres instead of buffering the whole result set
      const stream = new QueryStream(sql, args);
      stream.on('end', () => {
        client.end();
      });
      const query = client.query(stream);

      // Serialise each row object to a CSV line and stream it straight to the response
      const writer = csvWriter();
      res.contentType("text/csv");
      writer.pipe(res);
      query.pipe(writer);
    });
  };
};
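For context, res here is assumed to be an Express response (that's where contentType comes from). A minimal sketch of wiring the handler into an app might look like this; the route path and module filename are made up for illustration:

const express = require('express');
const CsvExport = require('./csv-export'); // hypothetical filename for the module above

const app = express();
const csvExport = new CsvExport(process.env.DATABASE_URL);

// Each request gets its own client, so a slow download doesn't tie up a shared connection
app.get('/export.csv', (req, res) => csvExport.handle(req, res));

app.listen(3000);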
If you need to transform the data, you can add another step:
...
const transform = require('stream-transform');
...
const query = client.query(stream);

// Map each database row to the column names and formats you want in the CSV
const transformer = transform(r => ({
  "User ID": r.user_id,
  "Created": r.created.toISOString(),
  ...
}));
...
query.pipe(transformer).pipe(writer);
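One thing to keep in mind with a chain of .pipe() calls is that errors don't propagate along it, so a failed query can leave the response hanging. A sketch of the same wiring using Node's built-in stream.pipeline, which tears everything down and reports the first error (assuming the same client, query, transformer and writer variables as above):

const { pipeline } = require('stream');

// pipeline() replaces the manual pipes and the 'end' handler: it destroys all
// the streams on failure and calls back once, with the first error if there was one
pipeline(query, transformer, writer, res, (err) => {
  client.end();
  if (err) {
    console.error('CSV export failed', err);
  }
});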