As of Node.js v13, you can use async generators within `pipeline`
and build your object as a string:
// const { pipeline } = require('stream/promises'); // <- node >= 16
const Util = require('util');
const Stream = require('stream'); // was referenced below but never required
const pipeline = Util.promisify(Stream.pipeline);
const algorithm = 'aes-256-ctr';
const Crypto = require('crypto');

/**
 * Streams a file through an AES-256-CTR cipher and a JSON-wrapping
 * async-generator stage into the destination stream.
 * Assumes `file`, `encryptionKey` (32 bytes for aes-256-ctr), `iv`
 * (16 bytes), and `outStream` are defined by the caller.
 */
async function run() {
  await pipeline(
    file.stream, // <- your file read stream
    // createCipher is deprecated (removed in recent Node versions);
    // createCipheriv requires an explicit 16-byte IV for aes-256-ctr.
    Crypto.createCipheriv(algorithm, encryptionKey, iv),
    chunksToJson,
    outStream // <- your destination write stream
  );
}
/**
 * Async-generator transform stage for stream.pipeline: wraps the incoming
 * chunk stream in a JSON document of the form {"content": "..."} without
 * buffering the whole payload.
 *
 * Fix vs. original: chunk text was interpolated into the JSON string value
 * unescaped, so any quote, backslash, or control character (very likely when
 * ciphertext is decoded as utf8) produced invalid JSON. Each fragment is now
 * JSON-escaped before being yielded.
 *
 * @param {AsyncIterable<Buffer|string|any>} chunksAsync - chunks from the previous stage
 * @yields {string} fragments that concatenate into one valid JSON document
 */
async function* chunksToJson(chunksAsync) {
  yield '{"content": "';
  for await (const chunk of chunksAsync) {
    const text = Buffer.isBuffer(chunk)
      ? chunk.toString('utf8')
      : typeof chunk === 'string'
        ? chunk
        : JSON.stringify(chunk);
    // JSON.stringify escapes quotes/backslashes/control characters; strip
    // the surrounding quotes so the fragment continues the open string value.
    yield JSON.stringify(text).slice(1, -1);
  }
  yield '"}';
}
In a more complex case where a large amount of data is being streamed (which is usually the point of using streams), you may be tempted to do something like the following. This is not good practice: all of the content
builds up in memory before anything is yielded, defeating the purpose of streaming.
// ANTI-PATTERN (kept intentionally as the example the text warns about):
// every chunk is accumulated in memory and a single JSON string is yielded
// only after the source is exhausted — no streaming benefit at all.
async function* chunksToJson(chunksAsync) {
  const collected = [];
  for await (const piece of chunksAsync) {
    const text = Buffer.isBuffer(piece) ? piece.toString('utf8') : JSON.stringify(piece);
    collected.push(text);
  }
  yield JSON.stringify({ content: collected });
}