Currently I have tried many approaches, for example:
// Create an s3fs filesystem wrapper bound to the target bucket.
// NOTE: declared with `const` — the original assignment (`s3fsImpl = ...`)
// created an implicit global, which is an error in strict mode and makes
// the variable's scope unpredictable.
const S3FS = require('s3fs');
const s3fsImpl = new S3FS(bucketName, {
  accessKeyId,
  secretAccessKey,
});
// Serve the S3 object as an HTTP 206 Partial Content response so the client
// (video player, PDF viewer, ...) can seek within large files.
// Fall back to the whole object ("bytes=0-") when no Range header was sent.
const range = typeof req.headers.range !== 'undefined' ? req.headers.range : 'bytes=0-';
const positions = range.replace(/bytes=/, '').split('-');
const total = details.ContentLength;

// Parse and sanitise the requested byte window. A malformed Range header
// yields NaN; coerce that to the start of the file instead of emitting a
// bogus Content-Range.
let start = parseInt(positions[0], 10);
if (Number.isNaN(start) || start < 0) start = 0;

// `end` is inclusive. Clamp it to the last byte of the object: the original
// code could advertise a Content-Length larger than the bytes actually
// streamed, which leaves the client waiting forever (one cause of "hangs").
let end = positions[1] ? parseInt(positions[1], 10) : total - 1;
if (Number.isNaN(end) || end > total - 1) end = total - 1;

const chunksize = end - start + 1;

// All headers must be in place before the body starts streaming.
res.status(206);
res.setHeader('Content-Type', contentType);
res.setHeader('Content-Range', `bytes ${start}-${end}/${total}`);
res.setHeader('Accept-Ranges', 'bytes');
res.setHeader('Content-Length', chunksize);

// NOTE(review): s3fs's createReadStream appears NOT to honour { start, end }
// — verify; if it streams the whole object, the byte count never matches the
// advertised Content-Length and the request hangs. If so, use the AWS SDK's
// server-side ranged read instead:
//   s3.getObject({ Bucket: bucketName, Key: key,
//                  Range: `bytes=${start}-${end}` })
//     .createReadStream().pipe(res);
s3fsImpl.createReadStream(key, { start: start, end: end }).pipe(res);
(The same happens with `const AWS = require('aws-sdk')`.) It just hangs and no video appears. Currently I just pipe without chunks, but then the video takes too long to start playing, so it seems I need to implement chunked streaming from S3. How can I pipe video, PDF, or other big files from AWS S3?