I'm using S3 (AWS SDK) for Node.js to upload files/images into Linode Object Storage.
For really small files (less than 1 MB) it works fine, but when the file is bigger than 1 MB it throws the following error:
error Error [ERR_STREAM_WRITE_AFTER_END]: write after end
at writeAfterEnd (_http_outgoing.js:643:15)
at ClientRequest.end (_http_outgoing.js:763:7)
at features.constructor.writeBody ({pathtoproject}/node_modules/aws-sdk/lib/http/node.js:137:14)
at ClientRequest.<anonymous> ({pathtoproject}/node_modules/aws-sdk/lib/http/node.js:102:14)
at ClientRequest.emit (events.js:315:20)
at ClientRequest.EventEmitter.emit (domain.js:485:12)
at HTTPParser.parserOnIncomingClient [as onIncoming] (_http_client.js:605:11)
at HTTPParser.parserOnHeadersComplete (_http_common.js:117:17)
at TLSSocket.socketOnData (_http_client.js:507:22)
at TLSSocket.emit (events.js:315:20)
This is my upload function:
const s3 = require('./index')
const { v4: uuidv4 } = require('uuid');

/**
 * Uploads a file to the given bucket and resolves with its public URL.
 *
 * @param {Object} file - Multer-style file object; reads `originalname`,
 *   `buffer`, and `mimetype` (shape assumed from usage — confirm against caller).
 * @param {string} bucket - Target bucket name.
 * @returns {Promise<string>} Resolves with the uploaded object's URL
 *   (`data.Location`); rejects with the S3 error on failure.
 */
const uploadFile = (file, bucket) => {
  const params = {
    Bucket: bucket,
    // Prefix with a UUID so uploads with the same original name don't collide.
    Key: `${uuidv4()}-${file.originalname}`,
    Body: file.buffer,
    ContentType: file.mimetype,
    ACL: 'public-read'
  };
  // aws-sdk v2 exposes .promise() on the managed upload request, so no
  // manual new Promise wrapper around the callback API is needed.
  return s3
    .upload(params)
    .promise()
    .then((data) => {
      // Log the URL, not the object (`${data}` would print "[object Object]").
      console.log(`Image uploaded at: ${data.Location}`);
      return data.Location;
    });
};

module.exports = uploadFile
This is the main config:
// S3 client configuration shared by the upload helpers.
const AWS = require("aws-sdk");

// Credentials/region (and, for Linode Object Storage, the custom endpoint)
// are loaded from the JSON config file — confirm config.json sets `endpoint`.
AWS.config.loadFromPath('./aws/config.json');

// Raise the per-request socket timeout to 10 minutes so large uploads are
// not aborted mid-transfer (a timeout + SDK retry can surface as
// ERR_STREAM_WRITE_AFTER_END when the request stream has already ended).
const s3 = new AWS.S3({ httpOptions: { timeout: 10 * 60 * 1000 } });

module.exports = s3
question from:
https://stackoverflow.com/questions/65852811/write-after-end-error-when-uploading-file-using-s3-aws-sdk