I'm uploading in multipart a file in aws, i've already checked the filesize, the buffer size, everything's good, but, on aws i have a 500mbytes file (instead of 100mb...)
What could be the problem?
My code below :
The function that prepares the sending and
async function send(uploadID, path, filename, projectTitle) {
let stats = fs.statSync(path);
let data = [];
let readStream = fs.createReadStream(path,{ highWaterMark: 10 * 1024 * 1024});
readStream.on('data', (chunk)=>{
data.push(chunk)
}).on('end', async ()=>{
for (let i = 0; i<data.length; i++){
await $.post(baseUrl + "/signChunk/" + token_g, {
uploadId : uploadID,
partNumber : i+1,
id : currentProjectID
}).done(async (result)=>{
if(result.response===true){
await putData(result.signedRequest, data[i], i+1, data.length, filename, uploadID);
}
});
}
});
return 0;
}
The function that sends data to the remote (chunk by chunk)
async function putData(url, chunk, id,dataLen,fn,uploadID, attemps = 0){
let part = {"PartNumber" : id, "ETag" : ""};
let opts = {
url : url,
method : "PUT",
data : {
data: escape(chunk).toString('binary')
}
}
curSize = 0;
$.ajax(opts).done(async (data, status, xhr)=>{
if(xhr.status==200){
part.ETag = xhr.getResponseHeader("etag")
complete.push(part);
if(complete.length>=dataLen){
finalizeUpload(uploadID);
}
} else {
putData(url, chunk, id, dataLen,fn, uploadID, attemps++);
}
})
}
The function that finalizes the upload (and reconstitute the file from chunks)
async function finalizeUpload(uploadID){
let result = await $.post(baseUrl + "/endMultipart/" + token_g,{
id : currentProjectID,
uploadId : uploadID,
partsNumbers : complete
});
complete = [];
}
question from:
https://stackoverflow.com/questions/65888641/amazon-upload-multipart-multiplies-file-size-js