Issue
I have the following code to make a fetch request from my front end in my node js express web app (hosted through MS Azure).
The request works fine for smaller zip file blobs, but for large blobs it eventually times out and gives the following error – net::ERR_CONTENT_LENGTH_MISMATCH 200 (OK)
// Build the Cloudinary JSON manifest name for this job and request the
// zipped images from our own /downloadImages endpoint.
let requestURL = organisation + '-' + userID + '-' + jobRef + '.json';
fetch(`/downloadImages`, {
  method: "POST",
  headers: {
    "Content-Type": "application/json",
  },
  body: JSON.stringify({ requestURL: requestURL }),
})
  .then(function (response) {
    // BUG FIX: the original piped every response straight into .blob(),
    // so a 4xx/5xx error body was silently saved as a corrupt zip.
    // Reject non-2xx responses explicitly.
    if (!response.ok) {
      throw new Error('Download failed with HTTP ' + response.status);
    }
    return response.blob();
  })
  .then(function (blob) {
    console.log('blob received');
    download(blob, jobRef + ".zip");
    document.getElementById('loaderBackground').classList.add('d-none');
  })
  .catch((e) => {
    // BUG FIX: hide the loading overlay on failure too — previously it
    // stayed on screen forever after an error.
    document.getElementById('loaderBackground').classList.add('d-none');
    setTimeout(() => {
      console.log(e);
      alert('Error');
    }, 1000);
  });
And this is my app.js in the node.js express folder
//'use strict';
// --- Core framework and middleware dependencies ---
var debug = require('debug');
var express = require('express');
var Path = require('path');
var logger = require('morgan');
var cookieParser = require('cookie-parser');
var csrf = require('csurf');
var bodyParser = require('body-parser');
// NOTE(review): misleadingly named — this requires the HTTPS module, and
// the same module is required again as `https` below. Consolidate.
var http = require("https");
var favicon = require('serve-favicon');
const resolve = require("path").resolve;
// NOTE(review): jszip is required twice in this file (here as JsZip, and
// again further down as JSZip) — one require would do.
var JsZip = require('jszip');
// NOTE(review): fs-extra is required twice (here as `fs`, below as
// `fsExtra`); naming it `fs` hides that this is NOT the core fs module.
var fs = require('fs-extra');
const uuid = require("uuid");
// NOTE(review): `request` is deprecated upstream, and is required a
// second time further down in this file.
var request = require('request');
var https = require('https');
const axios = require("axios");
// NOTE(review): `path` duplicates the `Path` require above.
const path = require("path");
const fsExtra = require("fs-extra");
var Scraper = require("image-scraper");
//ENVIRONMENT SETUP
const currentEnv = 'PROD'; //Either 'PROD' OR 'DEV'
const cloudinaryFolder = 'inspectAPP'; //Either 'inspectAPP' OR 'inspectAPP-dev'
// setup route middlewares
var csrfProtection = csrf({ cookie: true })
var app = express();
//Set up helmet
var helmet = require('helmet');
// Content-Security-Policy: whitelists the CDNs the front end loads
// scripts/styles/fonts from; images and fonts may come from anywhere ("*").
app.use(
helmet.contentSecurityPolicy({
directives: {
defaultSrc: ["'self'"],
scriptSrc: ["'self'", "'unsafe-inline'", "cdnjs.cloudflare.com", "ajax.googleapis.com", "maxcdn.bootstrapcdn.com", "cdn.jsdelivr.net", "stackpath.bootstrapcdn.com"],
styleSrc: ["'self'", "'unsafe-inline'", "stackpath.bootstrapcdn.com", "fonts.googleapis.com", "cdnjs.cloudflare.com", "maxcdn.bootstrapcdn.com", "use.fontawesome.com"],
imgSrc: ["*", "'self'", "cdnjs.cloudflare.com"],
fontSrc: ["*", "'self'", "fonts.gstatic.com"],
frameSrc:["'self'","jimmywarting.github.io"],
objectSrc: ["'none'"],
connectSrc: ["'self'", "res.cloudinary.com", "cloudinary.com"],
upgradeInsecureRequests: [],
},
})
);
// Hide the X-Powered-By: Express header from responses.
app.disable('x-powered-by');
// view engine setup
app.set('views', Path.join(__dirname, 'views'));
app.set('view engine', 'pug');
app.use(favicon(__dirname + '/public/images/favicon.ico'));
app.use(logger('dev'));
// Body parsers: form-encoded and JSON bodies use the library default size
// limit here (the answer below suggests raising it via the `limit` option).
app.use(bodyParser.urlencoded({
extended: false
}));
app.use(bodyParser.json());
// Parse raw application/octet-stream bodies into a Buffer, up to 100 MB.
var rawOptions = {
inflate: true,
limit: '100mb',
type: 'application/octet-stream'
};
app.use(bodyParser.raw(rawOptions));
app.use(cookieParser());
app.use(express.static(Path.join(__dirname, 'public')));
//////////////////////////////////////////////////////////////////////////////////////////
// Rate limiting
//////////////////////////////////////////////////////////////////////////////////////////
const rateLimit = require("express-rate-limit");
const apiLimiter = rateLimit({
windowMs: 24 * 60 * 60 * 1000, // 24 hours
max: 50
});
// Apply the limiter only to the sign-in route (max 50 attempts per day).
app.use("/signIn", apiLimiter);
//////////////////////////////////////////////////////////////////////////////////////////
// Photo management
//////////////////////////////////////////////////////////////////////////////////////////
var JSZip = require("jszip");
var request = require('request');
var archiver = require('archiver');

// Stream the remote image at `uri` into a local file at `filename`,
// invoking `callback` once the write stream closes.
// BUG FIX: the original assigned to an undeclared identifier
// (`downloadImage = ...`), creating an implicit global — and a
// ReferenceError if 'use strict' is ever re-enabled at the top of this
// file. Declare the binding properly.
const downloadImage = async (uri, filename, callback) => {
  // NOTE(review): the HEAD response (err/res/body) is ignored entirely —
  // presumably it was meant to validate the URL or read its headers
  // before downloading; confirm before simplifying this away.
  request.head(uri, function (err, res, body) {
    request(uri).pipe(fs.createWriteStream(filename)).on('close', callback);
  });
};
// Serve a previously generated zip for a given organisation/job.
// SECURITY FIX: the original interpolated raw query values straight into
// the filesystem path, allowing path traversal (e.g.
// ?jobRef=../../secrets). path.basename() strips any directory
// components from each value before the path is assembled.
app.get('/imageDownload', function (req, res) {
  const organisation = path.basename(String(req.query.organisation || ''));
  const jobRef = path.basename(String(req.query.jobRef || ''));
  res.download(path.join('secureFiles/imageZips', organisation, jobRef + '.zip'));
});
// Build a zip of all Cloudinary images listed in the job's JSON manifest
// and send it back to the client as a single buffer.
app.post("/downloadImages", async (req, res) => {
  try {
    const requestURL = req.body.requestURL;
    console.log(requestURL);
    const rootDirectory = process.cwd();
    // Start from an empty staging directory.
    fsExtra.emptyDirSync(`${__dirname}/upload/`);

    // Fetch the image manifest (resource list) from Cloudinary.
    // Returns undefined after sending a 500 if the request fails.
    const getJsonFile = async () => {
      try {
        const resp = await axios.get(
          `https://res.cloudinary.com/alvari/image/list/${requestURL}`
        );
        return resp.data.resources;
      } catch (e) {
        console.log(e);
        res.status(500).send(e);
      }
    };

    // Recursively add every file under `dir` to `zip`, keyed by its path
    // relative to `root`.
    const buildZipFromDirectory = (dir, zip, root) => {
      const list = fs.readdirSync(dir);
      for (let file of list) {
        file = path.resolve(dir, file);
        const stat = fs.statSync(file);
        if (stat && stat.isDirectory()) {
          // BUG FIX: the original called `this.buildZipFromDirectory(...)`
          // inside an arrow function, where `this` holds no such method —
          // any sub-directory threw a TypeError. Recurse via the local
          // binding instead.
          buildZipFromDirectory(file, zip, root);
        } else {
          const filedata = fs.readFileSync(file);
          zip.file(path.relative(root, file), filedata);
        }
      }
    };

    // Zip the staging directory into a single node Buffer.
    const createZipFile = async () => {
      const sourceDir = resolve(rootDirectory, "upload");
      const zip = new JsZip();
      buildZipFromDirectory(sourceDir, zip, sourceDir);
      return zip.generateAsync({
        type: "nodebuffer",
        comment: "ser-web-manangement",
        compression: "DEFLATE",
        compressionOptions: {
          level: 9,
        },
      });
    };

    // BUG FIX: the original awaited getJsonFile() once, discarded the
    // result, then called it a second time — doubling every Cloudinary
    // manifest request. Fetch exactly once.
    const src = await getJsonFile();
    if (!src) {
      // getJsonFile already sent a 500 response; stop before we touch
      // the filesystem again or send a second response.
      return;
    }

    // Download the images one at a time (the stream 'close' event drives
    // the next iteration), then zip and send the archive.
    const recursiveDownload = async function (urlArray, i) {
      if (i < urlArray.length) {
        console.log(urlArray[i].public_id);
        request
          .get(
            "https://res.cloudinary.com/alvari/image/upload/" +
              urlArray[i].public_id
          )
          .on("error", function (err) {
            console.log(err);
          })
          .pipe(
            fs.createWriteStream(
              `${__dirname}/upload/` +
                `${urlArray[i].public_id.split("/")[1]}.` +
                urlArray[i].format
            )
          )
          .on("close", function () {
            recursiveDownload(urlArray, i + 1);
          });
      } else {
        const zipBuffer = await createZipFile();
        fsExtra.emptyDirSync(`${__dirname}/upload/`);
        // BUG FIX: "arrayBuffer" is not a valid MIME type — label the
        // payload as a zip. res.send() with a Buffer sets a correct
        // Content-Length header automatically.
        res.setHeader("content-type", "application/zip");
        res.send(zipBuffer);
      }
    };
    recursiveDownload(src, 0);
  } catch (e) {
    console.log(e);
    fsExtra.emptyDirSync(`${__dirname}/upload/`);
    res.status(500).send(e);
  }
});
//////////////////////////////////////////////////////////////////////////////////////////
// Error handling
//////////////////////////////////////////////////////////////////////////////////////////

// CORS headers.
// BUG FIX: this middleware was originally registered AFTER the 404
// catch-all and the error handlers, so it never executed for any
// request. It must be registered before the catch-all to take effect.
app.use(function (req, res, next) {
  res.header("Access-Control-Allow-Origin", "*");
  res.header("Access-Control-Allow-Methods", "GET,HEAD,OPTIONS,POST,PUT");
  res.header(
    "Access-Control-Allow-Headers",
    "Origin, X-Requested-With, Content-Type, Accept,Authorization"
  );
  next();
});

// catch 404 and forward to error handler
app.use(function (req, res, next) {
  var err = new Error('Not Found');
  // BUG FIX: this was commented out, so every unknown route fell through
  // to `err.status || 500` below and was reported as a server error.
  err.status = 404;
  next(err);
});

// development error handler: renders the stacktrace in the error page
if (app.get('env') === 'development') {
  app.use(function (err, req, res, next) {
    res.status(err.status || 500);
    res.render('error.pug', {
      message: err.message,
      error: err
    });
  });
}

// production error handler: no stacktraces leaked to the user
app.use(function (err, req, res, next) {
  res.status(err.status || 500);
  res.render('error.pug', {
    message: err.message,
    error: {}
  });
});

app.set('port', process.env.PORT || 3000);
var server = app.listen(app.get('port'), function () {
  debug('Express server listening on port ' + server.address().port);
});
Solution
You should probably configure the maximum request body size accepted by your Express app's body parsers.
app.use(bodyParser.json({limit: '100mb'}));
app.use(bodyParser.urlencoded({
limit: '100mb',
extended: true
}));
Change 100mb to a size that suits your requirements. Note that these limits apply to incoming request bodies; if the error occurs while the server streams the zip back down, also check the response/proxy timeout settings of your Azure hosting.