
PayloadTooLargeError: request entity too large

Mavi
Associate II

Hi, can someone please help me?
When I try to deploy, I get the following error:

PayloadTooLargeError: request entity too large
at readStream (C:\Users\diego.mavillonio\AppData\Local\STM32CubeMonitor\resources\app\node_modules\raw-body\index.js:163:17)
at getRawBody (C:\Users\diego.mavillonio\AppData\Local\STM32CubeMonitor\resources\app\node_modules\raw-body\index.js:116:12)
at read (C:\Users\diego.mavillonio\AppData\Local\STM32CubeMonitor\resources\app\node_modules\body-parser\lib\read.js:79:3)
at jsonParser (C:\Users\diego.mavillonio\AppData\Local\STM32CubeMonitor\resources\app\node_modules\body-parser\lib\types\json.js:138:5)
at Layer.handle [as handle_request] (C:\Users\diego.mavillonio\AppData\Local\STM32CubeMonitor\resources\app\node_modules\express\lib\router\layer.js:95:5)
at trim_prefix (C:\Users\diego.mavillonio\AppData\Local\STM32CubeMonitor\resources\app\node_modules\express\lib\router\index.js:328:13)
at C:\Users\diego.mavillonio\AppData\Local\STM32CubeMonitor\resources\app\node_modules\express\lib\router\index.js:286:9
at Function.process_params (C:\Users\diego.mavillonio\AppData\Local\STM32CubeMonitor\resources\app\node_modules\express\lib\router\index.js:346:12)
at next (C:\Users\diego.mavillonio\AppData\Local\STM32CubeMonitor\resources\app\node_modules\express\lib\router\index.js:280:10)
at cors (C:\Users\diego.mavillonio\AppData\Local\STM32CubeMonitor\resources\app\node_modules\cors\lib\index.js:188:7)

The .elf file that I upload is 1930 kB; if I upload a smaller .elf file, the error does not occur.
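For context, body-parser's default JSON limit is '100kb' (the same default that is visible in json.js in the solution further down), so a ~1930 kB payload is rejected before it is parsed. A minimal sketch of that comparison, assuming the documented default:

var bytes = require('bytes')

var defaultLimit = bytes.parse('100kb')  // body-parser's default "limit" for json()
var payloadSize = bytes.parse('1930kb')  // approximate size of the failing deploy payload

console.log(payloadSize > defaultLimit)  // true -> HTTP 413 "request entity too large"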

 

 


3 REPLIES
Pavel A.
Evangelist III

> at readStream (C:\Users\diego.mavillonio\AppData\Local\STM32CubeMonitor\resources\app\node_modules\raw-body\index.js:163:17)

> at getRawBody (C:\Users\diego.mavillonio\AppData\Local\STM32CubeMonitor\resources\app\node_modules\raw-body\index.js:116:12)

Please look in these files and see what the size limit is there. Is it a hardcoded limit in the script, or an inconsistency or corruption in the data?

 

Mavi
Associate II

I'm sorry, I'm not a JavaScript expert, but it seems that the limit is passed in as an argument... it's not defined in these files, or am I wrong?
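For reference, raw-body only receives the limit; body-parser's json() resolves it (defaulting to '100kb') and hands it down. A rough, paraphrased sketch of that hand-off — the helper name here is hypothetical, not the library's actual code:

var getRawBody = require('raw-body')

// Hypothetical helper illustrating how body-parser forwards its resolved limit.
function readBody (req, limit, done) {
  getRawBody(req, {
    limit: limit,                          // e.g. bytes.parse('100kb') by default
    length: req.headers['content-length']  // declared size of the incoming payload
  }, done)                                 // done(err, body) receives the 413 on overflow
}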

C:\Users\diego.mavillonio\AppData\Local\STM32CubeMonitor\resources\app\node_modules\raw-body\index.js:163:17

/*!
 * raw-body
 * Copyright(c) 2013-2014 Jonathan Ong
 * Copyright(c) 2014-2022 Douglas Christopher Wilson
 * MIT Licensed
 */

'use strict'

/**
 * Module dependencies.
 * @private
 */

var asyncHooks = tryRequireAsyncHooks()
var bytes = require('bytes')
var createError = require('http-errors')
var iconv = require('iconv-lite')
var unpipe = require('unpipe')

/**
 * Module exports.
 * @public
 */

module.exports = getRawBody

/**
 * Module variables.
 * @private
 */

var ICONV_ENCODING_MESSAGE_REGEXP = /^Encoding not recognized: /

/**
 * Get the decoder for a given encoding.
 *
 * @param {string} encoding
 * @private
 */

function getDecoder (encoding) {
  if (!encoding) return null

  try {
    return iconv.getDecoder(encoding)
  } catch (e) {
    // error getting decoder
    if (!ICONV_ENCODING_MESSAGE_REGEXP.test(e.message)) throw e

    // the encoding was not found
    throw createError(415, 'specified encoding unsupported', {
      encoding: encoding,
      type: 'encoding.unsupported'
    })
  }
}

/**
 * Get the raw body of a stream (typically HTTP).
 *
 * @param {object} stream
 * @param {object|string|function} [options]
 * @param {function} [callback]
 * @public
 */

function getRawBody (stream, options, callback) {
  var done = callback
  var opts = options || {}

  // light validation
  if (stream === undefined) {
    throw new TypeError('argument stream is required')
  } else if (typeof stream !== 'object' || stream === null || typeof stream.on !== 'function') {
    throw new TypeError('argument stream must be a stream')
  }

  if (options === true || typeof options === 'string') {
    // short cut for encoding
    opts = {
      encoding: options
    }
  }

  if (typeof options === 'function') {
    done = options
    opts = {}
  }

  // validate callback is a function, if provided
  if (done !== undefined && typeof done !== 'function') {
    throw new TypeError('argument callback must be a function')
  }

  // require the callback without promises
  if (!done && !global.Promise) {
    throw new TypeError('argument callback is required')
  }

  // get encoding
  var encoding = opts.encoding !== true
    ? opts.encoding
    : 'utf-8'

  // convert the limit to an integer
  var limit = bytes.parse(opts.limit)

  // convert the expected length to an integer
  var length = opts.length != null && !isNaN(opts.length)
    ? parseInt(opts.length, 10)
    : null

  if (done) {
    // classic callback style
    return readStream(stream, encoding, length, limit, wrap(done))
  }

  return new Promise(function executor (resolve, reject) {
    readStream(stream, encoding, length, limit, function onRead (err, buf) {
      if (err) return reject(err)
      resolve(buf)
    })
  })
}

/**
 * Halt a stream.
 *
 * @param {Object} stream
 * @private
 */

function halt (stream) {
  // unpipe everything from the stream
  unpipe(stream)

  // pause stream
  if (typeof stream.pause === 'function') {
    stream.pause()
  }
}

/**
 * Read the data from the stream.
 *
 * @param {object} stream
 * @param {string} encoding
 * @param {number} length
 * @param {number} limit
 * @param {function} callback
 * @public
 */

function readStream (stream, encoding, length, limit, callback) {
  var complete = false
  var sync = true

  // check the length and limit options.
  // note: we intentionally leave the stream paused,
  // so users should handle the stream themselves.
  if (limit !== null && length !== null && length > limit) {
    return done(createError(413, 'request entity too large', {
      expected: length,
      length: length,
      limit: limit,
      type: 'entity.too.large'
    }))
  }

  // streams1: assert request encoding is buffer.
  // streams2+: assert the stream encoding is buffer.
  //   stream._decoder: streams1
  //   state.encoding: streams2
  //   state.decoder: streams2, specifically < 0.10.6
  var state = stream._readableState
  if (stream._decoder || (state && (state.encoding || state.decoder))) {
    // developer error
    return done(createError(500, 'stream encoding should not be set', {
      type: 'stream.encoding.set'
    }))
  }

  if (typeof stream.readable !== 'undefined' && !stream.readable) {
    return done(createError(500, 'stream is not readable', {
      type: 'stream.not.readable'
    }))
  }

  var received = 0
  var decoder

  try {
    decoder = getDecoder(encoding)
  } catch (err) {
    return done(err)
  }

  var buffer = decoder
    ? ''
    : []

  // attach listeners
  stream.on('aborted', onAborted)
  stream.on('close', cleanup)
  stream.on('data', onData)
  stream.on('end', onEnd)
  stream.on('error', onEnd)

  // mark sync section complete
  sync = false

  function done () {
    var args = new Array(arguments.length)

    // copy arguments
    for (var i = 0; i < args.length; i++) {
      args[i] = arguments[i]
    }

    // mark complete
    complete = true

    if (sync) {
      process.nextTick(invokeCallback)
    } else {
      invokeCallback()
    }

    function invokeCallback () {
      cleanup()

      if (args[0]) {
        // halt the stream on error
        halt(stream)
      }

      callback.apply(null, args)
    }
  }

  function onAborted () {
    if (complete) return

    done(createError(400, 'request aborted', {
      code: 'ECONNABORTED',
      expected: length,
      length: length,
      received: received,
      type: 'request.aborted'
    }))
  }

  function onData (chunk) {
    if (complete) return

    received += chunk.length

    if (limit !== null && received > limit) {
      done(createError(413, 'request entity too large', {
        limit: limit,
        received: received,
        type: 'entity.too.large'
      }))
    } else if (decoder) {
      buffer += decoder.write(chunk)
    } else {
      buffer.push(chunk)
    }
  }

  function onEnd (err) {
    if (complete) return
    if (err) return done(err)

    if (length !== null && received !== length) {
      done(createError(400, 'request size did not match content length', {
        expected: length,
        length: length,
        received: received,
        type: 'request.size.invalid'
      }))
    } else {
      var string = decoder
        ? buffer + (decoder.end() || '')
        : Buffer.concat(buffer)
      done(null, string)
    }
  }

  function cleanup () {
    buffer = null

    stream.removeListener('aborted', onAborted)
    stream.removeListener('data', onData)
    stream.removeListener('end', onEnd)
    stream.removeListener('error', onEnd)
    stream.removeListener('close', cleanup)
  }
}

/**
 * Try to require async_hooks
 * @private
 */

function tryRequireAsyncHooks () {
  try {
    return require('async_hooks')
  } catch (e) {
    return {}
  }
}

/**
 * Wrap function with async resource, if possible.
 * AsyncResource.bind static method backported.
 * @private
 */

function wrap (fn) {
  var res

  // create anonymous resource
  if (asyncHooks.AsyncResource) {
    res = new asyncHooks.AsyncResource(fn.name || 'bound-anonymous-fn')
  }

  // incompatible node.js
  if (!res || !res.runInAsyncScope) {
    return fn
  }

  // return bound function
  return res.runInAsyncScope.bind(res, fn, null)
}

C:\Users\diego.mavillonio\AppData\Local\STM32CubeMonitor\resources\app\node_modules\raw-body\index.js:116:12

(This location is in the same index.js as above; the source is identical to the first listing, so it is not repeated here.)

I noticed that in the exe-config node, if I select "expand variable list", the deployment gives an error, while if I leave it unselected it proceeds normally...
If I export my flow with the complete variable list, I get a .json file containing the entire list of variables, not just the ones I selected... Perhaps I exceed the "exevariablelist" limit.

(screenshot attachment: export_json_file.png)
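A quick way to test this hypothesis is to compare the exported flow's size with the default 100 kB JSON limit; a sketch (the export file name below is hypothetical):

var fs = require('fs')
var bytes = require('bytes')

// Compare the exported flow JSON against body-parser's default 100 kB limit.
var size = fs.statSync('export_json_file.json').size  // hypothetical export file name
console.log(bytes.format(size), size > bytes.parse('100kb') ? '-> over the default limit' : '-> within the default limit')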

 

Mavi
Associate II

I managed to solve the problem; I'll post the solution in case it helps others.
In this file: "C:\Users\diego.mavillonio\AppData\Local\STM32CubeMonitor\resources\app\node_modules\body-parser\lib\types\json.js"
I changed the "limit" variable as follows:

function json (options) {
  var opts = options || {}
  //****** OLD *********
  //var limit = typeof opts.limit !== 'number'
  //  ? bytes.parse(opts.limit || '100kb')
  //  : opts.limit
  //******* NEW ********
  var limit = bytes.parse('10Mb')
  var inflate = opts.inflate !== false
  var reviver = opts.reviver
  var strict = opts.strict !== false
  var type = opts.type || 'application/json'
  var verify = opts.verify || false
  // ... rest of the function unchanged
}
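Note that edits under node_modules will likely be overwritten when STM32CubeMonitor is updated or reinstalled. Where you control the Express setup yourself, the same limit can normally be passed as an option instead of patching the library; a minimal sketch, assuming a plain Express app rather than STM32CubeMonitor's actual code:

var express = require('express')
var bodyParser = require('body-parser')

var app = express()
// Passing the limit as an option has the same effect as the patch above.
app.use(bodyParser.json({ limit: '10mb' }))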