This commit is contained in:
2026-03-03 15:23:00 +00:00
parent 5e3726de39
commit 8e223bfbec
3689 changed files with 955330 additions and 1011 deletions

11
node_modules/@hapi/subtext/LICENSE.md generated vendored Executable file
View File

@@ -0,0 +1,11 @@
Copyright (c) 2012-2022, Project contributors
Copyright (c) 2012-2019, Sideway Inc
Copyright (c) 2012-2014, Walmart.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* The names of any contributors may not be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS AND CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

17
node_modules/@hapi/subtext/README.md generated vendored Normal file
View File

@@ -0,0 +1,17 @@
<a href="https://hapi.dev"><img src="https://raw.githubusercontent.com/hapijs/assets/master/images/family.png" width="180px" align="right" /></a>
# @hapi/subtext
#### HTTP payload parser.
**subtext** is part of the **hapi** ecosystem and was designed to work seamlessly with the [hapi web framework](https://hapi.dev) and its other components (but works great on its own or with other frameworks). If you are using a different web framework and find this module useful, check out [hapi](https://hapi.dev) — they work even better together.
### Visit the [hapi.dev](https://hapi.dev) Developer Portal for tutorials, documentation, and support
## Useful resources
- [Documentation and API](https://hapi.dev/family/subtext/)
- [Versions status](https://hapi.dev/resources/status/#subtext) (builds, dependencies, node versions, licenses, eol)
- [Changelog](https://hapi.dev/family/subtext/changelog/)
- [Project policies](https://hapi.dev/policies/)
- [Free and commercial support options](https://hapi.dev/support/)

458
node_modules/@hapi/subtext/lib/index.js generated vendored Executable file
View File

@@ -0,0 +1,458 @@
'use strict';
const Fs = require('fs');
const Fsp = require('fs/promises');
const Os = require('os');
const Querystring = require('querystring');
const Stream = require('stream');
const Zlib = require('zlib');
const Boom = require('@hapi/boom');
const Bourne = require('@hapi/bourne');
const Content = require('@hapi/content');
const File = require('@hapi/file');
const Hoek = require('@hapi/hoek');
const Pez = require('@hapi/pez');
const Wreck = require('@hapi/wreck');
// Module-private namespace: shared constants and helper functions are attached
// to this object throughout the file.
const internals = {

    // NOTE(review): kSubtext appears unused within this file — possibly consumed
    // by a dependant module; verify before removing.
    kSubtext: Symbol('subtext'),

    // Default content-encoding decoders, keyed by encoding name.
    // Callers may supply their own map via options.decoders (see internals.decoder).
    decoders: {
        gzip: (options) => Zlib.createGunzip(options),
        deflate: (options) => Zlib.createInflate(options)
    }
};
exports.parse = async function (req, tap, options) {
Hoek.assert(options, 'Missing options');
Hoek.assert(options.parse !== undefined, 'Missing parse option setting');
Hoek.assert(options.output !== undefined, 'Missing output option setting');
// Content size
const contentLength = req.headers['content-length'];
if (options.maxBytes !== undefined &&
contentLength &&
parseInt(contentLength, 10) > options.maxBytes) {
throw Boom.entityTooLarge('Payload content length greater than maximum allowed: ' + options.maxBytes);
}
// Content type
const contentType = Content.type(options.override || req.headers['content-type'] || options.defaultContentType || 'application/octet-stream');
try {
if (options.allow &&
options.allow.indexOf(contentType.mime) === -1) {
throw Boom.unsupportedMediaType();
}
const parsed = { mime: contentType.mime };
// Parse: true
if (options.parse === true) {
parsed.payload = await internals.parse(req, tap, options, contentType);
return parsed;
}
// Parse: false, 'gunzip'
parsed.payload = await internals.raw(req, tap, options);
return parsed;
}
catch (err) {
err.mime = contentType.mime;
throw err;
}
};
// Fully parse the payload: decompress, tap, then dispatch on content type and
// the requested output mode ('data', 'stream', or 'file').
internals.parse = async function (req, tap, options, contentType) {

    // Decompress when the content-encoding matches a configured decoder

    let source = internals.decoder(req, options);

    // Tap request

    if (tap) {
        [source] = internals.pipe(source, tap);
    }

    // Multipart payloads go through the dedicated dispenser pipeline

    if (contentType.mime === 'multipart/form-data') {
        if (options.multipart === false) {          // Defaults to true
            throw Boom.unsupportedMediaType();
        }

        return await internals.multipart(req, options, source, contentType);
    }

    switch (options.output) {
        case 'stream': {                            // Output: 'stream'
            return source;
        }
        case 'file': {                              // Output: 'file'
            const { item } = await internals.writeFile(req, options, source);
            return item;
        }
    }

    // Output: 'data' - buffer the entire payload and convert by mime type

    const buffer = await Wreck.read(source, { timeout: options.timeout, maxBytes: options.maxBytes });
    return internals.object(options, buffer, contentType.mime);
};
// Wrap the source stream in a decompression stream when its content-encoding
// matches a configured decoder; otherwise return the source untouched.
internals.decoder = function (source, options) {

    const encoding = source.headers['content-encoding'];
    const decoders = options.decoders ?? internals.decoders;

    if (!decoders.hasOwnProperty(encoding)) {
        return source;                                      // No decoder - pass through unmodified
    }

    const stream = decoders[encoding](options.compression?.[encoding] ?? null);

    // Rewrap decoder failures as 400s so corrupt payloads surface as client errors

    const emit = stream.emit;
    stream.emit = (event, ...args) => {

        if (event === 'error') {
            return emit.call(stream, event, Boom.badRequest('Invalid compressed payload', args[0]));
        }

        return emit.call(stream, event, ...args);
    };

    const [decoded] = internals.pipe(source, stream);
    return decoded;
};
// Deliver the payload without parsing ('parse: false' or 'gunzip'), honoring
// the requested output mode ('data', 'stream', or 'file').
internals.raw = async function (req, tap, options) {

    // 'gunzip' mode decompresses but still skips content parsing

    let source = options.parse === 'gunzip' ? internals.decoder(req, options) : req;

    // Tap request

    if (tap) {
        [source] = internals.pipe(source, tap);
    }

    const output = options.output;

    // Output: 'stream'

    if (output === 'stream') {
        return source;
    }

    // Output: 'file'

    if (output === 'file') {
        const { item } = await internals.writeFile(req, options, source);
        return item;
    }

    // Output: 'data' - buffer the full (possibly decompressed) byte stream

    return await Wreck.read(source, { timeout: options.timeout, maxBytes: options.maxBytes });
};
// Convert a buffered payload into a value according to its mime type.
// Throws Boom 400 on malformed JSON and Boom 415 on unsupported types; in both
// cases the raw buffer is attached to the error as `error.raw`.
internals.object = function (options, payload, mime) {

    // Binary - return the raw buffer (null when empty)

    if (mime === 'application/octet-stream') {
        return payload.length === 0 ? null : payload;
    }

    // Text - decode as UTF-8

    if (/^text\/.+$/.test(mime)) {
        return payload.toString('utf8');
    }

    // JSON (including '+json' suffixed types)

    if (/^application\/(?:.+\+)?json$/.test(mime)) {
        if (payload.length === 0) {
            return null;
        }

        try {
            // Bourne.parse applies options.protoAction while parsing
            return Bourne.parse(payload.toString('utf8'), { protoAction: options.protoAction });
        }
        catch (err) {
            const badJson = Boom.badRequest('Invalid request payload JSON format', err);
            badJson.raw = payload;
            throw badJson;
        }
    }

    // Form-encoded - custom parser may be supplied via options.querystring

    if (mime === 'application/x-www-form-urlencoded') {
        const parse = options.querystring ?? Querystring.parse;
        return payload.length === 0 ? {} : parse(payload.toString('utf8'));
    }

    // Anything else is unsupported; expose the raw bytes on the error

    const unsupported = Boom.unsupportedMediaType();
    unsupported.raw = payload;
    throw unsupported;
};
// Parse a multipart/form-data payload via a Pez dispenser.
// Resolves with a map of field name -> value; repeated field names accumulate
// into arrays. File parts are either written to disk (output 'file') or
// buffered/streamed via internals.part().
internals.multipart = function (req, options, source, contentType) {

    return new Promise((resolve, reject) => {

        // Set stream timeout

        const clientTimeout = options.timeout;
        const clientTimeoutId = clientTimeout ? setTimeout(() => reject(Boom.clientTimeout()), clientTimeout) : null;

        // Create parser - contentType supplies the multipart boundary parameters

        const dispenserOptions = Hoek.applyToDefaults(contentType, {
            maxBytes: options.maxBytes,
            maxParts: options.maxParts
        });

        const dispenser = new Pez.Dispenser(dispenserOptions);

        const data = {};
        const pendingFiles = [];        // Promises from internals.writeFile() when output is 'file'

        // On parser failure: remove any files already written, then reject

        const onError = (err) => {

            const cleanup = internals.cleanupFiles(pendingFiles);
            cleanup.catch(Hoek.ignore); // Prevent triggering node's PromiseRejectionHandledWarning
            reject(Boom.badRequest('Invalid multipart payload format', err));
        };

        dispenser.once('error', onError);

        // Record a field value: first occurrence stored directly, repeats form an array

        const set = (name, value) => {

            if (!data.hasOwnProperty(name)) {
                data[name] = value;
            }
            else if (Array.isArray(data[name])) {
                data[name].push(value);
            }
            else {
                data[name] = [data[name], value];
            }
        };

        // Runs once the dispenser closes: detach listeners, wait for pending
        // file writes, then settle the promise.

        const finalize = async () => {

            // Clean up

            clearTimeout(clientTimeoutId);
            dispenser.removeListener('error', onError);
            dispenser.removeListener('part', onPart);
            dispenser.removeListener('field', onField);
            dispenser.removeListener('close', onClose);

            // Wait for files

            try {
                const files = await Promise.all(pendingFiles);
                for (const { item, name } of files) {
                    set(name, item);
                }
            }
            catch (err) {
                reject(err);
                return;
            }

            resolve(data);
        };

        const output = typeof options.multipart === 'object' ? options.multipart.output : options.output; // options.multipart can be true or object

        const onPart = (part) => {

            if (output === 'file') {                                    // Output: 'file'
                pendingFiles.push(internals.writeFile(req, options, part));
            }
            else {
                internals.part(part, output, set, options);             // Output: 'data' / 'stream'
            }
        };

        dispenser.on('part', onPart);

        const onField = (name, value) => set(name, value);
        dispenser.on('field', onField);

        const onClose = () => finalize();
        dispenser.once('close', onClose);

        source.pipe(dispenser);
    });
};
// Stream the payload (or a multipart part) to a uniquely named temp file,
// counting bytes and enforcing options.maxBytes via internals.Counter.
// Resolves { item: { path, bytes [, filename, headers] } [, name] }; on any
// failure the partially written file is unlinked before rejecting.
internals.writeFile = function (req, options, stream) {

    const promise = new Promise((resolve, reject) => {

        const path = File.uniqueFilename(options.uploads ?? Os.tmpdir());
        const file = Fs.createWriteStream(path, { flags: 'wx' });       // 'wx' fails if the path already exists
        const counter = new internals.Counter(options);

        // Shared completion handler for 'close', 'error', and client abort

        const finalize = (err) => {

            req.removeListener('aborted', onAbort);
            file.removeListener('close', finalize);
            file.removeListener('error', finalize);

            if (err) {
                // Detach both pipe stages before removing the partial file
                unpipeStreamToCounter();
                unpipeCounterToFile();
                file.close();
                Fs.unlink(path, (/* fsErr */) => reject(err)); // Ignore unlink errors
                return;
            }

            const result = {
                item: {
                    path,
                    bytes: counter.bytes
                }
            };

            if (stream.name) { // Multipart
                result.name = stream.name;
                result.item.filename = stream.filename;
                result.item.headers = stream.headers;
            }

            resolve(result);
        };

        file.once('close', finalize);
        file.once('error', finalize);

        const onAbort = () => finalize(Boom.badRequest('Client connection aborted'));
        req.once('aborted', onAbort);

        // Keep the unpipe callbacks so finalize() can tear the pipeline down on error

        const [, unpipeStreamToCounter] = internals.pipe(stream, counter);
        const [, unpipeCounterToFile] = internals.pipe(counter, file);
    });

    promise.catch(Hoek.ignore); // Prevent triggering node's PromiseRejectionHandledWarning
    return promise;
};
// Remove every temp file produced by pending internals.writeFile() promises.
// Waits for all writes to settle first; rejected writes unlink their own file,
// so only fulfilled results (which carry an item.path) are removed here.
internals.cleanupFiles = async (pendingFiles) => {

    const settled = await Promise.allSettled(pendingFiles);

    const unlinks = settled
        .filter(({ value }) => value)
        .map(({ value }) => Fsp.unlink(value.item.path));

    await Promise.all(unlinks);
};
// Handle a single non-file multipart part: buffer it, then record it via set()
// as a stream, parsed object, or raw buffer depending on the output mode.
internals.part = async function (part, output, set, options) {

    const payload = await Wreck.read(part);                 // Error handled by dispenser.once('error')

    // Output: 'stream' - rewrap the buffered bytes as a readable stream

    if (output === 'stream') {
        const item = Wreck.toReadableStream(payload);
        item.hapi = {
            filename: part.filename,
            headers: part.headers
        };

        return set(part.name, item);
    }

    // Output: 'data' / 'annotated'

    const contentTypeHeader = part.headers['content-type'] || '';
    const mime = contentTypeHeader.split(';')[0].trim().toLowerCase();

    const annotate = (value) => {

        const recorded = output === 'annotated' ? { filename: part.filename, headers: part.headers, payload: value } : value;
        return set(part.name, recorded);
    };

    if (!mime) {
        return annotate(payload);
    }

    if (!payload.length) {
        return annotate({});
    }

    try {
        annotate(internals.object(options, payload, mime));
    }
    catch (err) {
        annotate(payload);              // Unparseable content falls back to the raw buffer
    }
};
// Pipe `from` into `to` while forwarding source errors to the destination.
// Returns [pipedStream, detach] where detach() removes the error listener and
// breaks the pipe (returning from.unpipe()'s result).
internals.pipe = function (from, to) {

    const detach = () => {

        from.removeListener('error', onSourceError);
        return from.unpipe(to);
    };

    // An upstream failure is surfaced on the destination after unpiping

    const onSourceError = (err) => {

        detach();
        to.emit('error', err);
    };

    from.once('error', onSourceError);

    return [from.pipe(to), detach];
};
// Pass-through transform that counts bytes flowing through it and errors with
// a 413 once the running total exceeds options.maxBytes (when defined).
internals.Counter = class extends Stream.Transform {

    constructor(options) {

        super();
        this.bytes = 0;                     // Running byte count, read by internals.writeFile()
        this._maxBytes = options.maxBytes;
    }

    _transform(chunk, encoding, next) {

        this.bytes += chunk.length;

        const limit = this._maxBytes;
        if (limit !== undefined &&
            this.bytes > limit) {

            return next(Boom.entityTooLarge('Payload content length greater than maximum allowed: ' + limit));
        }

        return next(null, chunk);
    }
};

42
node_modules/@hapi/subtext/package.json generated vendored Normal file
View File

@@ -0,0 +1,42 @@
{
"name": "@hapi/subtext",
"description": "HTTP payload parsing",
"version": "8.1.1",
"repository": "git://github.com/hapijs/subtext",
"main": "lib/index.js",
"files": [
"lib"
],
"keywords": [
"http",
"payload",
"file",
"stream",
"multipart"
],
"eslintConfig": {
"extends": [
"plugin:@hapi/module"
]
},
"dependencies": {
"@hapi/boom": "^10.0.1",
"@hapi/bourne": "^3.0.0",
"@hapi/content": "^6.0.0",
"@hapi/file": "^3.0.0",
"@hapi/hoek": "^11.0.2",
"@hapi/pez": "^6.1.0",
"@hapi/wreck": "^18.0.1"
},
"devDependencies": {
"@hapi/code": "^9.0.3",
"@hapi/eslint-plugin": "^6.0.0",
"@hapi/lab": "^25.1.2",
"form-data": "^4.0.0"
},
"scripts": {
"test": "lab -a @hapi/code -t 100 -L",
"test-cov-html": "lab -a @hapi/code -r html -o coverage.html"
},
"license": "BSD-3-Clause"
}