This commit is contained in:
PLBXNebulia-Formation 2025-11-21 09:23:11 +01:00
commit d1c8cae2c1
1417 changed files with 326736 additions and 0 deletions

View file

@ -0,0 +1,163 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.uncompressibleCommands = exports.Compressor = void 0;
exports.compress = compress;
exports.decompress = decompress;
exports.compressCommand = compressCommand;
exports.decompressResponse = decompressResponse;
const util_1 = require("util");
const zlib = require("zlib");
const constants_1 = require("../../constants");
const deps_1 = require("../../deps");
const error_1 = require("../../error");
const commands_1 = require("../commands");
const constants_2 = require("./constants");
/** @public */
// Numeric compressor identifiers as they appear on the wire; decompressResponse
// reads this id byte out of the OP_COMPRESSED message to pick a decompressor.
exports.Compressor = Object.freeze({
    none: 0,
    snappy: 1,
    zlib: 2,
    zstd: 3
});
// Commands that must never be sent compressed. These are handshake and
// authentication-related commands — presumably excluded so the credential
// exchange completes before compression applies; verify against the driver
// wire-compression specification.
exports.uncompressibleCommands = new Set([
    constants_1.LEGACY_HELLO_COMMAND,
    'saslStart',
    'saslContinue',
    'getnonce',
    'authenticate',
    'createUser',
    'updateUser',
    'copydbSaslStart',
    'copydbgetnonce',
    'copydb'
]);
// Compression level passed to zstd.compress() in compress() below.
const ZSTD_COMPRESSION_LEVEL = 3;
// Promisified zlib helpers so compress()/decompress() can await them.
const zlibInflate = (0, util_1.promisify)(zlib.inflate.bind(zlib));
const zlibDeflate = (0, util_1.promisify)(zlib.deflate.bind(zlib));
// Lazily loaded optional dependencies — populated by loadZstd()/loadSnappy().
let zstd;
let Snappy = null;
/**
 * Resolves the optional snappy dependency and caches it in the module-level
 * `Snappy` binding. Throws the recorded load error when snappy is unavailable.
 */
function loadSnappy() {
    if (Snappy != null) {
        return Snappy;
    }
    const snappyImport = (0, deps_1.getSnappy)();
    if ('kModuleError' in snappyImport) {
        throw snappyImport.kModuleError;
    }
    Snappy = snappyImport;
    return Snappy;
}
/**
 * Compresses a payload with the compressor previously agreed upon with the server.
 * Throws MongoInvalidArgumentError for an unrecognized agreedCompressor.
 */
async function compress(options, dataToBeCompressed) {
    switch (options.agreedCompressor) {
        case 'snappy':
            // loadSnappy caches the module and returns it
            return await loadSnappy().compress(dataToBeCompressed);
        case 'zstd': {
            loadZstd();
            if ('kModuleError' in zstd) {
                throw zstd.kModuleError;
            }
            return await zstd.compress(dataToBeCompressed, ZSTD_COMPRESSION_LEVEL);
        }
        case 'zlib': {
            const zlibOptions = {};
            // note: a level of 0 is intentionally treated as "use zlib's default"
            if (options.zlibCompressionLevel) {
                zlibOptions.level = options.zlibCompressionLevel;
            }
            return await zlibDeflate(dataToBeCompressed, zlibOptions);
        }
        default:
            throw new error_1.MongoInvalidArgumentError(`Unknown compressor ${options.agreedCompressor} failed to compress`);
    }
}
/**
 * Decompresses a payload using the compressor identified by the wire id.
 * Throws MongoDecompressionError for ids outside the known Compressor set.
 */
async function decompress(compressorID, compressedData) {
    const knownIds = Object.values(exports.Compressor);
    if (!knownIds.includes(compressorID)) {
        throw new error_1.MongoDecompressionError(`Server sent message compressed using an unsupported compressor. (Received compressor ID ${compressorID})`);
    }
    switch (compressorID) {
        case exports.Compressor.snappy:
            // loadSnappy caches the module and returns it
            return await loadSnappy().uncompress(compressedData, { asBuffer: true });
        case exports.Compressor.zstd:
            loadZstd();
            if ('kModuleError' in zstd) {
                throw zstd.kModuleError;
            }
            return await zstd.decompress(compressedData);
        case exports.Compressor.zlib:
            return await zlibInflate(compressedData);
        default:
            // Compressor.none — the payload was never compressed
            return compressedData;
    }
}
/**
 * Load ZStandard if it is not already set.
 * The result (module or a { kModuleError } marker) is cached module-wide.
 */
function loadZstd() {
    zstd ??= (0, deps_1.getZstdLibrary)();
}
// Every wire message begins with a 16-byte header: four int32s
// (length, requestId, responseTo, opCode) — see decompressResponse.
const MESSAGE_HEADER_SIZE = 16;
/**
 * @internal
 *
 * Serializes an OP_MSG or OP_QUERY command to BSON bytes, wrapping it in an
 * OpCompressedRequest first when a compressor is agreed and the command is
 * eligible for compression.
 */
async function compressCommand(command, description) {
    let finalCommand = command;
    if (description.agreedCompressor !== 'none' && commands_1.OpCompressedRequest.canCompress(command)) {
        finalCommand = new commands_1.OpCompressedRequest(command, {
            agreedCompressor: description.agreedCompressor ?? 'none',
            zlibCompressionLevel: description.zlibCompressionLevel ?? 0
        });
    }
    const serialized = await finalCommand.toBin();
    return Buffer.concat(serialized);
}
/**
 * @internal
 *
 * Decompresses an OP_MSG or OP_QUERY response from the server, if compression is configured.
 *
 * This method does not parse the response's BSON.
 */
async function decompressResponse(message) {
    // Standard 16-byte header: length, requestId, responseTo, opCode (all little-endian int32)
    const messageHeader = {
        length: message.readInt32LE(0),
        requestId: message.readInt32LE(4),
        responseTo: message.readInt32LE(8),
        opCode: message.readInt32LE(12)
    };
    if (messageHeader.opCode !== constants_2.OP_COMPRESSED) {
        // Not compressed: hand the body straight to the matching response class
        const ResponseType = messageHeader.opCode === constants_2.OP_MSG ? commands_1.OpMsgResponse : commands_1.OpReply;
        const messageBody = message.subarray(MESSAGE_HEADER_SIZE);
        return new ResponseType(message, messageHeader, messageBody);
    }
    // OP_COMPRESSED payload: int32 wrapped opCode, int32 uncompressed length,
    // one compressor-id byte, then the compressed bytes
    const header = {
        ...messageHeader,
        fromCompressed: true,
        opCode: message.readInt32LE(MESSAGE_HEADER_SIZE),
        length: message.readInt32LE(MESSAGE_HEADER_SIZE + 4)
    };
    const compressorID = message[MESSAGE_HEADER_SIZE + 8];
    // subarray instead of the deprecated Buffer#slice — identical behavior for
    // Buffer, and consistent with the uncompressed path above
    const compressedBuffer = message.subarray(MESSAGE_HEADER_SIZE + 9);
    // recalculate based on wrapped opcode
    const ResponseType = header.opCode === constants_2.OP_MSG ? commands_1.OpMsgResponse : commands_1.OpReply;
    const messageBody = await decompress(compressorID, compressedBuffer);
    if (messageBody.length !== header.length) {
        throw new error_1.MongoDecompressionError('Message body and message header must be the same length');
    }
    return new ResponseType(message, header, messageBody);
}
//# sourceMappingURL=compression.js.map

View file

@ -0,0 +1 @@
{"version":3,"file":"compression.js","sourceRoot":"","sources":["../../../src/cmap/wire_protocol/compression.ts"],"names":[],"mappings":";;;AA8DA,4BA6BC;AAGD,gCA+BC;AAmBD,0CAaC;AASD,gDA8BC;AApMD,+BAAiC;AACjC,6BAA6B;AAE7B,+CAAuD;AACvD,qCAAuF;AACvF,uCAAiF;AACjF,0CAOqB;AACrB,2CAAoD;AAEpD,cAAc;AACD,QAAA,UAAU,GAAG,MAAM,CAAC,MAAM,CAAC;IACtC,IAAI,EAAE,CAAC;IACP,MAAM,EAAE,CAAC;IACT,IAAI,EAAE,CAAC;IACP,IAAI,EAAE,CAAC;CACC,CAAC,CAAC;AAQC,QAAA,sBAAsB,GAAG,IAAI,GAAG,CAAC;IAC5C,gCAAoB;IACpB,WAAW;IACX,cAAc;IACd,UAAU;IACV,cAAc;IACd,YAAY;IACZ,YAAY;IACZ,iBAAiB;IACjB,gBAAgB;IAChB,QAAQ;CACT,CAAC,CAAC;AAEH,MAAM,sBAAsB,GAAG,CAAC,CAAC;AAEjC,MAAM,WAAW,GAAG,IAAA,gBAAS,EAAC,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;AACvD,MAAM,WAAW,GAAG,IAAA,gBAAS,EAAC,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;AAEvD,IAAI,IAAe,CAAC;AACpB,IAAI,MAAM,GAAqB,IAAI,CAAC;AACpC,SAAS,UAAU;IACjB,IAAI,MAAM,IAAI,IAAI,EAAE,CAAC;QACnB,MAAM,YAAY,GAAG,IAAA,gBAAS,GAAE,CAAC;QACjC,IAAI,cAAc,IAAI,YAAY,EAAE,CAAC;YACnC,MAAM,YAAY,CAAC,YAAY,CAAC;QAClC,CAAC;QACD,MAAM,GAAG,YAAY,CAAC;IACxB,CAAC;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,8DAA8D;AACvD,KAAK,UAAU,QAAQ,CAC5B,OAAmC,EACnC,kBAA0B;IAE1B,MAAM,WAAW,GAAG,EAAsB,CAAC;IAC3C,QAAQ,OAAO,CAAC,gBAAgB,EAAE,CAAC;QACjC,KAAK,QAAQ,CAAC,CAAC,CAAC;YACd,MAAM,KAAK,UAAU,EAAE,CAAC;YACxB,OAAO,MAAM,MAAM,CAAC,QAAQ,CAAC,kBAAkB,CAAC,CAAC;QACnD,CAAC;QACD,KAAK,MAAM,CAAC,CAAC,CAAC;YACZ,QAAQ,EAAE,CAAC;YACX,IAAI,cAAc,IAAI,IAAI,EAAE,CAAC;gBAC3B,MAAM,IAAI,CAAC,cAAc,CAAC,CAAC;YAC7B,CAAC;YACD,OAAO,MAAM,IAAI,CAAC,QAAQ,CAAC,kBAAkB,EAAE,sBAAsB,CAAC,CAAC;QACzE,CAAC;QACD,KAAK,MAAM,CAAC,CAAC,CAAC;YACZ,IAAI,OAAO,CAAC,oBAAoB,EAAE,CAAC;gBACjC,WAAW,CAAC,KAAK,GAAG,OAAO,CAAC,oBAAoB,CAAC;YACnD,CAAC;YACD,OAAO,MAAM,WAAW,CAAC,kBAAkB,EAAE,WAAW,CAAC,CAAC;QAC5D,CAAC;QACD,OAAO,CAAC,CAAC,CAAC;YACR,MAAM,IAAI,iCAAyB,CACjC,sBAAsB,OAAO,CAAC,gBAAgB,qBAAqB,CACpE,CAAC;QACJ,CAAC;IACH,CAAC;AACH,CAAC;AAED,kDAAkD;AAC3C,KAAK,UAAU,UAAU,CAAC,YAAoB,EAAE,cAAsB;IAC3E,IACE,YAAY,KAAK,kBAAU,CAAC,MAAM;QAClC,YAAY,KAAK,kBAAU,CAAC,IAAI;QAChC,YAAY,KA
AK,kBAAU,CAAC,IAAI;QAChC,YAAY,KAAK,kBAAU,CAAC,IAAI,EAChC,CAAC;QACD,MAAM,IAAI,+BAAuB,CAC/B,2FAA2F,YAAY,GAAG,CAC3G,CAAC;IACJ,CAAC;IAED,QAAQ,YAAY,EAAE,CAAC;QACrB,KAAK,kBAAU,CAAC,MAAM,CAAC,CAAC,CAAC;YACvB,MAAM,KAAK,UAAU,EAAE,CAAC;YACxB,OAAO,MAAM,MAAM,CAAC,UAAU,CAAC,cAAc,EAAE,EAAE,QAAQ,EAAE,IAAI,EAAE,CAAC,CAAC;QACrE,CAAC;QACD,KAAK,kBAAU,CAAC,IAAI,CAAC,CAAC,CAAC;YACrB,QAAQ,EAAE,CAAC;YACX,IAAI,cAAc,IAAI,IAAI,EAAE,CAAC;gBAC3B,MAAM,IAAI,CAAC,cAAc,CAAC,CAAC;YAC7B,CAAC;YACD,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,cAAc,CAAC,CAAC;QAC/C,CAAC;QACD,KAAK,kBAAU,CAAC,IAAI,CAAC,CAAC,CAAC;YACrB,OAAO,MAAM,WAAW,CAAC,cAAc,CAAC,CAAC;QAC3C,CAAC;QACD,OAAO,CAAC,CAAC,CAAC;YACR,OAAO,cAAc,CAAC;QACxB,CAAC;IACH,CAAC;AACH,CAAC;AAED;;GAEG;AACH,SAAS,QAAQ;IACf,IAAI,CAAC,IAAI,EAAE,CAAC;QACV,IAAI,GAAG,IAAA,qBAAc,GAAE,CAAC;IAC1B,CAAC;AACH,CAAC;AAED,MAAM,mBAAmB,GAAG,EAAE,CAAC;AAE/B;;;;;GAKG;AACI,KAAK,UAAU,eAAe,CACnC,OAAiC,EACjC,WAAiF;IAEjF,MAAM,YAAY,GAChB,WAAW,CAAC,gBAAgB,KAAK,MAAM,IAAI,CAAC,8BAAmB,CAAC,WAAW,CAAC,OAAO,CAAC;QAClF,CAAC,CAAC,OAAO;QACT,CAAC,CAAC,IAAI,8BAAmB,CAAC,OAAO,EAAE;YAC/B,gBAAgB,EAAE,WAAW,CAAC,gBAAgB,IAAI,MAAM;YACxD,oBAAoB,EAAE,WAAW,CAAC,oBAAoB,IAAI,CAAC;SAC5D,CAAC,CAAC;IACT,MAAM,IAAI,GAAG,MAAM,YAAY,CAAC,KAAK,EAAE,CAAC;IACxC,OAAO,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;AAC7B,CAAC;AAED;;;;;;GAMG;AACI,KAAK,UAAU,kBAAkB,CAAC,OAAe;IACtD,MAAM,aAAa,GAAkB;QACnC,MAAM,EAAE,OAAO,CAAC,WAAW,CAAC,CAAC,CAAC;QAC9B,SAAS,EAAE,OAAO,CAAC,WAAW,CAAC,CAAC,CAAC;QACjC,UAAU,EAAE,OAAO,CAAC,WAAW,CAAC,CAAC,CAAC;QAClC,MAAM,EAAE,OAAO,CAAC,WAAW,CAAC,EAAE,CAAC;KAChC,CAAC;IAEF,IAAI,aAAa,CAAC,MAAM,KAAK,yBAAa,EAAE,CAAC;QAC3C,MAAM,YAAY,GAAG,aAAa,CAAC,MAAM,KAAK,kBAAM,CAAC,CAAC,CAAC,wBAAa,CAAC,CAAC,CAAC,kBAAO,CAAC;QAC/E,MAAM,WAAW,GAAG,OAAO,CAAC,QAAQ,CAAC,mBAAmB,CAAC,CAAC;QAC1D,OAAO,IAAI,YAAY,CAAC,OAAO,EAAE,aAAa,EAAE,WAAW,CAAC,CAAC;IAC/D,CAAC;IAED,MAAM,MAAM,GAAkB;QAC5B,GAAG,aAAa;QAChB,cAAc,EAAE,IAAI;QACpB,MAAM,EAAE,OAAO,CAAC,WAAW,CAAC,mBAAmB,CAAC;QAChD,MAAM,EAAE,OAAO,CAAC,WAAW,CAAC,mBAAmB,GAAG,CAAC,CAAC;KACrD,CAAC;IACF,MAAM,YAAY,GAAG,OAAO,CA
AC,mBAAmB,GAAG,CAAC,CAAC,CAAC;IACtD,MAAM,gBAAgB,GAAG,OAAO,CAAC,KAAK,CAAC,mBAAmB,GAAG,CAAC,CAAC,CAAC;IAEhE,sCAAsC;IACtC,MAAM,YAAY,GAAG,MAAM,CAAC,MAAM,KAAK,kBAAM,CAAC,CAAC,CAAC,wBAAa,CAAC,CAAC,CAAC,kBAAO,CAAC;IACxE,MAAM,WAAW,GAAG,MAAM,UAAU,CAAC,YAAY,EAAE,gBAAgB,CAAC,CAAC;IACrE,IAAI,WAAW,CAAC,MAAM,KAAK,MAAM,CAAC,MAAM,EAAE,CAAC;QACzC,MAAM,IAAI,+BAAuB,CAAC,yDAAyD,CAAC,CAAC;IAC/F,CAAC;IACD,OAAO,IAAI,YAAY,CAAC,OAAO,EAAE,MAAM,EAAE,WAAW,CAAC,CAAC;AACxD,CAAC"}

View file

@ -0,0 +1,21 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.OP_MSG = exports.OP_COMPRESSED = exports.OP_DELETE = exports.OP_QUERY = exports.OP_INSERT = exports.OP_UPDATE = exports.OP_REPLY = exports.MIN_SUPPORTED_RAW_DATA_SERVER_VERSION = exports.MIN_SUPPORTED_RAW_DATA_WIRE_VERSION = exports.MIN_SUPPORTED_QE_SERVER_VERSION = exports.MIN_SUPPORTED_QE_WIRE_VERSION = exports.MAX_SUPPORTED_WIRE_VERSION = exports.MIN_SUPPORTED_WIRE_VERSION = exports.MIN_SUPPORTED_SNAPSHOT_READS_SERVER_VERSION = exports.MIN_SUPPORTED_SNAPSHOT_READS_WIRE_VERSION = exports.MAX_SUPPORTED_SERVER_VERSION = exports.MIN_SUPPORTED_SERVER_VERSION = void 0;
// Supported MongoDB server version range for this driver build.
exports.MIN_SUPPORTED_SERVER_VERSION = '4.2';
exports.MAX_SUPPORTED_SERVER_VERSION = '8.2';
// Minimum wire/server versions gating snapshot reads.
exports.MIN_SUPPORTED_SNAPSHOT_READS_WIRE_VERSION = 13;
exports.MIN_SUPPORTED_SNAPSHOT_READS_SERVER_VERSION = '5.0';
// Overall wire protocol version range the driver speaks.
exports.MIN_SUPPORTED_WIRE_VERSION = 8;
exports.MAX_SUPPORTED_WIRE_VERSION = 27;
// Minimums for Queryable Encryption (QE) support.
exports.MIN_SUPPORTED_QE_WIRE_VERSION = 21;
exports.MIN_SUPPORTED_QE_SERVER_VERSION = '7.0';
// Minimums for the rawData feature.
exports.MIN_SUPPORTED_RAW_DATA_WIRE_VERSION = 27;
exports.MIN_SUPPORTED_RAW_DATA_SERVER_VERSION = '8.2';
// Wire protocol opcodes.
exports.OP_REPLY = 1;
exports.OP_UPDATE = 2001;
exports.OP_INSERT = 2002;
exports.OP_QUERY = 2004;
exports.OP_DELETE = 2006;
exports.OP_COMPRESSED = 2012;
exports.OP_MSG = 2013;
//# sourceMappingURL=constants.js.map

View file

@ -0,0 +1 @@
{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../../src/cmap/wire_protocol/constants.ts"],"names":[],"mappings":";;;AAAa,QAAA,4BAA4B,GAAG,KAAK,CAAC;AACrC,QAAA,4BAA4B,GAAG,KAAK,CAAC;AACrC,QAAA,yCAAyC,GAAG,EAAE,CAAC;AAC/C,QAAA,2CAA2C,GAAG,KAAK,CAAC;AACpD,QAAA,0BAA0B,GAAG,CAAC,CAAC;AAC/B,QAAA,0BAA0B,GAAG,EAAE,CAAC;AAChC,QAAA,6BAA6B,GAAG,EAAE,CAAC;AACnC,QAAA,+BAA+B,GAAG,KAAK,CAAC;AACxC,QAAA,mCAAmC,GAAG,EAAE,CAAC;AACzC,QAAA,qCAAqC,GAAG,KAAK,CAAC;AAC9C,QAAA,QAAQ,GAAG,CAAC,CAAC;AACb,QAAA,SAAS,GAAG,IAAI,CAAC;AACjB,QAAA,SAAS,GAAG,IAAI,CAAC;AACjB,QAAA,QAAQ,GAAG,IAAI,CAAC;AAChB,QAAA,SAAS,GAAG,IAAI,CAAC;AACjB,QAAA,aAAa,GAAG,IAAI,CAAC;AACrB,QAAA,MAAM,GAAG,IAAI,CAAC"}

111
node_modules/mongodb/lib/cmap/wire_protocol/on_data.js generated vendored Normal file
View file

@ -0,0 +1,111 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.onData = onData;
const utils_1 = require("../../utils");
/**
 * onData is adapted from Node.js' events.on helper
 * https://nodejs.org/api/events.html#eventsonemitter-eventname-options
 *
 * Returns an AsyncIterator that iterates each 'data' event emitted from emitter.
 * It will reject upon an error event.
 */
function onData(emitter, { timeoutContext, signal }) {
    // Bail out immediately if the caller's signal is already aborted
    signal?.throwIfAborted();
    // Setup pending events and pending promise lists
    /**
     * When the caller has not yet called .next(), we store the
     * value from the event in this list. Next time they call .next()
     * we pull the first value out of this list and resolve a promise with it.
     */
    const unconsumedEvents = new utils_1.List();
    /**
     * When there has not yet been an event, a new promise will be created
     * and implicitly stored in this list. When an event occurs we take the first
     * promise in this list and resolve it.
     */
    const unconsumedPromises = new utils_1.List();
    /**
     * Stores an error created by an error event.
     * This error will turn into a rejection for the subsequent .next() call
     */
    let error = null;
    /** Set to true only after event listeners have been removed. */
    let finished = false;
    const iterator = {
        next() {
            // First, we consume all unread events
            const value = unconsumedEvents.shift();
            if (value != null) {
                return Promise.resolve({ value, done: false });
            }
            // Then we error, if an error happened
            // This happens one time if at all, because after 'error'
            // we stop listening
            if (error != null) {
                const p = Promise.reject(error);
                // Only the first element errors
                error = null;
                return p;
            }
            // If the iterator is finished, resolve to done
            if (finished)
                return closeHandler();
            // Wait until an event happens
            const { promise, resolve, reject } = (0, utils_1.promiseWithResolvers)();
            unconsumedPromises.push({ resolve, reject });
            return promise;
        },
        return() {
            return closeHandler();
        },
        throw(err) {
            errorHandler(err);
            return Promise.resolve({ value: undefined, done: true });
        },
        [Symbol.asyncIterator]() {
            return this;
        },
        async [Symbol.asyncDispose]() {
            await closeHandler();
        }
    };
    // Adding event handlers
    emitter.on('data', eventHandler);
    emitter.on('error', errorHandler);
    // Abort is routed through errorHandler so a pending .next() rejects with the abort reason
    const abortListener = (0, utils_1.addAbortListener)(signal, function () {
        errorHandler(this.reason);
    });
    // A socket-read timeout likewise rejects the iterator via errorHandler
    const timeoutForSocketRead = timeoutContext?.timeoutForSocketRead;
    timeoutForSocketRead?.throwIfExpired();
    timeoutForSocketRead?.then(undefined, errorHandler);
    return iterator;
    // Resolves the oldest waiting .next(); otherwise queues the value for a later .next()
    function eventHandler(value) {
        const promise = unconsumedPromises.shift();
        if (promise != null)
            promise.resolve({ value, done: false });
        else
            unconsumedEvents.push(value);
    }
    // Rejects the oldest waiting .next() (or stores the error for the next call), then tears down
    function errorHandler(err) {
        const promise = unconsumedPromises.shift();
        if (promise != null)
            promise.reject(err);
        else
            error = err;
        // fire-and-forget cleanup; closeHandler's returned promise is not needed here
        void closeHandler();
    }
    function closeHandler() {
        // Removing event handlers
        emitter.off('data', eventHandler);
        emitter.off('error', errorHandler);
        abortListener?.[utils_1.kDispose]();
        finished = true;
        timeoutForSocketRead?.clear();
        // finished is true at this point, so doneResult is { value: undefined, done: true }
        const doneResult = { value: undefined, done: finished };
        // Settle any callers still awaiting .next() as done
        for (const promise of unconsumedPromises) {
            promise.resolve(doneResult);
        }
        return Promise.resolve(doneResult);
    }
}
//# sourceMappingURL=on_data.js.map

View file

@ -0,0 +1 @@
{"version":3,"file":"on_data.js","sourceRoot":"","sources":["../../../src/cmap/wire_protocol/on_data.ts"],"names":[],"mappings":";;AAsBA,wBAoHC;AAtID,uCAAqF;AAWrF;;;;;;GAMG;AACH,SAAgB,MAAM,CACpB,OAAqB,EACrB,EAAE,cAAc,EAAE,MAAM,EAAmD;IAE3E,MAAM,EAAE,cAAc,EAAE,CAAC;IAEzB,iDAAiD;IACjD;;;;OAIG;IACH,MAAM,gBAAgB,GAAG,IAAI,YAAI,EAAU,CAAC;IAC5C;;;;OAIG;IACH,MAAM,kBAAkB,GAAG,IAAI,YAAI,EAAmB,CAAC;IAEvD;;;OAGG;IACH,IAAI,KAAK,GAAiB,IAAI,CAAC;IAE/B,gEAAgE;IAChE,IAAI,QAAQ,GAAG,KAAK,CAAC;IAErB,MAAM,QAAQ,GAA6C;QACzD,IAAI;YACF,sCAAsC;YACtC,MAAM,KAAK,GAAG,gBAAgB,CAAC,KAAK,EAAE,CAAC;YACvC,IAAI,KAAK,IAAI,IAAI,EAAE,CAAC;gBAClB,OAAO,OAAO,CAAC,OAAO,CAAC,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC;YACjD,CAAC;YAED,sCAAsC;YACtC,yDAAyD;YACzD,oBAAoB;YACpB,IAAI,KAAK,IAAI,IAAI,EAAE,CAAC;gBAClB,MAAM,CAAC,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;gBAChC,gCAAgC;gBAChC,KAAK,GAAG,IAAI,CAAC;gBACb,OAAO,CAAC,CAAC;YACX,CAAC;YAED,+CAA+C;YAC/C,IAAI,QAAQ;gBAAE,OAAO,YAAY,EAAE,CAAC;YAEpC,8BAA8B;YAC9B,MAAM,EAAE,OAAO,EAAE,OAAO,EAAE,MAAM,EAAE,GAAG,IAAA,4BAAoB,GAA0B,CAAC;YACpF,kBAAkB,CAAC,IAAI,CAAC,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC;YAC7C,OAAO,OAAO,CAAC;QACjB,CAAC;QAED,MAAM;YACJ,OAAO,YAAY,EAAE,CAAC;QACxB,CAAC;QAED,KAAK,CAAC,GAAU;YACd,YAAY,CAAC,GAAG,CAAC,CAAC;YAClB,OAAO,OAAO,CAAC,OAAO,CAAC,EAAE,KAAK,EAAE,SAAS,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC,CAAC;QAC3D,CAAC;QAED,CAAC,MAAM,CAAC,aAAa,CAAC;YACpB,OAAO,IAAI,CAAC;QACd,CAAC;QAED,KAAK,CAAC,CAAC,MAAM,CAAC,YAAY,CAAC;YACzB,MAAM,YAAY,EAAE,CAAC;QACvB,CAAC;KACF,CAAC;IAEF,wBAAwB;IACxB,OAAO,CAAC,EAAE,CAAC,MAAM,EAAE,YAAY,CAAC,CAAC;IACjC,OAAO,CAAC,EAAE,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;IAClC,MAAM,aAAa,GAAG,IAAA,wBAAgB,EAAC,MAAM,EAAE;QAC7C,YAAY,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;IAC5B,CAAC,CAAC,CAAC;IAEH,MAAM,oBAAoB,GAAG,cAAc,EAAE,oBAAoB,CAAC;IAClE,oBAAoB,EAAE,cAAc,EAAE,CAAC;IACvC,oBAAoB,EAAE,IAAI,CAAC,SAAS,EAAE,YAAY,CAAC,CAAC;IAEpD,OAAO,QAAQ,CAAC;IAEhB,SAAS,YAAY,CAAC,KAAa;QACjC,MAAM,OAAO,GAAG,kBAAkB,CAAC,KAAK,EAAE,CAAC;QAC3C,IAAI,OAAO,IAAI,IAAI;YAAE,OAAO,CAAC,OAAO,CAAC,EAAE,KAAK,EAAE,IAAI,EAAE,
KAAK,EAAE,CAAC,CAAC;;YACxD,gBAAgB,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;IACpC,CAAC;IAED,SAAS,YAAY,CAAC,GAAU;QAC9B,MAAM,OAAO,GAAG,kBAAkB,CAAC,KAAK,EAAE,CAAC;QAE3C,IAAI,OAAO,IAAI,IAAI;YAAE,OAAO,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;;YACpC,KAAK,GAAG,GAAG,CAAC;QACjB,KAAK,YAAY,EAAE,CAAC;IACtB,CAAC;IAED,SAAS,YAAY;QACnB,wBAAwB;QACxB,OAAO,CAAC,GAAG,CAAC,MAAM,EAAE,YAAY,CAAC,CAAC;QAClC,OAAO,CAAC,GAAG,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACnC,aAAa,EAAE,CAAC,gBAAQ,CAAC,EAAE,CAAC;QAC5B,QAAQ,GAAG,IAAI,CAAC;QAChB,oBAAoB,EAAE,KAAK,EAAE,CAAC;QAC9B,MAAM,UAAU,GAAG,EAAE,KAAK,EAAE,SAAS,EAAE,IAAI,EAAE,QAAQ,EAAW,CAAC;QAEjE,KAAK,MAAM,OAAO,IAAI,kBAAkB,EAAE,CAAC;YACzC,OAAO,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;QAC9B,CAAC;QAED,OAAO,OAAO,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;IACrC,CAAC;AACH,CAAC"}

View file

@ -0,0 +1,222 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.OnDemandDocument = void 0;
const bson_1 = require("../../../bson");
// Indexes into the element tuples produced by parseToElementsToArray:
// [type, nameOffset, nameLength, offset, length]
const BSONElementOffset = {
    type: 0,
    nameOffset: 1,
    nameLength: 2,
    offset: 3,
    length: 4
};
/** @internal */
class OnDemandDocument {
    constructor(bson, offset = 0, isArray = false,
    /** If elements was already calculated */
    elements) {
        /**
         * Maps JS strings to elements and jsValues for speeding up subsequent lookups.
         * - If `false` then name does not exist in the BSON document
         * - If `CachedBSONElement` instance name exists
         * - If `cache[name].value == null` jsValue has not yet been parsed
         * - Null/Undefined values do not get cached because they are zero-length values.
         */
        this.cache = Object.create(null);
        /** Caches the index of elements that have been named */
        this.indexFound = Object.create(null);
        this.bson = bson;
        this.offset = offset;
        this.isArray = isArray;
        // Reuse pre-parsed elements when supplied; otherwise scan the bytes now.
        this.elements = elements ?? (0, bson_1.parseToElementsToArray)(this.bson, offset);
    }
    /** Only supports basic latin strings */
    isElementName(name, element) {
        const nameLength = element[BSONElementOffset.nameLength];
        const nameOffset = element[BSONElementOffset.nameOffset];
        if (name.length !== nameLength)
            return false;
        const nameEnd = nameOffset + nameLength;
        // Compare raw bytes to charCodes — only valid for single-byte (basic latin) names.
        for (let byteIndex = nameOffset, charIndex = 0; charIndex < name.length && byteIndex < nameEnd; charIndex++, byteIndex++) {
            if (this.bson[byteIndex] !== name.charCodeAt(charIndex))
                return false;
        }
        return true;
    }
    /**
     * Seeks into the elements array for an element matching the given name.
     *
     * @remarks
     * Caching:
     * - Caches the existence of a property making subsequent look ups for non-existent properties return immediately
     * - Caches names mapped to elements to avoid reiterating the array and comparing the name again
     * - Caches the index at which an element has been found to prevent rechecking against elements already determined to belong to another name
     *
     * @param name - a basic latin string name of a BSON element
     * @returns
     */
    getElement(name) {
        const cachedElement = this.cache[name];
        if (cachedElement === false)
            return null;
        if (cachedElement != null) {
            return cachedElement;
        }
        // Numeric lookups are positional and only supported on array documents.
        if (typeof name === 'number') {
            if (this.isArray) {
                if (name < this.elements.length) {
                    const element = this.elements[name];
                    const cachedElement = { element, value: undefined };
                    this.cache[name] = cachedElement;
                    this.indexFound[name] = true;
                    return cachedElement;
                }
                else {
                    return null;
                }
            }
            else {
                return null;
            }
        }
        for (let index = 0; index < this.elements.length; index++) {
            const element = this.elements[index];
            // skip this element if it has already been associated with a name
            if (!(index in this.indexFound) && this.isElementName(name, element)) {
                const cachedElement = { element, value: undefined };
                this.cache[name] = cachedElement;
                this.indexFound[index] = true;
                return cachedElement;
            }
        }
        // Cache the miss so later lookups for this name return immediately.
        this.cache[name] = false;
        return null;
    }
    // Converts a raw element to a JS value when its BSON type matches `as`;
    // returns null on a type mismatch (and for null/undefined elements).
    toJSValue(element, as) {
        const type = element[BSONElementOffset.type];
        const offset = element[BSONElementOffset.offset];
        const length = element[BSONElementOffset.length];
        if (as !== type) {
            return null;
        }
        switch (as) {
            case bson_1.BSONType.null:
            case bson_1.BSONType.undefined:
                return null;
            case bson_1.BSONType.double:
                return (0, bson_1.getFloat64LE)(this.bson, offset);
            case bson_1.BSONType.int:
                return (0, bson_1.getInt32LE)(this.bson, offset);
            case bson_1.BSONType.long:
                return (0, bson_1.getBigInt64LE)(this.bson, offset);
            case bson_1.BSONType.bool:
                return Boolean(this.bson[offset]);
            case bson_1.BSONType.objectId:
                return new bson_1.ObjectId(this.bson.subarray(offset, offset + 12));
            case bson_1.BSONType.timestamp:
                return new bson_1.Timestamp((0, bson_1.getBigInt64LE)(this.bson, offset));
            case bson_1.BSONType.string:
                // Skip the int32 length prefix; drop the trailing NUL terminator.
                return (0, bson_1.toUTF8)(this.bson, offset + 4, offset + length - 1, false);
            case bson_1.BSONType.binData: {
                const totalBinarySize = (0, bson_1.getInt32LE)(this.bson, offset);
                const subType = this.bson[offset + 4];
                if (subType === 2) {
                    // Subtype 0x02 embeds its own int32 size which must agree with the outer size.
                    const subType2BinarySize = (0, bson_1.getInt32LE)(this.bson, offset + 1 + 4);
                    if (subType2BinarySize < 0)
                        throw new bson_1.BSONError('Negative binary type element size found for subtype 0x02');
                    if (subType2BinarySize > totalBinarySize - 4)
                        throw new bson_1.BSONError('Binary type with subtype 0x02 contains too long binary size');
                    if (subType2BinarySize < totalBinarySize - 4)
                        throw new bson_1.BSONError('Binary type with subtype 0x02 contains too short binary size');
                    return new bson_1.Binary(this.bson.subarray(offset + 1 + 4 + 4, offset + 1 + 4 + 4 + subType2BinarySize), 2);
                }
                return new bson_1.Binary(this.bson.subarray(offset + 1 + 4, offset + 1 + 4 + totalBinarySize), subType);
            }
            case bson_1.BSONType.date:
                // Pretend this is correct.
                return new Date(Number((0, bson_1.getBigInt64LE)(this.bson, offset)));
            case bson_1.BSONType.object:
                return new OnDemandDocument(this.bson, offset);
            case bson_1.BSONType.array:
                return new OnDemandDocument(this.bson, offset, true);
            default:
                throw new bson_1.BSONError(`Unsupported BSON type: ${as}`);
        }
    }
    /**
     * Returns the number of elements in this BSON document
     */
    size() {
        return this.elements.length;
    }
    /**
     * Checks for the existence of an element by name.
     *
     * @remarks
     * Uses `getElement` with the expectation that will populate caches such that a `has` call
     * followed by a `getElement` call will not repeat the cost paid by the first look up.
     *
     * @param name - element name
     */
    has(name) {
        const cachedElement = this.cache[name];
        if (cachedElement === false)
            return false;
        if (cachedElement != null)
            return true;
        return this.getElement(name) != null;
    }
    // Looks up `name`, parses it as BSON type `as`, memoizes and returns the JS value.
    // Missing or type-mismatched elements yield null, unless `required` is true (then throws).
    get(name, as, required) {
        const element = this.getElement(name);
        if (element == null) {
            if (required === true) {
                throw new bson_1.BSONError(`BSON element "${name}" is missing`);
            }
            else {
                return null;
            }
        }
        if (element.value == null) {
            const value = this.toJSValue(element.element, as);
            if (value == null) {
                if (required === true) {
                    throw new bson_1.BSONError(`BSON element "${name}" is missing`);
                }
                else {
                    return null;
                }
            }
            // It is important to never store null
            element.value = value;
        }
        return element.value;
    }
    // Reads `name` as a JS number, trying bool (coerced to 0/1), then long, int, double.
    getNumber(name, required) {
        const maybeBool = this.get(name, bson_1.BSONType.bool);
        const bool = maybeBool == null ? null : maybeBool ? 1 : 0;
        const maybeLong = this.get(name, bson_1.BSONType.long);
        const long = maybeLong == null ? null : Number(maybeLong);
        const result = bool ?? long ?? this.get(name, bson_1.BSONType.int) ?? this.get(name, bson_1.BSONType.double);
        if (required === true && result == null) {
            throw new bson_1.BSONError(`BSON element "${name}" is missing`);
        }
        return result;
    }
    /**
     * Deserialize this object, DOES NOT cache result so avoid multiple invocations
     * @param options - BSON deserialization options
     */
    toObject(options) {
        return (0, bson_1.deserialize)(this.bson, {
            ...options,
            index: this.offset,
            allowObjectSmallerThanBufferSize: true
        });
    }
    /** Returns this document's bytes only */
    toBytes() {
        // A BSON document's total size is its leading int32.
        const size = (0, bson_1.getInt32LE)(this.bson, this.offset);
        return this.bson.subarray(this.offset, this.offset + size);
    }
}
exports.OnDemandDocument = OnDemandDocument;
//# sourceMappingURL=document.js.map

File diff suppressed because one or more lines are too long

View file

@ -0,0 +1,315 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ClientBulkWriteCursorResponse = exports.ExplainedCursorResponse = exports.CursorResponse = exports.MongoDBResponse = void 0;
exports.isErrorResponse = isErrorResponse;
const bson_1 = require("../../bson");
const error_1 = require("../../error");
const utils_1 = require("../../utils");
const document_1 = require("./on_demand/document");
// Positions of the fields inside an element tuple from parseToElementsToArray:
// [type, nameOffset, nameLength, offset, length]
const BSONElementOffset = {
    type: 0,
    nameOffset: 1,
    nameLength: 2,
    offset: 3,
    length: 4
};
/**
 * Accepts a BSON payload and checks for an "ok: 0" element.
 * This utility is intended to prevent calling response class constructors
 * that expect the result to be a success and demand certain properties to exist.
 *
 * For example, a cursor response always expects a cursor embedded document.
 * In order to write the class such that the properties reflect that assertion (non-null)
 * we cannot invoke the subclass constructor if the BSON represents an error.
 *
 * @param bson - BSON document returned from the server
 * @param elements - pre-parsed element tuples for that document
 */
function isErrorResponse(bson, elements) {
    for (const element of elements) {
        // only a two-character name can be "ok"
        if (element[BSONElementOffset.nameLength] !== 2)
            continue;
        const nameOffset = element[BSONElementOffset.nameOffset];
        // 111 == "o", 107 == "k"
        if (bson[nameOffset] !== 111 || bson[nameOffset + 1] !== 107)
            continue;
        const valueOffset = element[BSONElementOffset.offset];
        const valueLength = element[BSONElementOffset.length];
        // Whatever the value's BSON type, a single non-zero byte means "ok: 1" (success)
        for (let i = valueOffset; i < valueOffset + valueLength; i++) {
            if (bson[i] !== 0x00)
                return false;
        }
        return true;
    }
    // a document with no "ok" element is treated as an error response
    return true;
}
/** @internal */
class MongoDBResponse extends document_1.OnDemandDocument {
    // Wraps OnDemandDocument.get so BSON parse/missing-element errors surface
    // as MongoUnexpectedServerResponseError with the original error as `cause`.
    get(name, as, required) {
        try {
            return super.get(name, as, required);
        }
        catch (cause) {
            throw new error_1.MongoUnexpectedServerResponseError(cause.message, { cause });
        }
    }
    static is(value) {
        return value instanceof MongoDBResponse;
    }
    // Parses the elements once; error documents are wrapped in the base class
    // (never `this`) so subclass constructors can assume a success-shaped body.
    static make(bson) {
        const elements = (0, bson_1.parseToElementsToArray)(bson, 0);
        const isError = isErrorResponse(bson, elements);
        return isError
            ? new MongoDBResponse(bson, 0, false, elements)
            : new this(bson, 0, false, elements);
    }
    // {ok:1}
    static { this.empty = new MongoDBResponse(new Uint8Array([13, 0, 0, 0, 16, 111, 107, 0, 1, 0, 0, 0, 0])); }
    /**
     * Returns true iff:
     * - ok is 0 and the top-level code === 50
     * - ok is 1 and the writeErrors array contains a code === 50
     * - ok is 1 and the writeConcern object contains a code === 50
     */
    get isMaxTimeExpiredError() {
        // {ok: 0, code: 50 ... }
        const isTopLevel = this.ok === 0 && this.code === error_1.MONGODB_ERROR_CODES.MaxTimeMSExpired;
        if (isTopLevel)
            return true;
        if (this.ok === 0)
            return false;
        // {ok: 1, writeConcernError: {code: 50 ... }}
        const isWriteConcern = this.get('writeConcernError', bson_1.BSONType.object)?.getNumber('code') ===
            error_1.MONGODB_ERROR_CODES.MaxTimeMSExpired;
        if (isWriteConcern)
            return true;
        const writeErrors = this.get('writeErrors', bson_1.BSONType.array);
        if (writeErrors?.size()) {
            for (let i = 0; i < writeErrors.size(); i++) {
                const isWriteError = writeErrors.get(i, bson_1.BSONType.object)?.getNumber('code') ===
                    error_1.MONGODB_ERROR_CODES.MaxTimeMSExpired;
                // {ok: 1, writeErrors: [{code: 50 ... }]}
                if (isWriteError)
                    return true;
            }
        }
        return false;
    }
    /**
     * Drivers can safely assume that the `recoveryToken` field is always a BSON document but drivers MUST NOT modify the
     * contents of the document.
     */
    get recoveryToken() {
        // deserialized without value promotion so the token round-trips to the server unchanged
        return (this.get('recoveryToken', bson_1.BSONType.object)?.toObject({
            promoteValues: false,
            promoteLongs: false,
            promoteBuffers: false,
            validation: { utf8: true }
        }) ?? null);
    }
    /**
     * The server creates a cursor in response to a snapshot find/aggregate command and reports atClusterTime within the cursor field in the response.
     * For the distinct command the server adds a top-level atClusterTime field to the response.
     * The atClusterTime field represents the timestamp of the read and is guaranteed to be majority committed.
     */
    get atClusterTime() {
        return (this.get('cursor', bson_1.BSONType.object)?.get('atClusterTime', bson_1.BSONType.timestamp) ??
            this.get('atClusterTime', bson_1.BSONType.timestamp));
    }
    get operationTime() {
        return this.get('operationTime', bson_1.BSONType.timestamp);
    }
    /** Normalizes whatever BSON value is "ok" to a JS number 1 or 0. */
    get ok() {
        return this.getNumber('ok') ? 1 : 0;
    }
    get $err() {
        return this.get('$err', bson_1.BSONType.string);
    }
    get errmsg() {
        return this.get('errmsg', bson_1.BSONType.string);
    }
    get code() {
        return this.getNumber('code');
    }
    get $clusterTime() {
        // Parsed lazily and memoized on `this.clusterTime`; null memoizes "absent".
        if (!('clusterTime' in this)) {
            const clusterTimeDoc = this.get('$clusterTime', bson_1.BSONType.object);
            if (clusterTimeDoc == null) {
                this.clusterTime = null;
                return null;
            }
            const clusterTime = clusterTimeDoc.get('clusterTime', bson_1.BSONType.timestamp, true);
            const signature = clusterTimeDoc.get('signature', bson_1.BSONType.object)?.toObject();
            // @ts-expect-error: `signature` is incorrectly typed. It is public API.
            this.clusterTime = { clusterTime, signature };
        }
        return this.clusterTime ?? null;
    }
    toObject(options) {
        // restrict caller options to serialize-relevant keys and a normalized utf8 validation setting
        const exactBSONOptions = {
            ...(0, bson_1.pluckBSONSerializeOptions)(options ?? {}),
            validation: (0, bson_1.parseUtf8ValidationOption)(options)
        };
        return super.toObject(exactBSONOptions);
    }
}
exports.MongoDBResponse = MongoDBResponse;
/** @internal */
class CursorResponse extends MongoDBResponse {
    constructor() {
        super(...arguments);
        // Lazily resolved batch array (cursor.firstBatch or cursor.nextBatch)
        this._batch = null;
        // Count of documents already consumed from this batch via shift()
        this.iterated = 0;
        this._encryptedBatch = null;
    }
    /**
     * This supports a feature of the FindCursor.
     * It is an optimization to avoid an extra getMore when the limit has been reached
     */
    static get emptyGetMore() {
        // note: this is a getter — each access constructs a fresh instance
        return new CursorResponse((0, bson_1.serialize)({ ok: 1, cursor: { id: 0n, nextBatch: [] } }));
    }
    static is(value) {
        // NOTE(review): `value === CursorResponse.emptyGetMore` compares against a
        // freshly constructed instance (see getter above), so it can never be true;
        // the instanceof check already covers such instances — confirm intent.
        return value instanceof CursorResponse || value === CursorResponse.emptyGetMore;
    }
    get cursor() {
        return this.get('cursor', bson_1.BSONType.object, true);
    }
    get id() {
        try {
            return bson_1.Long.fromBigInt(this.cursor.get('id', bson_1.BSONType.long, true));
        }
        catch (cause) {
            throw new error_1.MongoUnexpectedServerResponseError(cause.message, { cause });
        }
    }
    get ns() {
        const namespace = this.cursor.get('ns', bson_1.BSONType.string);
        if (namespace != null)
            return (0, utils_1.ns)(namespace);
        return null;
    }
    // Number of documents remaining (not yet shifted) in the current batch
    get length() {
        return Math.max(this.batchSize - this.iterated, 0);
    }
    get encryptedBatch() {
        if (this.encryptedResponse == null)
            return null;
        if (this._encryptedBatch != null)
            return this._encryptedBatch;
        const cursor = this.encryptedResponse?.get('cursor', bson_1.BSONType.object);
        if (cursor?.has('firstBatch'))
            this._encryptedBatch = cursor.get('firstBatch', bson_1.BSONType.array, true);
        else if (cursor?.has('nextBatch'))
            this._encryptedBatch = cursor.get('nextBatch', bson_1.BSONType.array, true);
        else
            throw new error_1.MongoUnexpectedServerResponseError('Cursor document did not contain a batch');
        return this._encryptedBatch;
    }
    // Resolves and memoizes the batch array; throws when neither batch key is present
    get batch() {
        if (this._batch != null)
            return this._batch;
        const cursor = this.cursor;
        if (cursor.has('firstBatch'))
            this._batch = cursor.get('firstBatch', bson_1.BSONType.array, true);
        else if (cursor.has('nextBatch'))
            this._batch = cursor.get('nextBatch', bson_1.BSONType.array, true);
        else
            throw new error_1.MongoUnexpectedServerResponseError('Cursor document did not contain a batch');
        return this._batch;
    }
    get batchSize() {
        return this.batch?.size();
    }
    get postBatchResumeToken() {
        // deserialized without value promotion so the token can be resent verbatim
        return (this.cursor.get('postBatchResumeToken', bson_1.BSONType.object)?.toObject({
            promoteValues: false,
            promoteLongs: false,
            promoteBuffers: false,
            validation: { utf8: true }
        }) ?? null);
    }
    // Returns the next document in the batch (raw bytes when options.raw), or null when exhausted
    shift(options) {
        if (this.iterated >= this.batchSize) {
            return null;
        }
        const result = this.batch.get(this.iterated, bson_1.BSONType.object, true) ?? null;
        const encryptedResult = this.encryptedBatch?.get(this.iterated, bson_1.BSONType.object, true) ?? null;
        this.iterated += 1;
        if (options?.raw) {
            return result.toBytes();
        }
        else {
            const object = result.toObject(options);
            if (encryptedResult) {
                // presumably merges the pre-decryption fields into the result; see utils.decorateDecryptionResult
                (0, utils_1.decorateDecryptionResult)(object, encryptedResult.toObject(options), true);
            }
            return object;
        }
    }
    // Marks the current batch as fully consumed
    clear() {
        this.iterated = this.batchSize;
    }
}
exports.CursorResponse = CursorResponse;
/**
* Explain responses have nothing to do with cursor responses
* This class serves to temporarily avoid refactoring how cursors handle
* explain responses which is to detect that the response is not cursor-like and return the explain
* result as the "first and only" document in the "batch" and end the "cursor"
*/
class ExplainedCursorResponse extends CursorResponse {
    constructor() {
        super(...arguments);
        // Marks this response as an explain result rather than a real cursor.
        this.isExplain = true;
        // An explain "cursor" yields exactly one document: the explain output.
        this._length = 1;
    }
    // Explain results never correspond to a live server-side cursor.
    get id() {
        return bson_1.Long.fromBigInt(0n);
    }
    get ns() {
        return null;
    }
    get batchSize() {
        return 0;
    }
    get length() {
        return this._length;
    }
    // Return the whole response document on the first call, null afterwards.
    shift(options) {
        if (this._length === 0) {
            return null;
        }
        this._length = this._length - 1;
        return this.toObject(options);
    }
}
exports.ExplainedCursorResponse = ExplainedCursorResponse;
/**
* Client bulk writes have some extra metadata at the top level that needs to be
* included in the result returned to the user.
*/
class ClientBulkWriteCursorResponse extends CursorResponse {
    // Reads a required int32 summary counter from the top-level document.
    #requiredCount(field) {
        return this.get(field, bson_1.BSONType.int, true);
    }
    get insertedCount() {
        return this.#requiredCount('nInserted');
    }
    get upsertedCount() {
        return this.#requiredCount('nUpserted');
    }
    get matchedCount() {
        return this.#requiredCount('nMatched');
    }
    get modifiedCount() {
        return this.#requiredCount('nModified');
    }
    get deletedCount() {
        return this.#requiredCount('nDeleted');
    }
    // Optional write concern error document; undefined when the server omitted it.
    get writeConcernError() {
        return this.get('writeConcernError', bson_1.BSONType.object, false);
    }
}
exports.ClientBulkWriteCursorResponse = ClientBulkWriteCursorResponse;
//# sourceMappingURL=responses.js.map

File diff suppressed because one or more lines are too long

35
node_modules/mongodb/lib/cmap/wire_protocol/shared.js generated vendored Normal file
View file

@ -0,0 +1,35 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.getReadPreference = getReadPreference;
exports.isSharded = isSharded;
const error_1 = require("../../error");
const read_preference_1 = require("../../read_preference");
const common_1 = require("../../sdam/common");
const topology_description_1 = require("../../sdam/topology_description");
function getReadPreference(options) {
    // Resolve the effective read preference, defaulting to primary, and
    // normalize string mode names into ReadPreference instances.
    const provided = options?.readPreference ?? read_preference_1.ReadPreference.primary;
    const readPreference = typeof provided === 'string'
        ? read_preference_1.ReadPreference.fromString(provided)
        : provided;
    if (!(readPreference instanceof read_preference_1.ReadPreference)) {
        throw new error_1.MongoInvalidArgumentError('Option "readPreference" must be a ReadPreference instance');
    }
    return readPreference;
}
function isSharded(topologyOrServer) {
    // Detect whether the target is (or contains) a mongos.
    const description = topologyOrServer?.description;
    if (description == null) {
        return false;
    }
    if (description.type === common_1.ServerType.Mongos) {
        return true;
    }
    // NOTE: This is incredibly inefficient, and should be removed once command construction
    // happens based on `Server` not `Topology`.
    if (description instanceof topology_description_1.TopologyDescription) {
        const servers = [...description.servers.values()];
        return servers.some((server) => server.type === common_1.ServerType.Mongos);
    }
    return false;
}
//# sourceMappingURL=shared.js.map

View file

@ -0,0 +1 @@
{"version":3,"file":"shared.js","sourceRoot":"","sources":["../../../src/cmap/wire_protocol/shared.ts"],"names":[],"mappings":";;AAaA,8CAeC;AAED,8BAiBC;AA/CD,uCAAwD;AACxD,2DAAgF;AAChF,8CAA+C;AAI/C,0EAAsE;AAOtE,SAAgB,iBAAiB,CAAC,OAA8B;IAC9D,oDAAoD;IACpD,IAAI,cAAc,GAAG,OAAO,EAAE,cAAc,IAAI,gCAAc,CAAC,OAAO,CAAC;IAEvE,IAAI,OAAO,cAAc,KAAK,QAAQ,EAAE,CAAC;QACvC,cAAc,GAAG,gCAAc,CAAC,UAAU,CAAC,cAAc,CAAC,CAAC;IAC7D,CAAC;IAED,IAAI,CAAC,CAAC,cAAc,YAAY,gCAAc,CAAC,EAAE,CAAC;QAChD,MAAM,IAAI,iCAAyB,CACjC,2DAA2D,CAC5D,CAAC;IACJ,CAAC;IAED,OAAO,cAAc,CAAC;AACxB,CAAC;AAED,SAAgB,SAAS,CAAC,gBAAiD;IACzE,IAAI,gBAAgB,IAAI,IAAI,EAAE,CAAC;QAC7B,OAAO,KAAK,CAAC;IACf,CAAC;IAED,IAAI,gBAAgB,CAAC,WAAW,IAAI,gBAAgB,CAAC,WAAW,CAAC,IAAI,KAAK,mBAAU,CAAC,MAAM,EAAE,CAAC;QAC5F,OAAO,IAAI,CAAC;IACd,CAAC;IAED,wFAAwF;IACxF,4CAA4C;IAC5C,IAAI,gBAAgB,CAAC,WAAW,IAAI,gBAAgB,CAAC,WAAW,YAAY,0CAAmB,EAAE,CAAC;QAChG,MAAM,OAAO,GAAwB,KAAK,CAAC,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAC,OAAO,CAAC,MAAM,EAAE,CAAC,CAAC;QAC/F,OAAO,OAAO,CAAC,IAAI,CAAC,CAAC,MAAyB,EAAE,EAAE,CAAC,MAAM,CAAC,IAAI,KAAK,mBAAU,CAAC,MAAM,CAAC,CAAC;IACxF,CAAC;IAED,OAAO,KAAK,CAAC;AACf,CAAC"}