This commit is contained in:
PLBXNebulia-Formation 2025-11-21 09:23:11 +01:00
commit d1c8cae2c1
1417 changed files with 326736 additions and 0 deletions

924
node_modules/mongodb/lib/cursor/abstract_cursor.js generated vendored Normal file
View file

@ -0,0 +1,924 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CursorTimeoutContext = exports.AbstractCursor = exports.CursorTimeoutMode = exports.CURSOR_FLAGS = void 0;
const stream_1 = require("stream");
const bson_1 = require("../bson");
const error_1 = require("../error");
const mongo_types_1 = require("../mongo_types");
const execute_operation_1 = require("../operations/execute_operation");
const get_more_1 = require("../operations/get_more");
const kill_cursors_1 = require("../operations/kill_cursors");
const read_concern_1 = require("../read_concern");
const read_preference_1 = require("../read_preference");
const sessions_1 = require("../sessions");
const timeout_1 = require("../timeout");
const utils_1 = require("../utils");
/** @public The boolean flag names accepted by {@link AbstractCursor#addCursorFlag}. */
exports.CURSOR_FLAGS = ['tailable', 'oplogReplay', 'noCursorTimeout', 'awaitData', 'exhaust', 'partial'];
/**
 * 'close' listener: drops the cursor (bound as `this`) from its client's
 * active-cursor set so MongoClient.close() no longer needs to track it.
 */
function removeActiveCursor() {
    const { activeCursors } = this.client.s;
    activeCursors.delete(this);
}
/**
 * @public
 * @experimental
 * Specifies how `timeoutMS` is applied to the cursor. Can be either `'cursorLifetime'` or `'iteration'`
 * When set to `'iteration'`, the deadline specified by `timeoutMS` applies to each call of
 * `cursor.next()`.
 * When set to `'cursorLifetime'`, the deadline applies to the life of the entire cursor.
 *
 * Depending on the type of cursor being used, this option has different default values.
 * For non-tailable cursors, this value defaults to `'cursorLifetime'`
 * For tailable cursors, this value defaults to `'iteration'` since tailable cursors, by
 * definition can have an arbitrarily long lifetime.
 *
 * @example
 * ```ts
 * const cursor = collection.find({}, {timeoutMS: 100, timeoutMode: 'iteration'});
 * for await (const doc of cursor) {
 *  // process doc
 *  // This will throw a timeout error if any of the iterator's `next()` calls takes more than 100ms, but
 *  // will continue to iterate successfully otherwise, regardless of the number of batches.
 * }
 * ```
 *
 * @example
 * ```ts
 * const cursor = collection.find({}, { timeoutMS: 1000, timeoutMode: 'cursorLifetime' });
 * const docs = await cursor.toArray(); // This entire line will throw a timeout error if all batches are not fetched and returned within 1000ms.
 * ```
 */
exports.CursorTimeoutMode = Object.freeze({
    ITERATION: 'iteration',
    LIFETIME: 'cursorLifetime'
});
/** @public */
class AbstractCursor extends mongo_types_1.TypedEventEmitter {
/** @event */
static { this.CLOSE = 'close'; }
/** @internal */
constructor(client, namespace, options = {}) {
super();
/** @internal */
this.documents = null;
/** @internal */
this.hasEmittedClose = false;
this.on('error', utils_1.noop);
if (!client.s.isMongoClient) {
throw new error_1.MongoRuntimeError('Cursor must be constructed with MongoClient');
}
this.cursorClient = client;
this.cursorNamespace = namespace;
this.cursorId = null;
this.initialized = false;
this.isClosed = false;
this.isKilled = false;
this.cursorOptions = {
readPreference: options.readPreference && options.readPreference instanceof read_preference_1.ReadPreference
? options.readPreference
: read_preference_1.ReadPreference.primary,
...(0, bson_1.pluckBSONSerializeOptions)(options),
timeoutMS: options?.timeoutContext?.csotEnabled()
? options.timeoutContext.timeoutMS
: options.timeoutMS,
tailable: options.tailable,
awaitData: options.awaitData
};
if (this.cursorOptions.timeoutMS != null) {
if (options.timeoutMode == null) {
if (options.tailable) {
if (options.awaitData) {
if (options.maxAwaitTimeMS != null &&
options.maxAwaitTimeMS >= this.cursorOptions.timeoutMS)
throw new error_1.MongoInvalidArgumentError('Cannot specify maxAwaitTimeMS >= timeoutMS for a tailable awaitData cursor');
}
this.cursorOptions.timeoutMode = exports.CursorTimeoutMode.ITERATION;
}
else {
this.cursorOptions.timeoutMode = exports.CursorTimeoutMode.LIFETIME;
}
}
else {
if (options.tailable && options.timeoutMode === exports.CursorTimeoutMode.LIFETIME) {
throw new error_1.MongoInvalidArgumentError("Cannot set tailable cursor's timeoutMode to LIFETIME");
}
this.cursorOptions.timeoutMode = options.timeoutMode;
}
}
else {
if (options.timeoutMode != null)
throw new error_1.MongoInvalidArgumentError('Cannot set timeoutMode without setting timeoutMS');
}
// Set for initial command
this.cursorOptions.omitMaxTimeMS =
this.cursorOptions.timeoutMS != null &&
((this.cursorOptions.timeoutMode === exports.CursorTimeoutMode.ITERATION &&
!this.cursorOptions.tailable) ||
(this.cursorOptions.tailable && !this.cursorOptions.awaitData));
const readConcern = read_concern_1.ReadConcern.fromOptions(options);
if (readConcern) {
this.cursorOptions.readConcern = readConcern;
}
if (typeof options.batchSize === 'number') {
this.cursorOptions.batchSize = options.batchSize;
}
// we check for undefined specifically here to allow falsy values
// eslint-disable-next-line no-restricted-syntax
if (options.comment !== undefined) {
this.cursorOptions.comment = options.comment;
}
if (typeof options.maxTimeMS === 'number') {
this.cursorOptions.maxTimeMS = options.maxTimeMS;
}
if (typeof options.maxAwaitTimeMS === 'number') {
this.cursorOptions.maxAwaitTimeMS = options.maxAwaitTimeMS;
}
this.cursorSession = options.session ?? null;
this.deserializationOptions = {
...this.cursorOptions,
validation: {
utf8: options?.enableUtf8Validation === false ? false : true
}
};
this.timeoutContext = options.timeoutContext;
this.signal = options.signal;
this.abortListener = (0, utils_1.addAbortListener)(this.signal, () => void this.close().then(undefined, utils_1.squashError));
this.trackCursor();
}
/**
* The cursor has no id until it receives a response from the initial cursor creating command.
*
* It is non-zero for as long as the database has an open cursor.
*
* The initiating command may receive a zero id if the entire result is in the `firstBatch`.
*/
get id() {
return this.cursorId ?? undefined;
}
/** @internal */
get isDead() {
return (this.cursorId?.isZero() ?? false) || this.isClosed || this.isKilled;
}
/** @internal */
get client() {
return this.cursorClient;
}
/** @internal */
get server() {
return this.selectedServer;
}
get namespace() {
return this.cursorNamespace;
}
get readPreference() {
return this.cursorOptions.readPreference;
}
get readConcern() {
return this.cursorOptions.readConcern;
}
/** @internal */
get session() {
return this.cursorSession;
}
set session(clientSession) {
this.cursorSession = clientSession;
}
/**
* The cursor is closed and all remaining locally buffered documents have been iterated.
*/
get closed() {
return this.isClosed && (this.documents?.length ?? 0) === 0;
}
/**
* A `killCursors` command was attempted on this cursor.
* This is performed if the cursor id is non zero.
*/
get killed() {
return this.isKilled;
}
get loadBalanced() {
return !!this.cursorClient.topology?.loadBalanced;
}
/**
* @experimental
* An alias for {@link AbstractCursor.close|AbstractCursor.close()}.
*/
async [Symbol.asyncDispose]() {
await this.close();
}
/** Adds cursor to client's tracking so it will be closed by MongoClient.close() */
trackCursor() {
this.cursorClient.s.activeCursors.add(this);
if (!this.listeners('close').includes(removeActiveCursor)) {
this.once('close', removeActiveCursor);
}
}
/** Returns current buffered documents length */
bufferedCount() {
return this.documents?.length ?? 0;
}
/** Returns current buffered documents */
readBufferedDocuments(number) {
const bufferedDocs = [];
const documentsToRead = Math.min(number ?? this.documents?.length ?? 0, this.documents?.length ?? 0);
for (let count = 0; count < documentsToRead; count++) {
const document = this.documents?.shift(this.deserializationOptions);
if (document != null) {
bufferedDocs.push(document);
}
}
return bufferedDocs;
}
async *[Symbol.asyncIterator]() {
this.signal?.throwIfAborted();
if (this.closed) {
return;
}
try {
while (true) {
if (this.isKilled) {
return;
}
if (this.closed) {
return;
}
if (this.cursorId != null && this.isDead && (this.documents?.length ?? 0) === 0) {
return;
}
const document = await this.next();
// eslint-disable-next-line no-restricted-syntax
if (document === null) {
return;
}
yield document;
this.signal?.throwIfAborted();
}
}
finally {
// Only close the cursor if it has not already been closed. This finally clause handles
// the case when a user would break out of a for await of loop early.
if (!this.isClosed) {
try {
await this.close();
}
catch (error) {
(0, utils_1.squashError)(error);
}
}
}
}
stream() {
const readable = new ReadableCursorStream(this);
const abortListener = (0, utils_1.addAbortListener)(this.signal, function () {
readable.destroy(this.reason);
});
readable.once('end', () => {
abortListener?.[utils_1.kDispose]();
});
return readable;
}
async hasNext() {
this.signal?.throwIfAborted();
if (this.cursorId === bson_1.Long.ZERO) {
return false;
}
if (this.cursorOptions.timeoutMode === exports.CursorTimeoutMode.ITERATION && this.cursorId != null) {
this.timeoutContext?.refresh();
}
try {
do {
if ((this.documents?.length ?? 0) !== 0) {
return true;
}
await this.fetchBatch();
} while (!this.isDead || (this.documents?.length ?? 0) !== 0);
}
finally {
if (this.cursorOptions.timeoutMode === exports.CursorTimeoutMode.ITERATION) {
this.timeoutContext?.clear();
}
}
return false;
}
/** Get the next available document from the cursor, returns null if no more documents are available. */
async next() {
this.signal?.throwIfAborted();
if (this.cursorId === bson_1.Long.ZERO) {
throw new error_1.MongoCursorExhaustedError();
}
if (this.cursorOptions.timeoutMode === exports.CursorTimeoutMode.ITERATION && this.cursorId != null) {
this.timeoutContext?.refresh();
}
try {
do {
const doc = this.documents?.shift(this.deserializationOptions);
if (doc != null) {
if (this.transform != null)
return await this.transformDocument(doc);
return doc;
}
await this.fetchBatch();
} while (!this.isDead || (this.documents?.length ?? 0) !== 0);
}
finally {
if (this.cursorOptions.timeoutMode === exports.CursorTimeoutMode.ITERATION) {
this.timeoutContext?.clear();
}
}
return null;
}
/**
* Try to get the next available document from the cursor or `null` if an empty batch is returned
*/
async tryNext() {
this.signal?.throwIfAborted();
if (this.cursorId === bson_1.Long.ZERO) {
throw new error_1.MongoCursorExhaustedError();
}
if (this.cursorOptions.timeoutMode === exports.CursorTimeoutMode.ITERATION && this.cursorId != null) {
this.timeoutContext?.refresh();
}
try {
let doc = this.documents?.shift(this.deserializationOptions);
if (doc != null) {
if (this.transform != null)
return await this.transformDocument(doc);
return doc;
}
await this.fetchBatch();
doc = this.documents?.shift(this.deserializationOptions);
if (doc != null) {
if (this.transform != null)
return await this.transformDocument(doc);
return doc;
}
}
finally {
if (this.cursorOptions.timeoutMode === exports.CursorTimeoutMode.ITERATION) {
this.timeoutContext?.clear();
}
}
return null;
}
/**
* Iterates over all the documents for this cursor using the iterator, callback pattern.
*
* If the iterator returns `false`, iteration will stop.
*
* @param iterator - The iteration callback.
* @deprecated - Will be removed in a future release. Use for await...of instead.
*/
async forEach(iterator) {
this.signal?.throwIfAborted();
if (typeof iterator !== 'function') {
throw new error_1.MongoInvalidArgumentError('Argument "iterator" must be a function');
}
for await (const document of this) {
const result = iterator(document);
if (result === false) {
break;
}
}
}
/**
* Frees any client-side resources used by the cursor.
*/
async close(options) {
await this.cleanup(options?.timeoutMS);
}
/**
* Returns an array of documents. The caller is responsible for making sure that there
* is enough memory to store the results. Note that the array only contains partial
* results when this cursor had been previously accessed. In that case,
* cursor.rewind() can be used to reset the cursor.
*/
async toArray() {
this.signal?.throwIfAborted();
const array = [];
// at the end of the loop (since readBufferedDocuments is called) the buffer will be empty
// then, the 'await of' syntax will run a getMore call
for await (const document of this) {
array.push(document);
const docs = this.readBufferedDocuments();
if (this.transform != null) {
for (const doc of docs) {
array.push(await this.transformDocument(doc));
}
}
else {
// Note: previous versions of this logic used `array.push(...)`, which adds each item
// to the callstack. For large arrays, this can exceed the maximum call size.
for (const doc of docs) {
array.push(doc);
}
}
}
return array;
}
/**
* Add a cursor flag to the cursor
*
* @param flag - The flag to set, must be one of following ['tailable', 'oplogReplay', 'noCursorTimeout', 'awaitData', 'partial' -.
* @param value - The flag boolean value.
*/
addCursorFlag(flag, value) {
this.throwIfInitialized();
if (!exports.CURSOR_FLAGS.includes(flag)) {
throw new error_1.MongoInvalidArgumentError(`Flag ${flag} is not one of ${exports.CURSOR_FLAGS}`);
}
if (typeof value !== 'boolean') {
throw new error_1.MongoInvalidArgumentError(`Flag ${flag} must be a boolean value`);
}
this.cursorOptions[flag] = value;
return this;
}
/**
* Map all documents using the provided function
* If there is a transform set on the cursor, that will be called first and the result passed to
* this function's transform.
*
* @remarks
*
* **Note** Cursors use `null` internally to indicate that there are no more documents in the cursor. Providing a mapping
* function that maps values to `null` will result in the cursor closing itself before it has finished iterating
* all documents. This will **not** result in a memory leak, just surprising behavior. For example:
*
* ```typescript
* const cursor = collection.find({});
* cursor.map(() => null);
*
* const documents = await cursor.toArray();
* // documents is always [], regardless of how many documents are in the collection.
* ```
*
* Other falsey values are allowed:
*
* ```typescript
* const cursor = collection.find({});
* cursor.map(() => '');
*
* const documents = await cursor.toArray();
* // documents is now an array of empty strings
* ```
*
* **Note for Typescript Users:** adding a transform changes the return type of the iteration of this cursor,
* it **does not** return a new instance of a cursor. This means when calling map,
* you should always assign the result to a new variable in order to get a correctly typed cursor variable.
* Take note of the following example:
*
* @example
* ```typescript
* const cursor: FindCursor<Document> = coll.find();
* const mappedCursor: FindCursor<number> = cursor.map(doc => Object.keys(doc).length);
* const keyCounts: number[] = await mappedCursor.toArray(); // cursor.toArray() still returns Document[]
* ```
* @param transform - The mapping transformation method.
*/
map(transform) {
this.throwIfInitialized();
const oldTransform = this.transform;
if (oldTransform) {
this.transform = doc => {
return transform(oldTransform(doc));
};
}
else {
this.transform = transform;
}
return this;
}
/**
* Set the ReadPreference for the cursor.
*
* @param readPreference - The new read preference for the cursor.
*/
withReadPreference(readPreference) {
this.throwIfInitialized();
if (readPreference instanceof read_preference_1.ReadPreference) {
this.cursorOptions.readPreference = readPreference;
}
else if (typeof readPreference === 'string') {
this.cursorOptions.readPreference = read_preference_1.ReadPreference.fromString(readPreference);
}
else {
throw new error_1.MongoInvalidArgumentError(`Invalid read preference: ${readPreference}`);
}
return this;
}
/**
* Set the ReadPreference for the cursor.
*
* @param readPreference - The new read preference for the cursor.
*/
withReadConcern(readConcern) {
this.throwIfInitialized();
const resolvedReadConcern = read_concern_1.ReadConcern.fromOptions({ readConcern });
if (resolvedReadConcern) {
this.cursorOptions.readConcern = resolvedReadConcern;
}
return this;
}
/**
* Set a maxTimeMS on the cursor query, allowing for hard timeout limits on queries (Only supported on MongoDB 2.6 or higher)
*
* @param value - Number of milliseconds to wait before aborting the query.
*/
maxTimeMS(value) {
this.throwIfInitialized();
if (typeof value !== 'number') {
throw new error_1.MongoInvalidArgumentError('Argument for maxTimeMS must be a number');
}
this.cursorOptions.maxTimeMS = value;
return this;
}
/**
* Set the batch size for the cursor.
*
* @param value - The number of documents to return per batch. See {@link https://www.mongodb.com/docs/manual/reference/command/find/|find command documentation}.
*/
batchSize(value) {
this.throwIfInitialized();
if (this.cursorOptions.tailable) {
throw new error_1.MongoTailableCursorError('Tailable cursor does not support batchSize');
}
if (typeof value !== 'number') {
throw new error_1.MongoInvalidArgumentError('Operation "batchSize" requires an integer');
}
this.cursorOptions.batchSize = value;
return this;
}
/**
* Rewind this cursor to its uninitialized state. Any options that are present on the cursor will
* remain in effect. Iterating this cursor will cause new queries to be sent to the server, even
* if the resultant data has already been retrieved by this cursor.
*/
rewind() {
if (this.timeoutContext && this.timeoutContext.owner !== this) {
throw new error_1.MongoAPIError(`Cannot rewind cursor that does not own its timeout context.`);
}
if (!this.initialized) {
return;
}
this.cursorId = null;
this.documents?.clear();
this.timeoutContext?.clear();
this.timeoutContext = undefined;
this.isClosed = false;
this.isKilled = false;
this.initialized = false;
this.hasEmittedClose = false;
this.trackCursor();
// We only want to end this session if we created it, and it hasn't ended yet
if (this.cursorSession?.explicit === false) {
if (!this.cursorSession.hasEnded) {
this.cursorSession.endSession().then(undefined, utils_1.squashError);
}
this.cursorSession = null;
}
}
/** @internal */
async getMore() {
if (this.cursorId == null) {
throw new error_1.MongoRuntimeError('Unexpected null cursor id. A cursor creating command should have set this');
}
if (this.selectedServer == null) {
throw new error_1.MongoRuntimeError('Unexpected null selectedServer. A cursor creating command should have set this');
}
if (this.cursorSession == null) {
throw new error_1.MongoRuntimeError('Unexpected null session. A cursor creating command should have set this');
}
const getMoreOptions = {
...this.cursorOptions,
session: this.cursorSession,
batchSize: this.cursorOptions.batchSize
};
const getMoreOperation = new get_more_1.GetMoreOperation(this.cursorNamespace, this.cursorId, this.selectedServer, getMoreOptions);
return await (0, execute_operation_1.executeOperation)(this.cursorClient, getMoreOperation, this.timeoutContext);
}
/**
* @internal
*
* This function is exposed for the unified test runner's createChangeStream
* operation. We cannot refactor to use the abstract _initialize method without
* a significant refactor.
*/
async cursorInit() {
if (this.cursorOptions.timeoutMS != null) {
this.timeoutContext ??= new CursorTimeoutContext(timeout_1.TimeoutContext.create({
serverSelectionTimeoutMS: this.client.s.options.serverSelectionTimeoutMS,
timeoutMS: this.cursorOptions.timeoutMS
}), this);
}
try {
this.cursorSession ??= this.cursorClient.startSession({ owner: this, explicit: false });
const state = await this._initialize(this.cursorSession);
// Set omitMaxTimeMS to the value needed for subsequent getMore calls
this.cursorOptions.omitMaxTimeMS = this.cursorOptions.timeoutMS != null;
const response = state.response;
this.selectedServer = state.server;
this.cursorId = response.id;
this.cursorNamespace = response.ns ?? this.namespace;
this.documents = response;
this.initialized = true; // the cursor is now initialized, even if it is dead
}
catch (error) {
// the cursor is now initialized, even if an error occurred
this.initialized = true;
await this.cleanup(undefined, error);
throw error;
}
if (this.isDead) {
await this.cleanup();
}
return;
}
/** @internal Attempt to obtain more documents */
async fetchBatch() {
if (this.isClosed) {
return;
}
if (this.isDead) {
// if the cursor is dead, we clean it up
// cleanupCursor should never throw, but if it does it indicates a bug in the driver
// and we should surface the error
await this.cleanup();
return;
}
if (this.cursorId == null) {
await this.cursorInit();
// If the cursor died or returned documents, return
if ((this.documents?.length ?? 0) !== 0 || this.isDead)
return;
}
// Otherwise, run a getMore
try {
const response = await this.getMore();
this.cursorId = response.id;
this.documents = response;
}
catch (error) {
try {
await this.cleanup(undefined, error);
}
catch (cleanupError) {
// `cleanupCursor` should never throw, squash and throw the original error
(0, utils_1.squashError)(cleanupError);
}
throw error;
}
if (this.isDead) {
// If we successfully received a response from a cursor BUT the cursor indicates that it is exhausted,
// we intentionally clean up the cursor to release its session back into the pool before the cursor
// is iterated. This prevents a cursor that is exhausted on the server from holding
// onto a session indefinitely until the AbstractCursor is iterated.
//
// cleanupCursorAsync should never throw, but if it does it indicates a bug in the driver
// and we should surface the error
await this.cleanup();
}
}
/** @internal */
async cleanup(timeoutMS, error) {
this.abortListener?.[utils_1.kDispose]();
this.isClosed = true;
const timeoutContextForKillCursors = () => {
if (timeoutMS != null) {
this.timeoutContext?.clear();
return new CursorTimeoutContext(timeout_1.TimeoutContext.create({
serverSelectionTimeoutMS: this.client.s.options.serverSelectionTimeoutMS,
timeoutMS
}), this);
}
else {
return this.timeoutContext?.refreshed();
}
};
const withEmitClose = async (fn) => {
try {
await fn();
}
finally {
this.emitClose();
}
};
const close = async () => {
// if no session has been defined on the cursor, the cursor was never initialized
// or the cursor was re-wound and never re-iterated. In either case, we
// 1. do not need to end the session (there is no session after all)
// 2. do not need to kill the cursor server-side
const session = this.cursorSession;
if (!session)
return;
try {
if (!this.isKilled &&
this.cursorId &&
!this.cursorId.isZero() &&
this.cursorNamespace &&
this.selectedServer &&
!session.hasEnded) {
this.isKilled = true;
const cursorId = this.cursorId;
this.cursorId = bson_1.Long.ZERO;
await (0, execute_operation_1.executeOperation)(this.cursorClient, new kill_cursors_1.KillCursorsOperation(cursorId, this.cursorNamespace, this.selectedServer, {
session
}), timeoutContextForKillCursors());
}
}
catch (error) {
(0, utils_1.squashError)(error);
}
finally {
if (session.owner === this) {
await session.endSession({ error });
}
if (!session.inTransaction()) {
(0, sessions_1.maybeClearPinnedConnection)(session, { error });
}
}
};
await withEmitClose(close);
}
/** @internal */
emitClose() {
try {
if (!this.hasEmittedClose && ((this.documents?.length ?? 0) === 0 || this.isClosed)) {
// @ts-expect-error: CursorEvents is generic so Parameters<CursorEvents["close"]> may not be assignable to `[]`. Not sure how to require extenders do not add parameters.
this.emit('close');
}
}
finally {
this.hasEmittedClose = true;
}
}
/** @internal */
async transformDocument(document) {
if (this.transform == null)
return document;
try {
const transformedDocument = this.transform(document);
// eslint-disable-next-line no-restricted-syntax
if (transformedDocument === null) {
const TRANSFORM_TO_NULL_ERROR = 'Cursor returned a `null` document, but the cursor is not exhausted. Mapping documents to `null` is not supported in the cursor transform.';
throw new error_1.MongoAPIError(TRANSFORM_TO_NULL_ERROR);
}
return transformedDocument;
}
catch (transformError) {
try {
await this.close();
}
catch (closeError) {
(0, utils_1.squashError)(closeError);
}
throw transformError;
}
}
/** @internal */
throwIfInitialized() {
if (this.initialized)
throw new error_1.MongoCursorInUseError();
}
}
exports.AbstractCursor = AbstractCursor;
/**
 * Readable (objectMode) stream adapter over an AbstractCursor.
 * Pulls one document at a time via cursor.next(), respecting back-pressure
 * (highWaterMark: 1), and closes the underlying cursor on destroy.
 * @internal
 */
class ReadableCursorStream extends stream_1.Readable {
    constructor(cursor) {
        super({
            objectMode: true,
            autoDestroy: false,
            highWaterMark: 1
        });
        // Ensures only one next() pull-chain runs at a time.
        this._readInProgress = false;
        this._cursor = cursor;
    }
    // eslint-disable-next-line @typescript-eslint/no-unused-vars
    _read(size) {
        if (!this._readInProgress) {
            this._readInProgress = true;
            this._readNext();
        }
    }
    _destroy(error, callback) {
        // Close the cursor first; report the original error unless close itself fails.
        this._cursor.close().then(() => callback(error), closeError => callback(closeError));
    }
    _readNext() {
        // A zero cursor id means the server-side cursor is already exhausted.
        if (this._cursor.id === bson_1.Long.ZERO) {
            this.push(null);
            return;
        }
        this._cursor
            .next()
            .then(
        // result from next()
        result => {
            if (result == null) {
                this.push(null);
            }
            else if (this.destroyed) {
                this._cursor.close().then(undefined, utils_1.squashError);
            }
            else {
                // push() returning true means downstream can accept more; keep pulling.
                if (this.push(result)) {
                    return this._readNext();
                }
                this._readInProgress = false;
            }
        }, 
        // error from next()
        err => {
            // NOTE: This is questionable, but we have a test backing the behavior. It seems the
            //       desired behavior is that a stream ends cleanly when a user explicitly closes
            //       a client during iteration. Alternatively, we could do the "right" thing and
            //       propagate the error message by removing this special case.
            if (err.message.match(/server is closed/)) {
                this._cursor.close().then(undefined, utils_1.squashError);
                return this.push(null);
            }
            // NOTE: This is also perhaps questionable. The rationale here is that these errors tend
            //       to be "operation was interrupted", where a cursor has been closed but there is an
            //       active getMore in-flight. This used to check if the cursor was killed but once
            //       that changed to happen in cleanup legitimate errors would not destroy the
            //       stream. There are change streams test specifically test these cases.
            if (err.message.match(/operation was interrupted/)) {
                return this.push(null);
            }
            // NOTE: The two above checks on the message of the error will cause a null to be pushed
            //       to the stream, thus closing the stream before the destroy call happens. This means
            //       that either of those error messages on a change stream will not get a proper
            //       'error' event to be emitted (the error passed to destroy). Change stream resumability
            //       relies on that error event to be emitted to create its new cursor and thus was not
            //       working on 4.4 servers because the error emitted on failover was "interrupted at
            //       shutdown" while on 5.0+ it is "The server is in quiesce mode and will shut down".
            //       See NODE-4475.
            return this.destroy(err);
        })
            // if either of the above handlers throw
            .catch(error => {
            this._readInProgress = false;
            this.destroy(error);
        });
    }
}
/**
 * @internal
 * The cursor timeout context is a wrapper around a timeout context
 * that keeps track of the "owner" of the cursor. For timeout contexts
 * instantiated inside a cursor, the owner will be the cursor.
 *
 * All timeout behavior is exactly the same as the wrapped timeout context's.
 */
class CursorTimeoutContext extends timeout_1.TimeoutContext {
    constructor(timeoutContext, owner) {
        super();
        this.timeoutContext = timeoutContext;
        this.owner = owner;
    }
    get serverSelectionTimeout() {
        return this.timeoutContext.serverSelectionTimeout;
    }
    get connectionCheckoutTimeout() {
        return this.timeoutContext.connectionCheckoutTimeout;
    }
    get clearServerSelectionTimeout() {
        return this.timeoutContext.clearServerSelectionTimeout;
    }
    get timeoutForSocketWrite() {
        return this.timeoutContext.timeoutForSocketWrite;
    }
    get timeoutForSocketRead() {
        return this.timeoutContext.timeoutForSocketRead;
    }
    csotEnabled() {
        return this.timeoutContext.csotEnabled();
    }
    refresh() {
        // NOTE(review): a symbol owner appears to act as a sentinel for a non-cursor
        // owner whose deadline must not be reset here — confirm against the callers.
        if (typeof this.owner !== 'symbol')
            return this.timeoutContext.refresh();
    }
    clear() {
        // Same symbol-owner guard as refresh(): symbol-owned contexts are never cleared here.
        if (typeof this.owner !== 'symbol')
            return this.timeoutContext.clear();
    }
    get maxTimeMS() {
        return this.timeoutContext.maxTimeMS;
    }
    get timeoutMS() {
        // Only meaningful when CSOT is enabled on the wrapped context.
        return this.timeoutContext.csotEnabled() ? this.timeoutContext.timeoutMS : null;
    }
    refreshed() {
        // Returns a new wrapper around a refreshed copy; the original is untouched.
        return new CursorTimeoutContext(this.timeoutContext.refreshed(), this.owner);
    }
    addMaxTimeMSToCommand(command, options) {
        this.timeoutContext.addMaxTimeMSToCommand(command, options);
    }
    getSocketTimeoutMS() {
        return this.timeoutContext.getSocketTimeoutMS();
    }
}
exports.CursorTimeoutContext = CursorTimeoutContext;
//# sourceMappingURL=abstract_cursor.js.map

File diff suppressed because one or more lines are too long

164
node_modules/mongodb/lib/cursor/aggregation_cursor.js generated vendored Normal file
View file

@ -0,0 +1,164 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.AggregationCursor = void 0;
const error_1 = require("../error");
const explain_1 = require("../explain");
const aggregate_1 = require("../operations/aggregate");
const execute_operation_1 = require("../operations/execute_operation");
const utils_1 = require("../utils");
const abstract_cursor_1 = require("./abstract_cursor");
const explainable_cursor_1 = require("./explainable_cursor");
/**
* The **AggregationCursor** class is an internal class that embodies an aggregation cursor on MongoDB
* allowing for iteration over the results returned from the underlying query. It supports
* one by one document iteration, conversion to an array or can be iterated as a Node 4.X
* or higher stream
* @public
*/
class AggregationCursor extends explainable_cursor_1.ExplainableCursor {
/** @internal */
constructor(client, namespace, pipeline = [], options = {}) {
    super(client, namespace, options);
    this.pipeline = pipeline;
    this.aggregateOptions = options;
    // A trailing $merge/$out writes results server-side; presumably a per-iteration
    // deadline cannot bound that work, so the combination is rejected up front.
    const lastStage = this.pipeline[this.pipeline.length - 1];
    if (this.cursorOptions.timeoutMS != null &&
        this.cursorOptions.timeoutMode === abstract_cursor_1.CursorTimeoutMode.ITERATION &&
        (lastStage?.$merge != null || lastStage?.$out != null))
        throw new error_1.MongoAPIError('Cannot use $out or $merge stage with ITERATION timeoutMode');
}
/** Returns a new uninitialized copy of this cursor with the same pipeline and options, minus the session. */
clone() {
    const clonedOptions = (0, utils_1.mergeOptions)({}, this.aggregateOptions);
    delete clonedOptions.session;
    return new AggregationCursor(this.client, this.namespace, this.pipeline, {
        ...clonedOptions
    });
}
/** Applies a per-document transform; delegates to {@link AbstractCursor.map}. */
map(transform) {
    return super.map(transform);
}
/**
 * @internal
 * Runs the initial aggregate command and returns the server/session/response
 * triple the base cursor uses to drive iteration.
 */
async _initialize(session) {
    const options = {
        ...this.aggregateOptions,
        ...this.cursorOptions,
        session,
        signal: this.signal
    };
    if (options.explain) {
        try {
            (0, explain_1.validateExplainTimeoutOptions)(options, explain_1.Explain.fromOptions(options));
        }
        catch {
            throw new error_1.MongoAPIError('timeoutMS cannot be used with explain when explain is specified in aggregateOptions');
        }
    }
    const aggregateOperation = new aggregate_1.AggregateOperation(this.namespace, this.pipeline, options);
    const response = await (0, execute_operation_1.executeOperation)(this.client, aggregateOperation, this.timeoutContext);
    return { server: aggregateOperation.server, session, response };
}
/** Executes the aggregation in explain mode and returns the first (explain) document of the response. */
async explain(verbosity, options) {
    const { explain, timeout } = this.resolveExplainTimeoutOptions(verbosity, options);
    return (await (0, execute_operation_1.executeOperation)(this.client, new aggregate_1.AggregateOperation(this.namespace, this.pipeline, {
        ...this.aggregateOptions, // NOTE: order matters here, we may need to refine this
        ...this.cursorOptions,
        ...timeout,
        explain: explain ?? true
    }))).shift(this.deserializationOptions);
}
addStage(stage) {
this.throwIfInitialized();
if (this.cursorOptions.timeoutMS != null &&
this.cursorOptions.timeoutMode === abstract_cursor_1.CursorTimeoutMode.ITERATION &&
(stage.$out != null || stage.$merge != null)) {
throw new error_1.MongoAPIError('Cannot use $out or $merge stage with ITERATION timeoutMode');
}
this.pipeline.push(stage);
return this;
}
group($group) {
return this.addStage({ $group });
}
/** Add a limit stage to the aggregation pipeline */
limit($limit) {
return this.addStage({ $limit });
}
/** Add a match stage to the aggregation pipeline */
match($match) {
return this.addStage({ $match });
}
/** Add an out stage to the aggregation pipeline */
out($out) {
return this.addStage({ $out });
}
/**
* Add a project stage to the aggregation pipeline
*
* @remarks
* In order to strictly type this function you must provide an interface
* that represents the effect of your projection on the result documents.
*
* By default chaining a projection to your cursor changes the returned type to the generic {@link Document} type.
* You should specify a parameterized type to have assertions on your final results.
*
* @example
* ```typescript
* // Best way
* const docs: AggregationCursor<{ a: number }> = cursor.project<{ a: number }>({ _id: 0, a: true });
* // Flexible way
* const docs: AggregationCursor<Document> = cursor.project({ _id: 0, a: true });
* ```
*
* @remarks
* In order to strictly type this function you must provide an interface
* that represents the effect of your projection on the result documents.
*
* **Note for Typescript Users:** adding a transform changes the return type of the iteration of this cursor,
* it **does not** return a new instance of a cursor. This means when calling project,
* you should always assign the result to a new variable in order to get a correctly typed cursor variable.
* Take note of the following example:
*
* @example
* ```typescript
* const cursor: AggregationCursor<{ a: number; b: string }> = coll.aggregate([]);
* const projectCursor = cursor.project<{ a: number }>({ _id: 0, a: true });
* const aPropOnlyArray: {a: number}[] = await projectCursor.toArray();
*
* // or always use chaining and save the final cursor
*
* const cursor = coll.aggregate().project<{ a: string }>({
* _id: 0,
* a: { $convert: { input: '$a', to: 'string' }
* }});
* ```
*/
project($project) {
return this.addStage({ $project });
}
/** Add a lookup stage to the aggregation pipeline */
lookup($lookup) {
return this.addStage({ $lookup });
}
/** Add a redact stage to the aggregation pipeline */
redact($redact) {
return this.addStage({ $redact });
}
/** Add a skip stage to the aggregation pipeline */
skip($skip) {
return this.addStage({ $skip });
}
/** Add a sort stage to the aggregation pipeline */
sort($sort) {
return this.addStage({ $sort });
}
/** Add a unwind stage to the aggregation pipeline */
unwind($unwind) {
return this.addStage({ $unwind });
}
/** Add a geoNear stage to the aggregation pipeline */
geoNear($geoNear) {
return this.addStage({ $geoNear });
}
}
exports.AggregationCursor = AggregationCursor;
//# sourceMappingURL=aggregation_cursor.js.map

View file

@ -0,0 +1 @@
{"version":3,"file":"aggregation_cursor.js","sourceRoot":"","sources":["../../src/cursor/aggregation_cursor.ts"],"names":[],"mappings":";;;AACA,oCAAyC;AACzC,wCAKoB;AAGpB,uDAAoF;AACpF,uEAAmE;AAGnE,oCAA+D;AAC/D,uDAI2B;AAC3B,6DAAyD;AAKzD;;;;;;GAMG;AACH,MAAa,iBAAiC,SAAQ,sCAA0B;IAK9E,gBAAgB;IAChB,YACE,MAAmB,EACnB,SAA2B,EAC3B,WAAuB,EAAE,EACzB,UAAwC,EAAE;QAE1C,KAAK,CAAC,MAAM,EAAE,SAAS,EAAE,OAAO,CAAC,CAAC;QAElC,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QACzB,IAAI,CAAC,gBAAgB,GAAG,OAAO,CAAC;QAEhC,MAAM,SAAS,GAAyB,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;QAEhF,IACE,IAAI,CAAC,aAAa,CAAC,SAAS,IAAI,IAAI;YACpC,IAAI,CAAC,aAAa,CAAC,WAAW,KAAK,mCAAiB,CAAC,SAAS;YAC9D,CAAC,SAAS,EAAE,MAAM,IAAI,IAAI,IAAI,SAAS,EAAE,IAAI,IAAI,IAAI,CAAC;YAEtD,MAAM,IAAI,qBAAa,CAAC,4DAA4D,CAAC,CAAC;IAC1F,CAAC;IAED,KAAK;QACH,MAAM,aAAa,GAAG,IAAA,oBAAY,EAAC,EAAE,EAAE,IAAI,CAAC,gBAAgB,CAAC,CAAC;QAC9D,OAAO,aAAa,CAAC,OAAO,CAAC;QAC7B,OAAO,IAAI,iBAAiB,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,QAAQ,EAAE;YACvE,GAAG,aAAa;SACjB,CAAC,CAAC;IACL,CAAC;IAEQ,GAAG,CAAI,SAA8B;QAC5C,OAAO,KAAK,CAAC,GAAG,CAAC,SAAS,CAAyB,CAAC;IACtD,CAAC;IAED,gBAAgB;IAChB,KAAK,CAAC,WAAW,CAAC,OAAsB;QACtC,MAAM,OAAO,GAAG;YACd,GAAG,IAAI,CAAC,gBAAgB;YACxB,GAAG,IAAI,CAAC,aAAa;YACrB,OAAO;YACP,MAAM,EAAE,IAAI,CAAC,MAAM;SACpB,CAAC;QACF,IAAI,OAAO,CAAC,OAAO,EAAE,CAAC;YACpB,IAAI,CAAC;gBACH,IAAA,uCAA6B,EAAC,OAAO,EAAE,iBAAO,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,CAAC;YACvE,CAAC;YAAC,MAAM,CAAC;gBACP,MAAM,IAAI,qBAAa,CACrB,qFAAqF,CACtF,CAAC;YACJ,CAAC;QACH,CAAC;QAED,MAAM,kBAAkB,GAAG,IAAI,8BAAkB,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;QAE1F,MAAM,QAAQ,GAAG,MAAM,IAAA,oCAAgB,EAAC,IAAI,CAAC,MAAM,EAAE,kBAAkB,EAAE,IAAI,CAAC,cAAc,CAAC,CAAC;QAE9F,OAAO,EAAE,MAAM,EAAE,kBAAkB,CAAC,MAAM,EAAE,OAAO,EAAE,QAAQ,EAAE,CAAC;IAClE,CAAC;IAUD,KAAK,CAAC,OAAO,CACX,SAAiF,EACjF,OAAgC;QAEhC,MAAM,EAAE,OAAO,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,4BAA4B,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;QACnF,OAAO,CACL,MAAM,IAAA,oCAAgB,EACpB,IAAI,CAAC,MAAM,EACX,IAAI,8BAAkB,CAAC,IAAI,CAAC,SAAS,EAA
E,IAAI,CAAC,QAAQ,EAAE;YACpD,GAAG,IAAI,CAAC,gBAAgB,EAAE,uDAAuD;YACjF,GAAG,IAAI,CAAC,aAAa;YACrB,GAAG,OAAO;YACV,OAAO,EAAE,OAAO,IAAI,IAAI;SACzB,CAAC,CACH,CACF,CAAC,KAAK,CAAC,IAAI,CAAC,sBAAsB,CAAC,CAAC;IACvC,CAAC;IAgBD,QAAQ,CAAe,KAAe;QACpC,IAAI,CAAC,kBAAkB,EAAE,CAAC;QAC1B,IACE,IAAI,CAAC,aAAa,CAAC,SAAS,IAAI,IAAI;YACpC,IAAI,CAAC,aAAa,CAAC,WAAW,KAAK,mCAAiB,CAAC,SAAS;YAC9D,CAAC,KAAK,CAAC,IAAI,IAAI,IAAI,IAAI,KAAK,CAAC,MAAM,IAAI,IAAI,CAAC,EAC5C,CAAC;YACD,MAAM,IAAI,qBAAa,CAAC,4DAA4D,CAAC,CAAC;QACxF,CAAC;QACD,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QAC1B,OAAO,IAAuC,CAAC;IACjD,CAAC;IAID,KAAK,CAAC,MAAgB;QACpB,OAAO,IAAI,CAAC,QAAQ,CAAC,EAAE,MAAM,EAAE,CAAC,CAAC;IACnC,CAAC;IAED,oDAAoD;IACpD,KAAK,CAAC,MAAc;QAClB,OAAO,IAAI,CAAC,QAAQ,CAAC,EAAE,MAAM,EAAE,CAAC,CAAC;IACnC,CAAC;IAED,oDAAoD;IACpD,KAAK,CAAC,MAAgB;QACpB,OAAO,IAAI,CAAC,QAAQ,CAAC,EAAE,MAAM,EAAE,CAAC,CAAC;IACnC,CAAC;IAED,mDAAmD;IACnD,GAAG,CAAC,IAA2C;QAC7C,OAAO,IAAI,CAAC,QAAQ,CAAC,EAAE,IAAI,EAAE,CAAC,CAAC;IACjC,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OAwCG;IACH,OAAO,CAAgC,QAAkB;QACvD,OAAO,IAAI,CAAC,QAAQ,CAAI,EAAE,QAAQ,EAAE,CAAC,CAAC;IACxC,CAAC;IAED,qDAAqD;IACrD,MAAM,CAAC,OAAiB;QACtB,OAAO,IAAI,CAAC,QAAQ,CAAC,EAAE,OAAO,EAAE,CAAC,CAAC;IACpC,CAAC;IAED,qDAAqD;IACrD,MAAM,CAAC,OAAiB;QACtB,OAAO,IAAI,CAAC,QAAQ,CAAC,EAAE,OAAO,EAAE,CAAC,CAAC;IACpC,CAAC;IAED,mDAAmD;IACnD,IAAI,CAAC,KAAa;QAChB,OAAO,IAAI,CAAC,QAAQ,CAAC,EAAE,KAAK,EAAE,CAAC,CAAC;IAClC,CAAC;IAED,mDAAmD;IACnD,IAAI,CAAC,KAAW;QACd,OAAO,IAAI,CAAC,QAAQ,CAAC,EAAE,KAAK,EAAE,CAAC,CAAC;IAClC,CAAC;IAED,qDAAqD;IACrD,MAAM,CAAC,OAA0B;QAC/B,OAAO,IAAI,CAAC,QAAQ,CAAC,EAAE,OAAO,EAAE,CAAC,CAAC;IACpC,CAAC;IAED,sDAAsD;IACtD,OAAO,CAAC,QAAkB;QACxB,OAAO,IAAI,CAAC,QAAQ,CAAC,EAAE,QAAQ,EAAE,CAAC,CAAC;IACrC,CAAC;CACF;AApND,8CAoNC"}

104
node_modules/mongodb/lib/cursor/change_stream_cursor.js generated vendored Normal file
View file

@ -0,0 +1,104 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ChangeStreamCursor = void 0;
const change_stream_1 = require("../change_stream");
const constants_1 = require("../constants");
const aggregate_1 = require("../operations/aggregate");
const execute_operation_1 = require("../operations/execute_operation");
const utils_1 = require("../utils");
const abstract_cursor_1 = require("./abstract_cursor");
/** @internal */
class ChangeStreamCursor extends abstract_cursor_1.AbstractCursor {
    constructor(client, namespace, pipeline = [], options = {}) {
        // Change streams are built on a tailable awaitData cursor; force both flags.
        super(client, namespace, { ...options, tailable: true, awaitData: true });
        this.pipeline = pipeline;
        this.changeStreamCursorOptions = options;
        this._resumeToken = null;
        this.startAtOperationTime = options.startAtOperationTime ?? null;
        // startAfter takes precedence over resumeAfter as the initial resume point.
        if (options.startAfter) {
            this.resumeToken = options.startAfter;
        }
        else if (options.resumeAfter) {
            this.resumeToken = options.resumeAfter;
        }
    }
    // Assigning a resume token also notifies listeners via RESUME_TOKEN_CHANGED.
    set resumeToken(token) {
        this._resumeToken = token;
        this.emit(change_stream_1.ChangeStream.RESUME_TOKEN_CHANGED, token);
    }
    get resumeToken() {
        return this._resumeToken;
    }
    /**
     * Options for re-creating this cursor after a resume: the original options
     * with any prior resume point removed, then the best currently-known resume
     * point re-added (cached resume token first, falling back to
     * startAtOperationTime).
     */
    get resumeOptions() {
        const options = {
            ...this.changeStreamCursorOptions
        };
        for (const key of ['resumeAfter', 'startAfter', 'startAtOperationTime']) {
            delete options[key];
        }
        if (this.resumeToken != null) {
            // Until a document has been received, a configured startAfter is kept as
            // startAfter; afterwards the token resumes via resumeAfter.
            if (this.changeStreamCursorOptions.startAfter && !this.hasReceived) {
                options.startAfter = this.resumeToken;
            }
            else {
                options.resumeAfter = this.resumeToken;
            }
        }
        else if (this.startAtOperationTime != null) {
            options.startAtOperationTime = this.startAtOperationTime;
        }
        return options;
    }
    // Caches the resume token for a document being handed to the user. When the
    // buffer is empty, the batch-level postBatchResumeToken supersedes the
    // per-document token.
    cacheResumeToken(resumeToken) {
        if (this.bufferedCount() === 0 && this.postBatchResumeToken) {
            this.resumeToken = this.postBatchResumeToken;
        }
        else {
            this.resumeToken = resumeToken;
        }
        this.hasReceived = true;
    }
    // Records the response's postBatchResumeToken; for an empty batch it becomes
    // the current resume token immediately.
    _processBatch(response) {
        const { postBatchResumeToken } = response;
        if (postBatchResumeToken) {
            this.postBatchResumeToken = postBatchResumeToken;
            if (response.batchSize === 0) {
                this.resumeToken = postBatchResumeToken;
            }
        }
    }
    // Fresh, uninitialized cursor over the same pipeline (resume state not copied).
    clone() {
        return new ChangeStreamCursor(this.client, this.namespace, this.pipeline, {
            ...this.cursorOptions
        });
    }
    async _initialize(session) {
        const aggregateOperation = new aggregate_1.AggregateOperation(this.namespace, this.pipeline, {
            ...this.cursorOptions,
            ...this.changeStreamCursorOptions,
            session
        });
        const response = await (0, execute_operation_1.executeOperation)(session.client, aggregateOperation, this.timeoutContext);
        const server = aggregateOperation.server;
        this.maxWireVersion = (0, utils_1.maxWireVersion)(server);
        // With no explicit resume point configured, remember the server's
        // operationTime so a later resume can use startAtOperationTime.
        if (this.startAtOperationTime == null &&
            this.changeStreamCursorOptions.resumeAfter == null &&
            this.changeStreamCursorOptions.startAfter == null) {
            this.startAtOperationTime = response.operationTime;
        }
        this._processBatch(response);
        this.emit(constants_1.INIT, response);
        this.emit(constants_1.RESPONSE);
        return { server, session, response };
    }
    async getMore() {
        const response = await super.getMore();
        // Refresh the wire version from whichever server answered the getMore.
        this.maxWireVersion = (0, utils_1.maxWireVersion)(this.server);
        this._processBatch(response);
        this.emit(change_stream_1.ChangeStream.MORE, response);
        this.emit(change_stream_1.ChangeStream.RESPONSE);
        return response;
    }
}
exports.ChangeStreamCursor = ChangeStreamCursor;
//# sourceMappingURL=change_stream_cursor.js.map

View file

@ -0,0 +1 @@
{"version":3,"file":"change_stream_cursor.js","sourceRoot":"","sources":["../../src/cursor/change_stream_cursor.ts"],"names":[],"mappings":";;;AACA,oDAM0B;AAE1B,4CAA8C;AAE9C,uDAA6D;AAE7D,uEAAmE;AAEnE,oCAAiE;AACjE,uDAI2B;AAY3B,gBAAgB;AAChB,MAAa,kBAGX,SAAQ,gCAA2C;IAenD,YACE,MAAmB,EACnB,SAA2B,EAC3B,WAAuB,EAAE,EACzB,UAAqC,EAAE;QAEvC,KAAK,CAAC,MAAM,EAAE,SAAS,EAAE,EAAE,GAAG,OAAO,EAAE,QAAQ,EAAE,IAAI,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QAE1E,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QACzB,IAAI,CAAC,yBAAyB,GAAG,OAAO,CAAC;QACzC,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC;QACzB,IAAI,CAAC,oBAAoB,GAAG,OAAO,CAAC,oBAAoB,IAAI,IAAI,CAAC;QAEjE,IAAI,OAAO,CAAC,UAAU,EAAE,CAAC;YACvB,IAAI,CAAC,WAAW,GAAG,OAAO,CAAC,UAAU,CAAC;QACxC,CAAC;aAAM,IAAI,OAAO,CAAC,WAAW,EAAE,CAAC;YAC/B,IAAI,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;QACzC,CAAC;IACH,CAAC;IAED,IAAI,WAAW,CAAC,KAAkB;QAChC,IAAI,CAAC,YAAY,GAAG,KAAK,CAAC;QAC1B,IAAI,CAAC,IAAI,CAAC,4BAAY,CAAC,oBAAoB,EAAE,KAAK,CAAC,CAAC;IACtD,CAAC;IAED,IAAI,WAAW;QACb,OAAO,IAAI,CAAC,YAAY,CAAC;IAC3B,CAAC;IAED,IAAI,aAAa;QACf,MAAM,OAAO,GAA8B;YACzC,GAAG,IAAI,CAAC,yBAAyB;SAClC,CAAC;QAEF,KAAK,MAAM,GAAG,IAAI,CAAC,aAAa,EAAE,YAAY,EAAE,sBAAsB,CAAU,EAAE,CAAC;YACjF,OAAO,OAAO,CAAC,GAAG,CAAC,CAAC;QACtB,CAAC;QAED,IAAI,IAAI,CAAC,WAAW,IAAI,IAAI,EAAE,CAAC;YAC7B,IAAI,IAAI,CAAC,yBAAyB,CAAC,UAAU,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC;gBACnE,OAAO,CAAC,UAAU,GAAG,IAAI,CAAC,WAAW,CAAC;YACxC,CAAC;iBAAM,CAAC;gBACN,OAAO,CAAC,WAAW,GAAG,IAAI,CAAC,WAAW,CAAC;YACzC,CAAC;QACH,CAAC;aAAM,IAAI,IAAI,CAAC,oBAAoB,IAAI,IAAI,EAAE,CAAC;YAC7C,OAAO,CAAC,oBAAoB,GAAG,IAAI,CAAC,oBAAoB,CAAC;QAC3D,CAAC;QAED,OAAO,OAAO,CAAC;IACjB,CAAC;IAED,gBAAgB,CAAC,WAAwB;QACvC,IAAI,IAAI,CAAC,aAAa,EAAE,KAAK,CAAC,IAAI,IAAI,CAAC,oBAAoB,EAAE,CAAC;YAC5D,IAAI,CAAC,WAAW,GAAG,IAAI,CAAC,oBAAoB,CAAC;QAC/C,CAAC;aAAM,CAAC;YACN,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;QACjC,CAAC;QACD,IAAI,CAAC,WAAW,GAAG,IAAI,CAAC;IAC1B,CAAC;IAED,aAAa,CAAC,QAAwB;QACpC,MAAM,EAAE,oBAAoB,EAAE,GAAG,QAAQ,CAAC;QAC1C,IAAI,oBAAoB,EAAE,CAAC;YACzB,IAAI,CAAC,oBAAoB,GAAG,oBAAoB,CAAC;YAEjD,IAAI,QAAQ,CAAC,SAAS,KAAK,CAAC
,EAAE,CAAC;gBAC7B,IAAI,CAAC,WAAW,GAAG,oBAAoB,CAAC;YAC1C,CAAC;QACH,CAAC;IACH,CAAC;IAED,KAAK;QACH,OAAO,IAAI,kBAAkB,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,QAAQ,EAAE;YACxE,GAAG,IAAI,CAAC,aAAa;SACtB,CAAC,CAAC;IACL,CAAC;IAED,KAAK,CAAC,WAAW,CAAC,OAAsB;QACtC,MAAM,kBAAkB,GAAG,IAAI,8BAAkB,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,QAAQ,EAAE;YAC/E,GAAG,IAAI,CAAC,aAAa;YACrB,GAAG,IAAI,CAAC,yBAAyB;YACjC,OAAO;SACR,CAAC,CAAC;QAEH,MAAM,QAAQ,GAAG,MAAM,IAAA,oCAAgB,EACrC,OAAO,CAAC,MAAM,EACd,kBAAkB,EAClB,IAAI,CAAC,cAAc,CACpB,CAAC;QAEF,MAAM,MAAM,GAAG,kBAAkB,CAAC,MAAM,CAAC;QACzC,IAAI,CAAC,cAAc,GAAG,IAAA,sBAAc,EAAC,MAAM,CAAC,CAAC;QAE7C,IACE,IAAI,CAAC,oBAAoB,IAAI,IAAI;YACjC,IAAI,CAAC,yBAAyB,CAAC,WAAW,IAAI,IAAI;YAClD,IAAI,CAAC,yBAAyB,CAAC,UAAU,IAAI,IAAI,EACjD,CAAC;YACD,IAAI,CAAC,oBAAoB,GAAG,QAAQ,CAAC,aAAa,CAAC;QACrD,CAAC;QAED,IAAI,CAAC,aAAa,CAAC,QAAQ,CAAC,CAAC;QAE7B,IAAI,CAAC,IAAI,CAAC,gBAAI,EAAE,QAAQ,CAAC,CAAC;QAC1B,IAAI,CAAC,IAAI,CAAC,oBAAQ,CAAC,CAAC;QAEpB,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE,QAAQ,EAAE,CAAC;IACvC,CAAC;IAEQ,KAAK,CAAC,OAAO;QACpB,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,OAAO,EAAE,CAAC;QAEvC,IAAI,CAAC,cAAc,GAAG,IAAA,sBAAc,EAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QAClD,IAAI,CAAC,aAAa,CAAC,QAAQ,CAAC,CAAC;QAE7B,IAAI,CAAC,IAAI,CAAC,4BAAY,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC;QACvC,IAAI,CAAC,IAAI,CAAC,4BAAY,CAAC,QAAQ,CAAC,CAAC;QACjC,OAAO,QAAQ,CAAC;IAClB,CAAC;CACF;AAzID,gDAyIC"}

View file

@ -0,0 +1,52 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ClientBulkWriteCursor = void 0;
const client_bulk_write_1 = require("../operations/client_bulk_write/client_bulk_write");
const execute_operation_1 = require("../operations/execute_operation");
const utils_1 = require("../utils");
const abstract_cursor_1 = require("./abstract_cursor");
/**
 * This is the cursor that handles client bulk write operations. Note this is never
 * exposed directly to the user and is always immediately exhausted.
 * @internal
 */
class ClientBulkWriteCursor extends abstract_cursor_1.AbstractCursor {
    /** @internal */
    constructor(client, commandBuilder, options = {}) {
        // Client bulk writes are always issued against admin.$cmd.
        super(client, new utils_1.MongoDBNamespace('admin', '$cmd'), options);
        this.commandBuilder = commandBuilder;
        this.clientBulkWriteOptions = options;
    }
    /**
     * Exposes the top-level cursor response so the bulk write result can be
     * assembled from its fields; `null` until `_initialize` has stored one.
     */
    get response() {
        return this.cursorResponse ? this.cursorResponse : null;
    }
    /** Delegates to the command builder's most recently issued operations. */
    get operations() {
        return this.commandBuilder.lastOperations;
    }
    /** Returns a new, uninitialized cursor for the same command builder (session excluded). */
    clone() {
        const optionsCopy = (0, utils_1.mergeOptions)({}, this.clientBulkWriteOptions);
        delete optionsCopy.session;
        return new ClientBulkWriteCursor(this.client, this.commandBuilder, {
            ...optionsCopy
        });
    }
    /** @internal */
    async _initialize(session) {
        const operation = new client_bulk_write_1.ClientBulkWriteOperation(this.commandBuilder, {
            ...this.clientBulkWriteOptions,
            ...this.cursorOptions,
            session
        });
        const response = await (0, execute_operation_1.executeOperation)(this.client, operation, this.timeoutContext);
        this.cursorResponse = response;
        return { server: operation.server, session, response };
    }
}
exports.ClientBulkWriteCursor = ClientBulkWriteCursor;
//# sourceMappingURL=client_bulk_write_cursor.js.map

View file

@ -0,0 +1 @@
{"version":3,"file":"client_bulk_write_cursor.js","sourceRoot":"","sources":["../../src/cursor/client_bulk_write_cursor.ts"],"names":[],"mappings":";;;AAGA,yFAA6F;AAG7F,uEAAmE;AAEnE,oCAA0D;AAC1D,uDAI2B;AAO3B;;;;GAIG;AACH,MAAa,qBAAsB,SAAQ,gCAAc;IAOvD,gBAAgB;IAChB,YACE,MAAmB,EACnB,cAA6C,EAC7C,UAAwC,EAAE;QAE1C,KAAK,CAAC,MAAM,EAAE,IAAI,wBAAgB,CAAC,OAAO,EAAE,MAAM,CAAC,EAAE,OAAO,CAAC,CAAC;QAE9D,IAAI,CAAC,cAAc,GAAG,cAAc,CAAC;QACrC,IAAI,CAAC,sBAAsB,GAAG,OAAO,CAAC;IACxC,CAAC;IAED;;;OAGG;IACH,IAAI,QAAQ;QACV,IAAI,IAAI,CAAC,cAAc;YAAE,OAAO,IAAI,CAAC,cAAc,CAAC;QACpD,OAAO,IAAI,CAAC;IACd,CAAC;IAED,IAAI,UAAU;QACZ,OAAO,IAAI,CAAC,cAAc,CAAC,cAAc,CAAC;IAC5C,CAAC;IAED,KAAK;QACH,MAAM,aAAa,GAAG,IAAA,oBAAY,EAAC,EAAE,EAAE,IAAI,CAAC,sBAAsB,CAAC,CAAC;QACpE,OAAO,aAAa,CAAC,OAAO,CAAC;QAC7B,OAAO,IAAI,qBAAqB,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,cAAc,EAAE;YACjE,GAAG,aAAa;SACjB,CAAC,CAAC;IACL,CAAC;IAED,gBAAgB;IAChB,KAAK,CAAC,WAAW,CAAC,OAAsB;QACtC,MAAM,wBAAwB,GAAG,IAAI,4CAAwB,CAAC,IAAI,CAAC,cAAc,EAAE;YACjF,GAAG,IAAI,CAAC,sBAAsB;YAC9B,GAAG,IAAI,CAAC,aAAa;YACrB,OAAO;SACR,CAAC,CAAC;QAEH,MAAM,QAAQ,GAAG,MAAM,IAAA,oCAAgB,EACrC,IAAI,CAAC,MAAM,EACX,wBAAwB,EACxB,IAAI,CAAC,cAAc,CACpB,CAAC;QACF,IAAI,CAAC,cAAc,GAAG,QAAQ,CAAC;QAE/B,OAAO,EAAE,MAAM,EAAE,wBAAwB,CAAC,MAAM,EAAE,OAAO,EAAE,QAAQ,EAAE,CAAC;IACxE,CAAC;CACF;AAzDD,sDAyDC"}

36
node_modules/mongodb/lib/cursor/explainable_cursor.js generated vendored Normal file
View file

@ -0,0 +1,36 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ExplainableCursor = void 0;
const abstract_cursor_1 = require("./abstract_cursor");
/**
 * @public
 *
 * A base class for any cursors that have `explain()` methods.
 */
class ExplainableCursor extends abstract_cursor_1.AbstractCursor {
    /**
     * Normalizes the overloaded `explain(verbosity?, options?)` arguments into
     * separate explain and timeout option values.
     */
    resolveExplainTimeoutOptions(verbosity, options) {
        // Neither argument given: nothing to resolve.
        if (verbosity == null && options == null) {
            return { timeout: undefined, explain: undefined };
        }
        // Single argument: it is either a verbosity value, an explain options
        // object (has `verbosity`), and/or a timeout options object (has `timeoutMS`).
        if (verbosity != null && options == null) {
            const isOptionsObject = typeof verbosity === 'object';
            const explain = !isOptionsObject
                ? verbosity
                : 'verbosity' in verbosity
                    ? verbosity
                    : undefined;
            const timeout = isOptionsObject && 'timeoutMS' in verbosity ? verbosity : undefined;
            return { timeout, explain };
        }
        // Both arguments given: first is explain, second is timeout options.
        return { timeout: options, explain: verbosity };
    }
}
exports.ExplainableCursor = ExplainableCursor;
//# sourceMappingURL=explainable_cursor.js.map

View file

@ -0,0 +1 @@
{"version":3,"file":"explainable_cursor.js","sourceRoot":"","sources":["../../src/cursor/explainable_cursor.ts"],"names":[],"mappings":";;;AAEA,uDAAmD;AAEnD;;;;GAIG;AACH,MAAsB,iBAA2B,SAAQ,gCAAuB;IAcpE,4BAA4B,CACpC,SAAiF,EACjF,OAAgC;QAEhC,IAAI,OAAiE,CAAC;QACtE,IAAI,OAA2C,CAAC;QAEhD,IAAI,SAAS,IAAI,IAAI,IAAI,OAAO,IAAI,IAAI,EAAE,CAAC;YACzC,OAAO,GAAG,SAAS,CAAC;YACpB,OAAO,GAAG,SAAS,CAAC;QACtB,CAAC;aAAM,IAAI,SAAS,IAAI,IAAI,IAAI,OAAO,IAAI,IAAI,EAAE,CAAC;YAChD,OAAO;gBACL,OAAO,SAAS,KAAK,QAAQ;oBAC3B,CAAC,CAAC,SAAS;oBACX,CAAC,CAAC,WAAW,IAAI,SAAS;wBACxB,CAAC,CAAC,SAAS;wBACX,CAAC,CAAC,SAAS,CAAC;YAElB,OAAO,GAAG,OAAO,SAAS,KAAK,QAAQ,IAAI,WAAW,IAAI,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC;QAC9F,CAAC;aAAM,CAAC;YACN,sHAAsH;YACtH,OAAO,GAAG,SAAS,CAAC;YACpB,OAAO,GAAG,OAAO,CAAC;QACpB,CAAC;QAED,OAAO,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC;IAC9B,CAAC;CACF;AAzCD,8CAyCC"}

399
node_modules/mongodb/lib/cursor/find_cursor.js generated vendored Normal file
View file

@ -0,0 +1,399 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.FindCursor = exports.FLAGS = void 0;
const responses_1 = require("../cmap/wire_protocol/responses");
const error_1 = require("../error");
const explain_1 = require("../explain");
const count_1 = require("../operations/count");
const execute_operation_1 = require("../operations/execute_operation");
const find_1 = require("../operations/find");
const sort_1 = require("../sort");
const utils_1 = require("../utils");
const explainable_cursor_1 = require("./explainable_cursor");
/**
 * @public Flags allowed for cursor
 *
 * Mirrors the `CURSOR_FLAGS` list exported from `./abstract_cursor`
 * (the same six wire-protocol cursor flags).
 */
exports.FLAGS = [
    'tailable',
    'oplogReplay',
    'noCursorTimeout',
    'awaitData',
    'exhaust',
    'partial'
];
/** @public */
class FindCursor extends explainable_cursor_1.ExplainableCursor {
    /** @internal */
    constructor(client, namespace, filter = {}, options = {}) {
        super(client, namespace, options);
        /** @internal Count of documents returned so far; used by getMore to enforce `limit`. */
        this.numReturned = 0;
        this.cursorFilter = filter;
        this.findOptions = options;
        if (options.sort != null) {
            this.findOptions.sort = (0, sort_1.formatSort)(options.sort);
        }
    }
    /** Returns a new, uninitialized cursor with the same filter and options (session excluded). */
    clone() {
        const clonedOptions = (0, utils_1.mergeOptions)({}, this.findOptions);
        delete clonedOptions.session;
        return new FindCursor(this.client, this.namespace, this.cursorFilter, {
            ...clonedOptions
        });
    }
    /** Applies a per-document transform; delegates to the base cursor implementation. */
    map(transform) {
        return super.map(transform);
    }
    /** @internal */
    async _initialize(session) {
        const options = {
            ...this.findOptions, // NOTE: order matters here, we may need to refine this
            ...this.cursorOptions,
            session,
            signal: this.signal
        };
        if (options.explain) {
            try {
                (0, explain_1.validateExplainTimeoutOptions)(options, explain_1.Explain.fromOptions(options));
            }
            catch {
                throw new error_1.MongoAPIError('timeoutMS cannot be used with explain when explain is specified in findOptions');
            }
        }
        const findOperation = new find_1.FindOperation(this.namespace, this.cursorFilter, options);
        const response = await (0, execute_operation_1.executeOperation)(this.client, findOperation, this.timeoutContext);
        // the response is not a cursor when `explain` is enabled
        this.numReturned = response.batchSize;
        return { server: findOperation.server, session, response };
    }
    /** @internal */
    async getMore() {
        const numReturned = this.numReturned;
        const limit = this.findOptions.limit ?? Infinity;
        const remaining = limit - numReturned;
        if (numReturned === limit && !this.id?.isZero()) {
            // this is an optimization for the special case of a limit for a find command to avoid an
            // extra getMore when the limit has been reached and the limit is a multiple of the batchSize.
            // This is a consequence of the new query engine in 5.0 having no knowledge of the limit as it
            // produces results for the find command. Once a batch is filled up, it is returned and only
            // on the subsequent getMore will the query framework consider the limit, determine the cursor
            // is exhausted and return a cursorId of zero.
            // instead, if we determine there are no more documents to request from the server, we preemptively
            // close the cursor
            try {
                await this.close();
            }
            catch (error) {
                (0, utils_1.squashError)(error);
            }
            return responses_1.CursorResponse.emptyGetMore;
        }
        // TODO(DRIVERS-1448): Remove logic to enforce `limit` in the driver
        let cleanup = utils_1.noop;
        const { batchSize } = this.cursorOptions;
        if (batchSize != null && batchSize > remaining) {
            this.cursorOptions.batchSize = remaining;
            // After executing the final getMore, re-assign the batchSize back to its original value so that
            // if the cursor is rewound and executed, the batchSize is still correct.
            cleanup = () => {
                this.cursorOptions.batchSize = batchSize;
            };
        }
        try {
            const response = await super.getMore();
            this.numReturned = this.numReturned + response.batchSize;
            return response;
        }
        finally {
            cleanup?.();
        }
    }
    /**
     * Get the count of documents for this cursor
     * @deprecated Use `collection.estimatedDocumentCount` or `collection.countDocuments` instead
     */
    async count(options) {
        (0, utils_1.emitWarningOnce)('cursor.count is deprecated and will be removed in the next major version, please use `collection.estimatedDocumentCount` or `collection.countDocuments` instead ');
        if (typeof options === 'boolean') {
            throw new error_1.MongoInvalidArgumentError('Invalid first parameter to count');
        }
        return await (0, execute_operation_1.executeOperation)(this.client, new count_1.CountOperation(this.namespace, this.cursorFilter, {
            ...this.findOptions, // NOTE: order matters here, we may need to refine this
            ...this.cursorOptions,
            ...options
        }));
    }
    /** Executes the find with `explain` and returns the server's explain output. */
    async explain(verbosity, options) {
        const { explain, timeout } = this.resolveExplainTimeoutOptions(verbosity, options);
        return (await (0, execute_operation_1.executeOperation)(this.client, new find_1.FindOperation(this.namespace, this.cursorFilter, {
            ...this.findOptions, // NOTE: order matters here, we may need to refine this
            ...this.cursorOptions,
            ...timeout,
            explain: explain ?? true
        }))).shift(this.deserializationOptions);
    }
    /** Set the cursor query */
    filter(filter) {
        this.throwIfInitialized();
        this.cursorFilter = filter;
        return this;
    }
    /**
     * Set the cursor hint
     *
     * @param hint - If specified, then the query system will only consider plans using the hinted index.
     */
    hint(hint) {
        this.throwIfInitialized();
        this.findOptions.hint = hint;
        return this;
    }
    /**
     * Set the cursor min
     *
     * @param min - Specify a $min value to specify the inclusive lower bound for a specific index in order to constrain the results of find(). The $min specifies the lower bound for all keys of a specific index in order.
     */
    min(min) {
        this.throwIfInitialized();
        this.findOptions.min = min;
        return this;
    }
    /**
     * Set the cursor max
     *
     * @param max - Specify a $max value to specify the exclusive upper bound for a specific index in order to constrain the results of find(). The $max specifies the upper bound for all keys of a specific index in order.
     */
    max(max) {
        this.throwIfInitialized();
        this.findOptions.max = max;
        return this;
    }
    /**
     * Set the cursor returnKey.
     * If set to true, modifies the cursor to only return the index field or fields for the results of the query, rather than documents.
     * If set to true and the query does not use an index to perform the read operation, the returned documents will not contain any fields.
     *
     * @param value - the returnKey value.
     */
    returnKey(value) {
        this.throwIfInitialized();
        this.findOptions.returnKey = value;
        return this;
    }
    /**
     * Modifies the output of a query by adding a field $recordId to matching documents. $recordId is the internal key which uniquely identifies a document in a collection.
     *
     * @param value - The $showDiskLoc option has now been deprecated and replaced with the showRecordId field. $showDiskLoc will still be accepted for OP_QUERY stye find.
     */
    showRecordId(value) {
        this.throwIfInitialized();
        this.findOptions.showRecordId = value;
        return this;
    }
    /**
     * Add a query modifier to the cursor query
     *
     * @param name - The query modifier (must start with $, such as $orderby etc)
     * @param value - The modifier value.
     * @throws MongoInvalidArgumentError if `name` does not start with `$` or is not a recognized modifier.
     */
    addQueryModifier(name, value) {
        this.throwIfInitialized();
        if (name[0] !== '$') {
            throw new error_1.MongoInvalidArgumentError(`${name} is not a valid query modifier`);
        }
        // Strip off the leading $ (slice replaces the deprecated String.prototype.substr)
        const field = name.slice(1);
        // NOTE: consider some TS magic for this
        switch (field) {
            case 'comment':
                this.findOptions.comment = value;
                break;
            case 'explain':
                this.findOptions.explain = value;
                break;
            case 'hint':
                this.findOptions.hint = value;
                break;
            case 'max':
                this.findOptions.max = value;
                break;
            case 'maxTimeMS':
                this.findOptions.maxTimeMS = value;
                break;
            case 'min':
                this.findOptions.min = value;
                break;
            case 'orderby':
                this.findOptions.sort = (0, sort_1.formatSort)(value);
                break;
            case 'query':
                this.cursorFilter = value;
                break;
            case 'returnKey':
                this.findOptions.returnKey = value;
                break;
            case 'showDiskLoc':
                this.findOptions.showRecordId = value;
                break;
            default:
                throw new error_1.MongoInvalidArgumentError(`Invalid query modifier: ${name}`);
        }
        return this;
    }
    /**
     * Add a comment to the cursor query allowing for tracking the comment in the log.
     *
     * @param value - The comment attached to this query.
     */
    comment(value) {
        this.throwIfInitialized();
        this.findOptions.comment = value;
        return this;
    }
    /**
     * Set a maxAwaitTimeMS on a tailing cursor query to allow to customize the timeout value for the option awaitData (Only supported on MongoDB 3.2 or higher, ignored otherwise)
     *
     * @param value - Number of milliseconds to wait before aborting the tailed query.
     */
    maxAwaitTimeMS(value) {
        this.throwIfInitialized();
        if (typeof value !== 'number') {
            throw new error_1.MongoInvalidArgumentError('Argument for maxAwaitTimeMS must be a number');
        }
        this.findOptions.maxAwaitTimeMS = value;
        return this;
    }
    /**
     * Set a maxTimeMS on the cursor query, allowing for hard timeout limits on queries (Only supported on MongoDB 2.6 or higher)
     *
     * @param value - Number of milliseconds to wait before aborting the query.
     */
    maxTimeMS(value) {
        this.throwIfInitialized();
        if (typeof value !== 'number') {
            throw new error_1.MongoInvalidArgumentError('Argument for maxTimeMS must be a number');
        }
        this.findOptions.maxTimeMS = value;
        return this;
    }
    /**
     * Add a project stage to the aggregation pipeline
     *
     * @remarks
     * In order to strictly type this function you must provide an interface
     * that represents the effect of your projection on the result documents.
     *
     * By default chaining a projection to your cursor changes the returned type to the generic
     * {@link Document} type.
     * You should specify a parameterized type to have assertions on your final results.
     *
     * @example
     * ```typescript
     * // Best way
     * const docs: FindCursor<{ a: number }> = cursor.project<{ a: number }>({ _id: 0, a: true });
     * // Flexible way
     * const docs: FindCursor<Document> = cursor.project({ _id: 0, a: true });
     * ```
     *
     * @remarks
     *
     * **Note for Typescript Users:** adding a transform changes the return type of the iteration of this cursor,
     * it **does not** return a new instance of a cursor. This means when calling project,
     * you should always assign the result to a new variable in order to get a correctly typed cursor variable.
     * Take note of the following example:
     *
     * @example
     * ```typescript
     * const cursor: FindCursor<{ a: number; b: string }> = coll.find();
     * const projectCursor = cursor.project<{ a: number }>({ _id: 0, a: true });
     * const aPropOnlyArray: {a: number}[] = await projectCursor.toArray();
     *
     * // or always use chaining and save the final cursor
     *
     * const cursor = coll.find().project<{ a: string }>({
     *   _id: 0,
     *   a: { $convert: { input: '$a', to: 'string' }
     * }});
     * ```
     */
    project(value) {
        this.throwIfInitialized();
        this.findOptions.projection = value;
        return this;
    }
    /**
     * Sets the sort order of the cursor query.
     *
     * @param sort - The key or keys set for the sort.
     * @param direction - The direction of the sorting (1 or -1).
     */
    sort(sort, direction) {
        this.throwIfInitialized();
        if (this.findOptions.tailable) {
            throw new error_1.MongoTailableCursorError('Tailable cursor does not support sorting');
        }
        this.findOptions.sort = (0, sort_1.formatSort)(sort, direction);
        return this;
    }
    /**
     * Allows disk use for blocking sort operations exceeding 100MB memory. (MongoDB 3.2 or higher)
     *
     * @remarks
     * {@link https://www.mongodb.com/docs/manual/reference/command/find/#find-cmd-allowdiskuse | find command allowDiskUse documentation}
     */
    allowDiskUse(allow = true) {
        this.throwIfInitialized();
        if (!this.findOptions.sort) {
            throw new error_1.MongoInvalidArgumentError('Option "allowDiskUse" requires a sort specification');
        }
        // As of 6.0 the default is true. This allows users to get back to the old behavior.
        if (!allow) {
            this.findOptions.allowDiskUse = false;
            return this;
        }
        this.findOptions.allowDiskUse = true;
        return this;
    }
    /**
     * Set the collation options for the cursor.
     *
     * @param value - The cursor collation options (MongoDB 3.4 or higher) settings for update operation (see 3.4 documentation for available fields).
     */
    collation(value) {
        this.throwIfInitialized();
        this.findOptions.collation = value;
        return this;
    }
    /**
     * Set the limit for the cursor.
     *
     * @param value - The limit for the cursor query.
     */
    limit(value) {
        this.throwIfInitialized();
        if (this.findOptions.tailable) {
            throw new error_1.MongoTailableCursorError('Tailable cursor does not support limit');
        }
        if (typeof value !== 'number') {
            throw new error_1.MongoInvalidArgumentError('Operation "limit" requires an integer');
        }
        this.findOptions.limit = value;
        return this;
    }
    /**
     * Set the skip for the cursor.
     *
     * @param value - The skip for the cursor query.
     */
    skip(value) {
        this.throwIfInitialized();
        if (this.findOptions.tailable) {
            throw new error_1.MongoTailableCursorError('Tailable cursor does not support skip');
        }
        if (typeof value !== 'number') {
            throw new error_1.MongoInvalidArgumentError('Operation "skip" requires an integer');
        }
        this.findOptions.skip = value;
        return this;
    }
}
exports.FindCursor = FindCursor;
//# sourceMappingURL=find_cursor.js.map

1
node_modules/mongodb/lib/cursor/find_cursor.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

View file

@ -0,0 +1,34 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ListCollectionsCursor = void 0;
const execute_operation_1 = require("../operations/execute_operation");
const list_collections_1 = require("../operations/list_collections");
const abstract_cursor_1 = require("./abstract_cursor");
/** @public */
class ListCollectionsCursor extends abstract_cursor_1.AbstractCursor {
    constructor(db, filter, options) {
        super(db.client, db.s.namespace, options);
        this.parent = db;
        this.filter = filter;
        this.options = options;
    }
    /** Create a fresh, uninitialized cursor over the same collections listing. */
    clone() {
        // cursorOptions last so live cursor state wins over construction-time options.
        const clonedOptions = { ...this.options, ...this.cursorOptions };
        return new ListCollectionsCursor(this.parent, this.filter, clonedOptions);
    }
    /** @internal */
    async _initialize(session) {
        const listCollections = new list_collections_1.ListCollectionsOperation(this.parent, this.filter, {
            ...this.cursorOptions,
            ...this.options,
            session,
            signal: this.signal
        });
        const response = await (0, execute_operation_1.executeOperation)(this.parent.client, listCollections, this.timeoutContext);
        return { server: listCollections.server, session, response };
    }
}
exports.ListCollectionsCursor = ListCollectionsCursor;

View file

@ -0,0 +1 @@
{"version":3,"file":"list_collections_cursor.js","sourceRoot":"","sources":["../../src/cursor/list_collections_cursor.ts"],"names":[],"mappings":";;;AAGA,uEAAmE;AACnE,qEAIwC;AAExC,uDAA+E;AAE/E,cAAc;AACd,MAAa,qBAIX,SAAQ,gCAAiB;IAKzB,YAAY,EAAM,EAAE,MAAgB,EAAE,OAA4C;QAChF,KAAK,CAAC,EAAE,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;QAC1C,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC;QACjB,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;IACzB,CAAC;IAED,KAAK;QACH,OAAO,IAAI,qBAAqB,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,EAAE;YACzD,GAAG,IAAI,CAAC,OAAO;YACf,GAAG,IAAI,CAAC,aAAa;SACtB,CAAC,CAAC;IACL,CAAC;IAED,gBAAgB;IAChB,KAAK,CAAC,WAAW,CAAC,OAAkC;QAClD,MAAM,SAAS,GAAG,IAAI,2CAAwB,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,EAAE;YACvE,GAAG,IAAI,CAAC,aAAa;YACrB,GAAG,IAAI,CAAC,OAAO;YACf,OAAO;YACP,MAAM,EAAE,IAAI,CAAC,MAAM;SACpB,CAAC,CAAC;QAEH,MAAM,QAAQ,GAAG,MAAM,IAAA,oCAAgB,EAAC,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,SAAS,EAAE,IAAI,CAAC,cAAc,CAAC,CAAC;QAE5F,OAAO,EAAE,MAAM,EAAE,SAAS,CAAC,MAAM,EAAE,OAAO,EAAE,QAAQ,EAAE,CAAC;IACzD,CAAC;CACF;AApCD,sDAoCC"}

32
node_modules/mongodb/lib/cursor/list_indexes_cursor.js generated vendored Normal file
View file

@ -0,0 +1,32 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ListIndexesCursor = void 0;
const execute_operation_1 = require("../operations/execute_operation");
const indexes_1 = require("../operations/indexes");
const abstract_cursor_1 = require("./abstract_cursor");
/** @public */
class ListIndexesCursor extends abstract_cursor_1.AbstractCursor {
    constructor(collection, options) {
        super(collection.client, collection.s.namespace, options);
        this.parent = collection;
        this.options = options;
    }
    /** Create a fresh, uninitialized cursor over the same index listing. */
    clone() {
        // cursorOptions last so live cursor state wins over construction-time options.
        const clonedOptions = { ...this.options, ...this.cursorOptions };
        return new ListIndexesCursor(this.parent, clonedOptions);
    }
    /** @internal */
    async _initialize(session) {
        const listIndexes = new indexes_1.ListIndexesOperation(this.parent, {
            ...this.cursorOptions,
            ...this.options,
            session
        });
        const response = await (0, execute_operation_1.executeOperation)(this.parent.client, listIndexes, this.timeoutContext);
        return { server: listIndexes.server, session, response };
    }
}
exports.ListIndexesCursor = ListIndexesCursor;
//# sourceMappingURL=list_indexes_cursor.js.map

View file

@ -0,0 +1 @@
{"version":3,"file":"list_indexes_cursor.js","sourceRoot":"","sources":["../../src/cursor/list_indexes_cursor.ts"],"names":[],"mappings":";;;AACA,uEAAmE;AACnE,mDAAsF;AAEtF,uDAA+E;AAE/E,cAAc;AACd,MAAa,iBAAkB,SAAQ,gCAAc;IAInD,YAAY,UAAsB,EAAE,OAA4B;QAC9D,KAAK,CAAC,UAAU,CAAC,MAAM,EAAE,UAAU,CAAC,CAAC,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;QAC1D,IAAI,CAAC,MAAM,GAAG,UAAU,CAAC;QACzB,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;IACzB,CAAC;IAED,KAAK;QACH,OAAO,IAAI,iBAAiB,CAAC,IAAI,CAAC,MAAM,EAAE;YACxC,GAAG,IAAI,CAAC,OAAO;YACf,GAAG,IAAI,CAAC,aAAa;SACtB,CAAC,CAAC;IACL,CAAC;IAED,gBAAgB;IAChB,KAAK,CAAC,WAAW,CAAC,OAAkC;QAClD,MAAM,SAAS,GAAG,IAAI,8BAAoB,CAAC,IAAI,CAAC,MAAM,EAAE;YACtD,GAAG,IAAI,CAAC,aAAa;YACrB,GAAG,IAAI,CAAC,OAAO;YACf,OAAO;SACR,CAAC,CAAC;QAEH,MAAM,QAAQ,GAAG,MAAM,IAAA,oCAAgB,EAAC,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,SAAS,EAAE,IAAI,CAAC,cAAc,CAAC,CAAC;QAE5F,OAAO,EAAE,MAAM,EAAE,SAAS,CAAC,MAAM,EAAE,OAAO,EAAE,QAAQ,EAAE,CAAC;IACzD,CAAC;CACF;AA7BD,8CA6BC"}

View file

@ -0,0 +1,14 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ListSearchIndexesCursor = void 0;
const aggregation_cursor_1 = require("./aggregation_cursor");
/** @public */
class ListSearchIndexesCursor extends aggregation_cursor_1.AggregationCursor {
    /** @internal */
    constructor({ fullNamespace: ns, client }, name, options = {}) {
        // With no name given, list every search index; otherwise filter to the one name.
        const stage = name == null
            ? { $listSearchIndexes: {} }
            : { $listSearchIndexes: { name } };
        super(client, ns, [stage], options);
    }
}
exports.ListSearchIndexesCursor = ListSearchIndexesCursor;
//# sourceMappingURL=list_search_indexes_cursor.js.map

View file

@ -0,0 +1 @@
{"version":3,"file":"list_search_indexes_cursor.js","sourceRoot":"","sources":["../../src/cursor/list_search_indexes_cursor.ts"],"names":[],"mappings":";;;AAEA,6DAAyD;AAKzD,cAAc;AACd,MAAa,uBAAwB,SAAQ,sCAAmC;IAC9E,gBAAgB;IAChB,YACE,EAAE,aAAa,EAAE,EAAE,EAAE,MAAM,EAAc,EACzC,IAAmB,EACnB,UAAoC,EAAE;QAEtC,MAAM,QAAQ,GACZ,IAAI,IAAI,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,kBAAkB,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,kBAAkB,EAAE,EAAE,IAAI,EAAE,EAAE,CAAC,CAAC;QACnF,KAAK,CAAC,MAAM,EAAE,EAAE,EAAE,QAAQ,EAAE,OAAO,CAAC,CAAC;IACvC,CAAC;CACF;AAXD,0DAWC"}

94
node_modules/mongodb/lib/cursor/run_command_cursor.js generated vendored Normal file
View file

@ -0,0 +1,94 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.RunCommandCursor = void 0;
const error_1 = require("../error");
const execute_operation_1 = require("../operations/execute_operation");
const get_more_1 = require("../operations/get_more");
const run_command_1 = require("../operations/run_command");
const utils_1 = require("../utils");
const abstract_cursor_1 = require("./abstract_cursor");
/** @public */
class RunCommandCursor extends abstract_cursor_1.AbstractCursor {
    /** @internal */
    constructor(db, command, options = {}) {
        super(db.client, (0, utils_1.ns)(db.namespace), options);
        // Options merged verbatim into every getMore command this cursor issues.
        this.getMoreOptions = {};
        this.db = db;
        // Freeze a shallow copy so the initiating command cannot be mutated later.
        this.command = Object.freeze({ ...command });
    }
    /**
     * Controls the `getMore.comment` field
     * @param comment - any BSON value
     */
    setComment(comment) {
        this.getMoreOptions.comment = comment;
        return this;
    }
    /**
     * Controls the `getMore.maxTimeMS` field. Only valid when cursor is tailable await
     * @param maxTimeMS - the number of milliseconds to wait for new data
     */
    setMaxTimeMS(maxTimeMS) {
        // Stored as maxAwaitTimeMS: on a getMore this bounds the server-side wait
        // for new data on a tailable-await cursor.
        this.getMoreOptions.maxAwaitTimeMS = maxTimeMS;
        return this;
    }
    /**
     * Controls the `getMore.batchSize` field
     * @param batchSize - the number documents to return in the `nextBatch`
     */
    setBatchSize(batchSize) {
        this.getMoreOptions.batchSize = batchSize;
        return this;
    }
    /** Unsupported for RunCommandCursor */
    clone() {
        throw new error_1.MongoAPIError('Clone not supported, create a new cursor with db.runCursorCommand');
    }
    /** Unsupported for RunCommandCursor: readConcern must be configured directly on command document */
    withReadConcern(_) {
        throw new error_1.MongoAPIError('RunCommandCursor does not support readConcern it must be attached to the command being run');
    }
    /** Unsupported for RunCommandCursor: various cursor flags must be configured directly on command document */
    addCursorFlag(_, __) {
        throw new error_1.MongoAPIError('RunCommandCursor does not support cursor flags, they must be attached to the command being run');
    }
    /**
     * Unsupported for RunCommandCursor: maxTimeMS must be configured directly on command document
     */
    maxTimeMS(_) {
        throw new error_1.MongoAPIError('maxTimeMS must be configured on the command document directly, to configure getMore.maxTimeMS use cursor.setMaxTimeMS()');
    }
    /** Unsupported for RunCommandCursor: batchSize must be configured directly on command document */
    batchSize(_) {
        throw new error_1.MongoAPIError('batchSize must be configured on the command document directly, to configure getMore.batchSize use cursor.setBatchSize()');
    }
    /** @internal */
    async _initialize(session) {
        const runCursorCommand = new run_command_1.RunCursorCommandOperation(this.db.s.namespace, this.command, {
            ...this.cursorOptions,
            session: session,
            readPreference: this.cursorOptions.readPreference
        });
        const response = await (0, execute_operation_1.executeOperation)(this.client, runCursorCommand, this.timeoutContext);
        return { server: runCursorCommand.server, session, response };
    }
    /** @internal */
    async getMore() {
        if (!this.session) {
            throw new error_1.MongoRuntimeError('Unexpected null session. A cursor creating command should have set this');
        }
        // getMoreOptions spread last so per-cursor setters win over cursorOptions.
        const getMoreOperation = new get_more_1.GetMoreOperation(this.namespace, this.id, this.server, {
            ...this.cursorOptions,
            session: this.session,
            ...this.getMoreOptions
        });
        return await (0, execute_operation_1.executeOperation)(this.client, getMoreOperation, this.timeoutContext);
    }
}
exports.RunCommandCursor = RunCommandCursor;
//# sourceMappingURL=run_command_cursor.js.map

View file

@ -0,0 +1 @@
{"version":3,"file":"run_command_cursor.js","sourceRoot":"","sources":["../../src/cursor/run_command_cursor.ts"],"names":[],"mappings":";;;AAGA,oCAA4D;AAC5D,uEAAmE;AACnE,qDAA0D;AAC1D,2DAAsE;AAItE,oCAA8B;AAC9B,uDAI2B;AA+C3B,cAAc;AACd,MAAa,gBAAiB,SAAQ,gCAAc;IAQlD;;;OAGG;IACI,UAAU,CAAC,OAAY;QAC5B,IAAI,CAAC,cAAc,CAAC,OAAO,GAAG,OAAO,CAAC;QACtC,OAAO,IAAI,CAAC;IACd,CAAC;IAED;;;OAGG;IACI,YAAY,CAAC,SAAiB;QACnC,IAAI,CAAC,cAAc,CAAC,cAAc,GAAG,SAAS,CAAC;QAC/C,OAAO,IAAI,CAAC;IACd,CAAC;IAED;;;OAGG;IACI,YAAY,CAAC,SAAiB;QACnC,IAAI,CAAC,cAAc,CAAC,SAAS,GAAG,SAAS,CAAC;QAC1C,OAAO,IAAI,CAAC;IACd,CAAC;IAED,uCAAuC;IACvB,KAAK;QACnB,MAAM,IAAI,qBAAa,CAAC,mEAAmE,CAAC,CAAC;IAC/F,CAAC;IAED,oGAAoG;IACpF,eAAe,CAAC,CAAkB;QAChD,MAAM,IAAI,qBAAa,CACrB,4FAA4F,CAC7F,CAAC;IACJ,CAAC;IAED,6GAA6G;IAC7F,aAAa,CAAC,CAAS,EAAE,EAAW;QAClD,MAAM,IAAI,qBAAa,CACrB,gGAAgG,CACjG,CAAC;IACJ,CAAC;IAED;;OAEG;IACa,SAAS,CAAC,CAAS;QACjC,MAAM,IAAI,qBAAa,CACrB,yHAAyH,CAC1H,CAAC;IACJ,CAAC;IAED,kGAAkG;IAClF,SAAS,CAAC,CAAS;QACjC,MAAM,IAAI,qBAAa,CACrB,yHAAyH,CAC1H,CAAC;IACJ,CAAC;IAKD,gBAAgB;IAChB,YAAY,EAAM,EAAE,OAAiB,EAAE,UAAmC,EAAE;QAC1E,KAAK,CAAC,EAAE,CAAC,MAAM,EAAE,IAAA,UAAE,EAAC,EAAE,CAAC,SAAS,CAAC,EAAE,OAAO,CAAC,CAAC;QAzE9B,mBAAc,GAI1B,EAAE,CAAC;QAsEL,IAAI,CAAC,EAAE,GAAG,EAAE,CAAC;QACb,IAAI,CAAC,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,GAAG,OAAO,EAAE,CAAC,CAAC;IAC/C,CAAC;IAED,gBAAgB;IACN,KAAK,CAAC,WAAW,CAAC,OAAsB;QAChD,MAAM,SAAS,GAAG,IAAI,uCAAyB,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,SAAS,EAAE,IAAI,CAAC,OAAO,EAAE;YACjF,GAAG,IAAI,CAAC,aAAa;YACrB,OAAO,EAAE,OAAO;YAChB,cAAc,EAAE,IAAI,CAAC,aAAa,CAAC,cAAc;SAClD,CAAC,CAAC;QAEH,MAAM,QAAQ,GAAG,MAAM,IAAA,oCAAgB,EAAC,IAAI,CAAC,MAAM,EAAE,SAAS,EAAE,IAAI,CAAC,cAAc,CAAC,CAAC;QAErF,OAAO;YACL,MAAM,EAAE,SAAS,CAAC,MAAM;YACxB,OAAO;YACP,QAAQ;SACT,CAAC;IACJ,CAAC;IAED,gBAAgB;IACP,KAAK,CAAC,OAAO;QACpB,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC;YAClB,MAAM,IAAI,yBAAiB,CACzB,yEAAyE,CAC1E,CAAC;QACJ,CAAC;QAED,oEAAoE;QACpE,MAAM,gBAAgB,GAAG,IAAI,2BAAgB,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,EAAG,EAAE,IAAI,CAAC,MAAO,EAAE;YACpF,GA
AG,IAAI,CAAC,aAAa;YACrB,OAAO,EAAE,IAAI,CAAC,OAAO;YACrB,GAAG,IAAI,CAAC,cAAc;SACvB,CAAC,CAAC;QAEH,OAAO,MAAM,IAAA,oCAAgB,EAAC,IAAI,CAAC,MAAM,EAAE,gBAAgB,EAAE,IAAI,CAAC,cAAc,CAAC,CAAC;IACpF,CAAC;CACF;AAlHD,4CAkHC"}