Initial Sample.
159 graphql-subscription/node_modules/@apollo/client/link/batch/batch.cjs (generated, vendored, normal file)
@@ -0,0 +1,159 @@
'use strict';

Object.defineProperty(exports, '__esModule', { value: true });

var tslib = require('tslib');
var core = require('../core');
var utilities = require('../../utilities');

var OperationBatcher = (function () {
    function OperationBatcher(_a) {
        var batchDebounce = _a.batchDebounce, batchInterval = _a.batchInterval, batchMax = _a.batchMax, batchHandler = _a.batchHandler, batchKey = _a.batchKey;
        this.batchesByKey = new Map();
        this.scheduledBatchTimerByKey = new Map();
        this.batchDebounce = batchDebounce;
        this.batchInterval = batchInterval;
        this.batchMax = batchMax || 0;
        this.batchHandler = batchHandler;
        this.batchKey = batchKey || (function () { return ""; });
    }
    OperationBatcher.prototype.enqueueRequest = function (request) {
        var _this = this;
        var requestCopy = tslib.__assign(tslib.__assign({}, request), { next: [], error: [], complete: [], subscribers: new Set() });
        var key = this.batchKey(request.operation);
        if (!requestCopy.observable) {
            requestCopy.observable = new utilities.Observable(function (observer) {
                var batch = _this.batchesByKey.get(key);
                if (!batch)
                    _this.batchesByKey.set(key, (batch = new Set()));
                var isFirstEnqueuedRequest = batch.size === 0;
                var isFirstSubscriber = requestCopy.subscribers.size === 0;
                requestCopy.subscribers.add(observer);
                if (isFirstSubscriber) {
                    batch.add(requestCopy);
                }
                if (observer.next) {
                    requestCopy.next.push(observer.next.bind(observer));
                }
                if (observer.error) {
                    requestCopy.error.push(observer.error.bind(observer));
                }
                if (observer.complete) {
                    requestCopy.complete.push(observer.complete.bind(observer));
                }
                if (isFirstEnqueuedRequest || _this.batchDebounce) {
                    _this.scheduleQueueConsumption(key);
                }
                if (batch.size === _this.batchMax) {
                    _this.consumeQueue(key);
                }
                return function () {
                    var _a;
                    if (requestCopy.subscribers.delete(observer) &&
                        requestCopy.subscribers.size < 1) {
                        if (batch.delete(requestCopy) && batch.size < 1) {
                            _this.consumeQueue(key);
                            (_a = batch.subscription) === null || _a === void 0 ? void 0 : _a.unsubscribe();
                        }
                    }
                };
            });
        }
        return requestCopy.observable;
    };
    OperationBatcher.prototype.consumeQueue = function (key) {
        if (key === void 0) { key = ""; }
        var batch = this.batchesByKey.get(key);
        this.batchesByKey.delete(key);
        if (!batch || !batch.size) {
            return;
        }
        var operations = [];
        var forwards = [];
        var observables = [];
        var nexts = [];
        var errors = [];
        var completes = [];
        batch.forEach(function (request) {
            operations.push(request.operation);
            forwards.push(request.forward);
            observables.push(request.observable);
            nexts.push(request.next);
            errors.push(request.error);
            completes.push(request.complete);
        });
        var batchedObservable = this.batchHandler(operations, forwards) || utilities.Observable.of();
        var onError = function (error) {
            errors.forEach(function (rejecters) {
                if (rejecters) {
                    rejecters.forEach(function (e) { return e(error); });
                }
            });
        };
        batch.subscription = batchedObservable.subscribe({
            next: function (results) {
                if (!Array.isArray(results)) {
                    results = [results];
                }
                if (nexts.length !== results.length) {
                    var error = new Error("server returned results with length ".concat(results.length, ", expected length of ").concat(nexts.length));
                    error.result = results;
                    return onError(error);
                }
                results.forEach(function (result, index) {
                    if (nexts[index]) {
                        nexts[index].forEach(function (next) { return next(result); });
                    }
                });
            },
            error: onError,
            complete: function () {
                completes.forEach(function (complete) {
                    if (complete) {
                        complete.forEach(function (c) { return c(); });
                    }
                });
            },
        });
        return observables;
    };
    OperationBatcher.prototype.scheduleQueueConsumption = function (key) {
        var _this = this;
        clearTimeout(this.scheduledBatchTimerByKey.get(key));
        this.scheduledBatchTimerByKey.set(key, setTimeout(function () {
            _this.consumeQueue(key);
            _this.scheduledBatchTimerByKey.delete(key);
        }, this.batchInterval));
    };
    return OperationBatcher;
}());

var BatchLink = (function (_super) {
    tslib.__extends(BatchLink, _super);
    function BatchLink(fetchParams) {
        var _this = _super.call(this) || this;
        var _a = fetchParams || {}, batchDebounce = _a.batchDebounce, _b = _a.batchInterval, batchInterval = _b === void 0 ? 10 : _b, _c = _a.batchMax, batchMax = _c === void 0 ? 0 : _c, _d = _a.batchHandler, batchHandler = _d === void 0 ? function () { return null; } : _d, _e = _a.batchKey, batchKey = _e === void 0 ? function () { return ""; } : _e;
        _this.batcher = new OperationBatcher({
            batchDebounce: batchDebounce,
            batchInterval: batchInterval,
            batchMax: batchMax,
            batchHandler: batchHandler,
            batchKey: batchKey,
        });
        if (fetchParams.batchHandler.length <= 1) {
            _this.request = function (operation) { return _this.batcher.enqueueRequest({ operation: operation }); };
        }
        return _this;
    }
    BatchLink.prototype.request = function (operation, forward) {
        return this.batcher.enqueueRequest({
            operation: operation,
            forward: forward,
        });
    };
    return BatchLink;
}(core.ApolloLink));

exports.BatchLink = BatchLink;
exports.OperationBatcher = OperationBatcher;
//# sourceMappingURL=batch.cjs.map
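A note on the compiled constructor above: when the supplied batchHandler declares at most one parameter (the fetchParams.batchHandler.length <= 1 check), BatchLink replaces its request method with a variant that never forwards, so the link becomes terminating. A hedged sketch of that case, assuming the public "@apollo/client/link/batch", "@apollo/client/utilities", and "@apollo/client/core" entry points; the handler body is a hypothetical stub, not part of this commit:

import { BatchLink } from "@apollo/client/link/batch";
import { Observable } from "@apollo/client/utilities";
import type { FetchResult } from "@apollo/client/core";

// One-argument handler: per the check above, BatchLink overrides request(operation)
// and the link terminates the chain (forward is never consulted).
const terminatingBatchLink = new BatchLink({
  batchHandler: (operations) =>
    new Observable<FetchResult[]>((observer) => {
      // Hypothetical transport stub: one placeholder result per batched operation.
      observer.next(operations.map(() => ({ data: null })));
      observer.complete();
    }),
});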
1 graphql-subscription/node_modules/@apollo/client/link/batch/batch.cjs.map (generated, vendored, normal file)
File diff suppressed because one or more lines are too long
159 graphql-subscription/node_modules/@apollo/client/link/batch/batch.cjs.native.js (generated, vendored, normal file)
@@ -0,0 +1,159 @@
'use strict';

Object.defineProperty(exports, '__esModule', { value: true });

var tslib = require('tslib');
var core = require('../core');
var utilities = require('../../utilities');

var OperationBatcher = (function () {
    function OperationBatcher(_a) {
        var batchDebounce = _a.batchDebounce, batchInterval = _a.batchInterval, batchMax = _a.batchMax, batchHandler = _a.batchHandler, batchKey = _a.batchKey;
        this.batchesByKey = new Map();
        this.scheduledBatchTimerByKey = new Map();
        this.batchDebounce = batchDebounce;
        this.batchInterval = batchInterval;
        this.batchMax = batchMax || 0;
        this.batchHandler = batchHandler;
        this.batchKey = batchKey || (function () { return ""; });
    }
    OperationBatcher.prototype.enqueueRequest = function (request) {
        var _this = this;
        var requestCopy = tslib.__assign(tslib.__assign({}, request), { next: [], error: [], complete: [], subscribers: new Set() });
        var key = this.batchKey(request.operation);
        if (!requestCopy.observable) {
            requestCopy.observable = new utilities.Observable(function (observer) {
                var batch = _this.batchesByKey.get(key);
                if (!batch)
                    _this.batchesByKey.set(key, (batch = new Set()));
                var isFirstEnqueuedRequest = batch.size === 0;
                var isFirstSubscriber = requestCopy.subscribers.size === 0;
                requestCopy.subscribers.add(observer);
                if (isFirstSubscriber) {
                    batch.add(requestCopy);
                }
                if (observer.next) {
                    requestCopy.next.push(observer.next.bind(observer));
                }
                if (observer.error) {
                    requestCopy.error.push(observer.error.bind(observer));
                }
                if (observer.complete) {
                    requestCopy.complete.push(observer.complete.bind(observer));
                }
                if (isFirstEnqueuedRequest || _this.batchDebounce) {
                    _this.scheduleQueueConsumption(key);
                }
                if (batch.size === _this.batchMax) {
                    _this.consumeQueue(key);
                }
                return function () {
                    var _a;
                    if (requestCopy.subscribers.delete(observer) &&
                        requestCopy.subscribers.size < 1) {
                        if (batch.delete(requestCopy) && batch.size < 1) {
                            _this.consumeQueue(key);
                            (_a = batch.subscription) === null || _a === void 0 ? void 0 : _a.unsubscribe();
                        }
                    }
                };
            });
        }
        return requestCopy.observable;
    };
    OperationBatcher.prototype.consumeQueue = function (key) {
        if (key === void 0) { key = ""; }
        var batch = this.batchesByKey.get(key);
        this.batchesByKey.delete(key);
        if (!batch || !batch.size) {
            return;
        }
        var operations = [];
        var forwards = [];
        var observables = [];
        var nexts = [];
        var errors = [];
        var completes = [];
        batch.forEach(function (request) {
            operations.push(request.operation);
            forwards.push(request.forward);
            observables.push(request.observable);
            nexts.push(request.next);
            errors.push(request.error);
            completes.push(request.complete);
        });
        var batchedObservable = this.batchHandler(operations, forwards) || utilities.Observable.of();
        var onError = function (error) {
            errors.forEach(function (rejecters) {
                if (rejecters) {
                    rejecters.forEach(function (e) { return e(error); });
                }
            });
        };
        batch.subscription = batchedObservable.subscribe({
            next: function (results) {
                if (!Array.isArray(results)) {
                    results = [results];
                }
                if (nexts.length !== results.length) {
                    var error = new Error("server returned results with length ".concat(results.length, ", expected length of ").concat(nexts.length));
                    error.result = results;
                    return onError(error);
                }
                results.forEach(function (result, index) {
                    if (nexts[index]) {
                        nexts[index].forEach(function (next) { return next(result); });
                    }
                });
            },
            error: onError,
            complete: function () {
                completes.forEach(function (complete) {
                    if (complete) {
                        complete.forEach(function (c) { return c(); });
                    }
                });
            },
        });
        return observables;
    };
    OperationBatcher.prototype.scheduleQueueConsumption = function (key) {
        var _this = this;
        clearTimeout(this.scheduledBatchTimerByKey.get(key));
        this.scheduledBatchTimerByKey.set(key, setTimeout(function () {
            _this.consumeQueue(key);
            _this.scheduledBatchTimerByKey.delete(key);
        }, this.batchInterval));
    };
    return OperationBatcher;
}());

var BatchLink = (function (_super) {
    tslib.__extends(BatchLink, _super);
    function BatchLink(fetchParams) {
        var _this = _super.call(this) || this;
        var _a = fetchParams || {}, batchDebounce = _a.batchDebounce, _b = _a.batchInterval, batchInterval = _b === void 0 ? 10 : _b, _c = _a.batchMax, batchMax = _c === void 0 ? 0 : _c, _d = _a.batchHandler, batchHandler = _d === void 0 ? function () { return null; } : _d, _e = _a.batchKey, batchKey = _e === void 0 ? function () { return ""; } : _e;
        _this.batcher = new OperationBatcher({
            batchDebounce: batchDebounce,
            batchInterval: batchInterval,
            batchMax: batchMax,
            batchHandler: batchHandler,
            batchKey: batchKey,
        });
        if (fetchParams.batchHandler.length <= 1) {
            _this.request = function (operation) { return _this.batcher.enqueueRequest({ operation: operation }); };
        }
        return _this;
    }
    BatchLink.prototype.request = function (operation, forward) {
        return this.batcher.enqueueRequest({
            operation: operation,
            forward: forward,
        });
    };
    return BatchLink;
}(core.ApolloLink));

exports.BatchLink = BatchLink;
exports.OperationBatcher = OperationBatcher;
//# sourceMappingURL=batch.cjs.map
42 graphql-subscription/node_modules/@apollo/client/link/batch/batchLink.d.ts (generated, vendored, normal file)
@@ -0,0 +1,42 @@
import type { Operation, FetchResult, NextLink } from "../core/index.js";
import { ApolloLink } from "../core/index.js";
import type { Observable } from "../../utilities/index.js";
import type { BatchHandler } from "./batching.js";
export type { BatchableRequest, BatchHandler } from "./batching.js";
export { OperationBatcher } from "./batching.js";
export declare namespace BatchLink {
    interface Options {
        /**
         * The interval at which to batch, in milliseconds.
         *
         * Defaults to 10.
         */
        batchInterval?: number;
        /**
         * "batchInterval" is a throttling behavior by default, if you instead wish
         * to debounce outbound requests, set "batchDebounce" to true. More useful
         * for mutations than queries.
         */
        batchDebounce?: boolean;
        /**
         * The maximum number of operations to include in one fetch.
         *
         * Defaults to 0 (infinite operations within the interval).
         */
        batchMax?: number;
        /**
         * The handler that should execute a batch of operations.
         */
        batchHandler?: BatchHandler;
        /**
         * creates the key for a batch
         */
        batchKey?: (operation: Operation) => string;
    }
}
export declare class BatchLink extends ApolloLink {
    private batcher;
    constructor(fetchParams?: BatchLink.Options);
    request(operation: Operation, forward?: NextLink): Observable<FetchResult> | null;
}
//# sourceMappingURL=batchLink.d.ts.map
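The options declared above map directly onto the OperationBatcher fields seen in the compiled code. A hedged configuration sketch; the handler and the uri context key are hypothetical illustrations, not part of this commit:

import { BatchLink } from "@apollo/client/link/batch";
import type { BatchHandler } from "@apollo/client/link/batch";

// Hypothetical handler that would send `operations` somewhere as a single request.
declare const sendBatchedOperations: BatchHandler;

const batchLink = new BatchLink({
  batchInterval: 20,    // collect operations for up to 20 ms (default 10)
  batchDebounce: true,  // restart the timer on each enqueue instead of throttling
  batchMax: 5,          // flush immediately once 5 operations are queued (0 = no cap)
  batchHandler: sendBatchedOperations,
  batchKey: (operation) => operation.getContext().uri || "", // assumed context field
});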
32 graphql-subscription/node_modules/@apollo/client/link/batch/batchLink.js (generated, vendored, normal file)
@@ -0,0 +1,32 @@
import { __extends } from "tslib";
import { ApolloLink } from "../core/index.js";
import { OperationBatcher } from "./batching.js";
export { OperationBatcher } from "./batching.js";
var BatchLink = /** @class */ (function (_super) {
    __extends(BatchLink, _super);
    function BatchLink(fetchParams) {
        var _this = _super.call(this) || this;
        var _a = fetchParams || {}, batchDebounce = _a.batchDebounce, _b = _a.batchInterval, batchInterval = _b === void 0 ? 10 : _b, _c = _a.batchMax, batchMax = _c === void 0 ? 0 : _c, _d = _a.batchHandler, batchHandler = _d === void 0 ? function () { return null; } : _d, _e = _a.batchKey, batchKey = _e === void 0 ? function () { return ""; } : _e;
        _this.batcher = new OperationBatcher({
            batchDebounce: batchDebounce,
            batchInterval: batchInterval,
            batchMax: batchMax,
            batchHandler: batchHandler,
            batchKey: batchKey,
        });
        //make this link terminating
        if (fetchParams.batchHandler.length <= 1) {
            _this.request = function (operation) { return _this.batcher.enqueueRequest({ operation: operation }); };
        }
        return _this;
    }
    BatchLink.prototype.request = function (operation, forward) {
        return this.batcher.enqueueRequest({
            operation: operation,
            forward: forward,
        });
    };
    return BatchLink;
}(ApolloLink));
export { BatchLink };
//# sourceMappingURL=batchLink.js.map
1 graphql-subscription/node_modules/@apollo/client/link/batch/batchLink.js.map (generated, vendored, normal file)
@@ -0,0 +1 @@
{"version":3,"file":"batchLink.js","sourceRoot":"","sources":["../../../src/link/batch/batchLink.ts"],"names":[],"mappings":";AACA,OAAO,EAAE,UAAU,EAAE,MAAM,kBAAkB,CAAC;AAG9C,OAAO,EAAE,gBAAgB,EAAE,MAAM,eAAe,CAAC;AAEjD,OAAO,EAAE,gBAAgB,EAAE,MAAM,eAAe,CAAC;AAqCjD;IAA+B,6BAAU;IAGvC,mBAAY,WAA+B;QACzC,YAAA,MAAK,WAAE,SAAC;QAEF,IAAA,KAMF,WAAW,IAAI,EAAE,EALnB,aAAa,mBAAA,EACb,qBAAkB,EAAlB,aAAa,mBAAG,EAAE,KAAA,EAClB,gBAAY,EAAZ,QAAQ,mBAAG,CAAC,KAAA,EACZ,oBAAyB,EAAzB,YAAY,mBAAG,cAAM,OAAA,IAAI,EAAJ,CAAI,KAAA,EACzB,gBAAmB,EAAnB,QAAQ,mBAAG,cAAM,OAAA,EAAE,EAAF,CAAE,KACA,CAAC;QAEtB,KAAI,CAAC,OAAO,GAAG,IAAI,gBAAgB,CAAC;YAClC,aAAa,eAAA;YACb,aAAa,eAAA;YACb,QAAQ,UAAA;YACR,YAAY,cAAA;YACZ,QAAQ,UAAA;SACT,CAAC,CAAC;QAEH,4BAA4B;QAC5B,IAAI,WAAY,CAAC,YAAa,CAAC,MAAM,IAAI,CAAC,EAAE,CAAC;YAC3C,KAAI,CAAC,OAAO,GAAG,UAAC,SAAS,IAAK,OAAA,KAAI,CAAC,OAAO,CAAC,cAAc,CAAC,EAAE,SAAS,WAAA,EAAE,CAAC,EAA1C,CAA0C,CAAC;QAC3E,CAAC;;IACH,CAAC;IAEM,2BAAO,GAAd,UACE,SAAoB,EACpB,OAAkB;QAElB,OAAO,IAAI,CAAC,OAAO,CAAC,cAAc,CAAC;YACjC,SAAS,WAAA;YACT,OAAO,SAAA;SACR,CAAC,CAAC;IACL,CAAC;IACH,gBAAC;AAAD,CAAC,AArCD,CAA+B,UAAU,GAqCxC","sourcesContent":["import type { Operation, FetchResult, NextLink } from \"../core/index.js\";\nimport { ApolloLink } from \"../core/index.js\";\nimport type { Observable } from \"../../utilities/index.js\";\nimport type { BatchHandler } from \"./batching.js\";\nimport { OperationBatcher } from \"./batching.js\";\nexport type { BatchableRequest, BatchHandler } from \"./batching.js\";\nexport { OperationBatcher } from \"./batching.js\";\n\nexport namespace BatchLink {\n export interface Options {\n /**\n * The interval at which to batch, in milliseconds.\n *\n * Defaults to 10.\n */\n batchInterval?: number;\n\n /**\n * \"batchInterval\" is a throttling behavior by default, if you instead wish\n * to debounce outbound requests, set \"batchDebounce\" to true. More useful\n * for mutations than queries.\n */\n batchDebounce?: boolean;\n\n /**\n * The maximum number of operations to include in one fetch.\n *\n * Defaults to 0 (infinite operations within the interval).\n */\n batchMax?: number;\n\n /**\n * The handler that should execute a batch of operations.\n */\n batchHandler?: BatchHandler;\n\n /**\n * creates the key for a batch\n */\n batchKey?: (operation: Operation) => string;\n }\n}\n\nexport class BatchLink extends ApolloLink {\n private batcher: OperationBatcher;\n\n constructor(fetchParams?: BatchLink.Options) {\n super();\n\n const {\n batchDebounce,\n batchInterval = 10,\n batchMax = 0,\n batchHandler = () => null,\n batchKey = () => \"\",\n } = fetchParams || {};\n\n this.batcher = new OperationBatcher({\n batchDebounce,\n batchInterval,\n batchMax,\n batchHandler,\n batchKey,\n });\n\n //make this link terminating\n if (fetchParams!.batchHandler!.length <= 1) {\n this.request = (operation) => this.batcher.enqueueRequest({ operation });\n }\n }\n\n public request(\n operation: Operation,\n forward?: NextLink\n ): Observable<FetchResult> | null {\n return this.batcher.enqueueRequest({\n operation,\n forward,\n });\n }\n}\n"]}
27 graphql-subscription/node_modules/@apollo/client/link/batch/batching.d.ts (generated, vendored, normal file)
@@ -0,0 +1,27 @@
import type { FetchResult, NextLink, Operation } from "../core/index.js";
import { Observable } from "../../utilities/index.js";
export type BatchHandler = (operations: Operation[], forward?: (NextLink | undefined)[]) => Observable<FetchResult[]> | null;
export interface BatchableRequest {
    operation: Operation;
    forward?: NextLink;
}
export declare class OperationBatcher {
    private batchesByKey;
    private scheduledBatchTimerByKey;
    private batchDebounce?;
    private batchInterval?;
    private batchMax;
    private batchHandler;
    private batchKey;
    constructor({ batchDebounce, batchInterval, batchMax, batchHandler, batchKey, }: {
        batchDebounce?: boolean;
        batchInterval?: number;
        batchMax?: number;
        batchHandler: BatchHandler;
        batchKey?: (operation: Operation) => string;
    });
    enqueueRequest(request: BatchableRequest): Observable<FetchResult>;
    consumeQueue(key?: string): (Observable<FetchResult> | undefined)[] | undefined;
    private scheduleQueueConsumption;
}
//# sourceMappingURL=batching.d.ts.map
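OperationBatcher is exported alongside BatchLink, so the class declared above can also be driven directly. A self-contained, hedged sketch under the same assumed entry points; the stub operation is a cast placeholder, not a real GraphQL operation:

import { OperationBatcher } from "@apollo/client/link/batch";
import { Observable } from "@apollo/client/utilities";
import type { FetchResult, Operation } from "@apollo/client/core";

const batcher = new OperationBatcher({
  batchInterval: 10,
  batchMax: 2, // consumeQueue fires as soon as two requests share a batch key
  batchHandler: (operations) =>
    new Observable<FetchResult[]>((observer) => {
      // Echo one result per operation, preserving order as consumeQueue expects.
      observer.next(operations.map((op) => ({ data: { operationName: op.operationName } })));
      observer.complete();
    }),
});

// enqueueRequest returns an Observable; the request only joins a batch once subscribed.
const stubOperation = { operationName: "Example" } as unknown as Operation;
batcher.enqueueRequest({ operation: stubOperation }).subscribe((result) => {
  console.log(result.data);
});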
148 graphql-subscription/node_modules/@apollo/client/link/batch/batching.js (generated, vendored, normal file)
@@ -0,0 +1,148 @@
import { __assign } from "tslib";
import { Observable } from "../../utilities/index.js";
// QueryBatcher doesn't fire requests immediately. Requests that were enqueued within
// a certain amount of time (configurable through `batchInterval`) will be batched together
// into one query.
var OperationBatcher = /** @class */ (function () {
    function OperationBatcher(_a) {
        var batchDebounce = _a.batchDebounce, batchInterval = _a.batchInterval, batchMax = _a.batchMax, batchHandler = _a.batchHandler, batchKey = _a.batchKey;
        // Queue on which the QueryBatcher will operate on a per-tick basis.
        this.batchesByKey = new Map();
        this.scheduledBatchTimerByKey = new Map();
        this.batchDebounce = batchDebounce;
        this.batchInterval = batchInterval;
        this.batchMax = batchMax || 0;
        this.batchHandler = batchHandler;
        this.batchKey = batchKey || (function () { return ""; });
    }
    OperationBatcher.prototype.enqueueRequest = function (request) {
        var _this = this;
        var requestCopy = __assign(__assign({}, request), { next: [], error: [], complete: [], subscribers: new Set() });
        var key = this.batchKey(request.operation);
        if (!requestCopy.observable) {
            requestCopy.observable = new Observable(function (observer) {
                var batch = _this.batchesByKey.get(key);
                if (!batch)
                    _this.batchesByKey.set(key, (batch = new Set()));
                // These booleans seem to me (@benjamn) like they might always be the
                // same (and thus we could do with only one of them), but I'm not 100%
                // sure about that.
                var isFirstEnqueuedRequest = batch.size === 0;
                var isFirstSubscriber = requestCopy.subscribers.size === 0;
                requestCopy.subscribers.add(observer);
                if (isFirstSubscriber) {
                    batch.add(requestCopy);
                }
                // called for each subscriber, so need to save all listeners (next, error, complete)
                if (observer.next) {
                    requestCopy.next.push(observer.next.bind(observer));
                }
                if (observer.error) {
                    requestCopy.error.push(observer.error.bind(observer));
                }
                if (observer.complete) {
                    requestCopy.complete.push(observer.complete.bind(observer));
                }
                // The first enqueued request triggers the queue consumption after `batchInterval` milliseconds.
                if (isFirstEnqueuedRequest || _this.batchDebounce) {
                    _this.scheduleQueueConsumption(key);
                }
                // When amount of requests reaches `batchMax`, trigger the queue consumption without waiting on the `batchInterval`.
                if (batch.size === _this.batchMax) {
                    _this.consumeQueue(key);
                }
                return function () {
                    var _a;
                    // If this is last subscriber for this request, remove request from queue
                    if (requestCopy.subscribers.delete(observer) &&
                        requestCopy.subscribers.size < 1) {
                        // If this is last request from queue, remove queue entirely
                        if (batch.delete(requestCopy) && batch.size < 1) {
                            _this.consumeQueue(key);
                            // If queue was in flight, cancel it
                            (_a = batch.subscription) === null || _a === void 0 ? void 0 : _a.unsubscribe();
                        }
                    }
                };
            });
        }
        return requestCopy.observable;
    };
    // Consumes the queue.
    // Returns a list of promises (one for each query).
    OperationBatcher.prototype.consumeQueue = function (key) {
        if (key === void 0) { key = ""; }
        var batch = this.batchesByKey.get(key);
        // Delete this batch and process it below.
        this.batchesByKey.delete(key);
        if (!batch || !batch.size) {
            // No requests to be processed.
            return;
        }
        var operations = [];
        var forwards = [];
        var observables = [];
        var nexts = [];
        var errors = [];
        var completes = [];
        // Even though batch is a Set, it preserves the order of first insertion
        // when iterating (per ECMAScript specification), so these requests will be
        // handled in the order they were enqueued (minus any deleted ones).
        batch.forEach(function (request) {
            operations.push(request.operation);
            forwards.push(request.forward);
            observables.push(request.observable);
            nexts.push(request.next);
            errors.push(request.error);
            completes.push(request.complete);
        });
        var batchedObservable = this.batchHandler(operations, forwards) || Observable.of();
        var onError = function (error) {
            //each callback list in batch
            errors.forEach(function (rejecters) {
                if (rejecters) {
                    //each subscriber to request
                    rejecters.forEach(function (e) { return e(error); });
                }
            });
        };
        batch.subscription = batchedObservable.subscribe({
            next: function (results) {
                if (!Array.isArray(results)) {
                    results = [results];
                }
                if (nexts.length !== results.length) {
                    var error = new Error("server returned results with length ".concat(results.length, ", expected length of ").concat(nexts.length));
                    error.result = results;
                    return onError(error);
                }
                results.forEach(function (result, index) {
                    if (nexts[index]) {
                        nexts[index].forEach(function (next) { return next(result); });
                    }
                });
            },
            error: onError,
            complete: function () {
                completes.forEach(function (complete) {
                    if (complete) {
                        //each subscriber to request
                        complete.forEach(function (c) { return c(); });
                    }
                });
            },
        });
        return observables;
    };
    OperationBatcher.prototype.scheduleQueueConsumption = function (key) {
        var _this = this;
        clearTimeout(this.scheduledBatchTimerByKey.get(key));
        this.scheduledBatchTimerByKey.set(key, setTimeout(function () {
            _this.consumeQueue(key);
            _this.scheduledBatchTimerByKey.delete(key);
        }, this.batchInterval));
    };
    return OperationBatcher;
}());
export { OperationBatcher };
//# sourceMappingURL=batching.js.map
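The next handler in consumeQueue above enforces a one-to-one alignment between the batch handler's results and the enqueued operations, otherwise it raises the "server returned results with length …" error. A hedged sketch of a handler that respects that contract; executeOperation is a hypothetical per-operation transport, not something this commit provides:

import { Observable } from "@apollo/client/utilities";
import type { BatchHandler } from "@apollo/client/link/batch";
import type { FetchResult, Operation } from "@apollo/client/core";

// Hypothetical per-operation executor; replace with a real transport.
declare function executeOperation(operation: Operation): Promise<FetchResult>;

const alignedBatchHandler: BatchHandler = (operations) =>
  new Observable<FetchResult[]>((observer) => {
    Promise.all(operations.map(executeOperation))
      .then((results) => {
        // Same length and order as `operations`, as the next handler requires.
        observer.next(results);
        observer.complete();
      })
      .catch((err) => observer.error(err));
  });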
1 graphql-subscription/node_modules/@apollo/client/link/batch/batching.js.map (generated, vendored, normal file)
File diff suppressed because one or more lines are too long
2 graphql-subscription/node_modules/@apollo/client/link/batch/index.d.ts (generated, vendored, normal file)
@@ -0,0 +1,2 @@
export * from "./batchLink.js";
//# sourceMappingURL=index.d.ts.map
2 graphql-subscription/node_modules/@apollo/client/link/batch/index.js (generated, vendored, normal file)
@@ -0,0 +1,2 @@
export * from "./batchLink.js";
//# sourceMappingURL=index.js.map
1 graphql-subscription/node_modules/@apollo/client/link/batch/index.js.map (generated, vendored, normal file)
@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/link/batch/index.ts"],"names":[],"mappings":"AAAA,cAAc,gBAAgB,CAAC","sourcesContent":["export * from \"./batchLink.js\";\n"]}
8 graphql-subscription/node_modules/@apollo/client/link/batch/package.json (generated, vendored, normal file)
@@ -0,0 +1,8 @@
{
  "name": "@apollo/client/link/batch",
  "type": "module",
  "main": "batch.cjs",
  "module": "index.js",
  "types": "index.d.ts",
  "sideEffects": false
}