diff --git a/dist/index.js b/dist/index.js
index d1395af..714ab36 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -459,39 +459,47 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.fetchGitHubApiV3 = exports.fetchGitHubGraphQL = void 0;
-const node_fetch_1 = __importDefault(__nccwpck_require__(4429));
+const https_1 = __importDefault(__nccwpck_require__(5687));
 const env_1 = __nccwpck_require__(5284);
-const fetchGitHubGraphQL = (query, variables = {}) => __awaiter(void 0, void 0, void 0, function* () {
-    const response = yield (0, node_fetch_1.default)('https://api.github.com/graphql', {
-        method: 'POST',
-        headers: {
-            Authorization: `token ${(0, env_1.getEnvVar)('GITHUB_TOKEN')}`
-        },
-        body: JSON.stringify({ query, variables })
+const request = (method, url, body) => __awaiter(void 0, void 0, void 0, function* () {
+    return new Promise((resolve, reject) => {
+        const req = https_1.default.request(url, {
+            method,
+            headers: {
+                Authorization: `token ${(0, env_1.getEnvVar)('GITHUB_TOKEN')}`,
+                Accept: 'application/json',
+                'User-Agent': 'Connehito/deploying-marker-with-issue'
+            },
+            timeout: 10000
+        });
+        req.on('response', res => {
+            if (res.statusCode !== 200) {
+                throw new Error(`Invalid status code: ${res.statusCode}`);
+            }
+            try {
+                const chunks = [];
+                res.on('data', chunk => {
+                    chunks.push(chunk);
+                });
+                res.on('end', () => {
+                    resolve(JSON.parse(Buffer.concat(chunks).toString('utf-8')));
+                });
+                res.on('error', reject);
+            }
+            catch (err) {
+                reject(err);
+            }
+        });
+        req.on('error', reject);
+        req.write(body);
+        req.end();
     });
-    if (response.status !== 200)
-        throw new Error(`Invalid status code: ${response.status}`);
-    const body = yield response.json();
-    if (typeof body === 'object' && body != null && 'errors' in body) {
-        throw new Error(`Response has error: ${JSON.stringify(body)}`);
-    }
-    return body;
 });
-exports.fetchGitHubGraphQL = fetchGitHubGraphQL;
-const fetchGitHubApiV3 = (method, path, body = '') => __awaiter(void 0, void 0, void 0, function* () {
-    var _a;
-    const response = yield (0, node_fetch_1.default)(`https://api.github.com${path}`, {
-        method,
-        headers: {
-            Accept: 'application/vnd.github.v3+json',
-            Authorization: `token ${(0, env_1.getEnvVar)('GITHUB_TOKEN')}`
-        },
-        body
-    });
-    if (!response.ok)
-        throw new Error(`Invalid status code: ${response.status} => ${(_a = (yield response.text())) !== null && _a !== void 0 ? _a : '(Empty body)'}`);
-    return response;
+const fetchGitHubGraphQL = (query, variables = {}) => __awaiter(void 0, void 0, void 0, function* () {
+    return request('POST', 'https://api.github.com/graphql', JSON.stringify({ query, variables }));
 });
+exports.fetchGitHubGraphQL = fetchGitHubGraphQL;
+const fetchGitHubApiV3 = (method, path, body = '') => __awaiter(void 0, void 0, void 0, function* () { return request(method, `https://api.github.com${path}`, body); });
 exports.fetchGitHubApiV3 = fetchGitHubApiV3;
@@ -710,12 +718,12 @@ const Schema = {
 };
 const createLabel = (args) => __awaiter(void 0, void 0, void 0, function* () {
     const { repoOwner, repoName, labelName, labelColor, labelDescription } = args;
-    const response = yield (0, common_1.fetchGitHubApiV3)('POST', `/repos/${repoOwner}/${repoName}/labels`, JSON.stringify({
+    const data = yield (0, common_1.fetchGitHubApiV3)('POST', `/repos/${repoOwner}/${repoName}/labels`, JSON.stringify({
         name: labelName,
         color: labelColor !== null && labelColor !== void 0 ?
labelColor : label_1.DefaultLabelColor, description: labelDescription !== null && labelDescription !== void 0 ? labelDescription : label_1.DefaultLabelDescription })); - return (0, validater_1.buildValidator)(Schema)(yield response.json()); + return (0, validater_1.buildValidator)(Schema)(data); }); exports.createLabel = createLabel; @@ -8498,29 +8506,6 @@ function escapeJsonPtr(str) { } -/***/ }), - -/***/ 7760: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -/*! node-domexception. MIT License. Jimmy Wärting */ - -if (!globalThis.DOMException) { - try { - const { MessageChannel } = __nccwpck_require__(1267), - port = new MessageChannel().port1, - ab = new ArrayBuffer() - port.postMessage(ab, [ab, ab]) - } catch (err) { - err.constructor.name === 'DOMException' && ( - globalThis.DOMException = err.constructor - ) - } -} - -module.exports = globalThis.DOMException - - /***/ }), /***/ 4294: @@ -10898,7080 +10883,91 @@ exports["default"] = _default; /***/ }), -/***/ 1452: -/***/ (function(__unused_webpack_module, exports) { +/***/ 9491: +/***/ ((module) => { -/** - * web-streams-polyfill v3.2.1 - */ -(function (global, factory) { - true ? factory(exports) : - 0; -}(this, (function (exports) { 'use strict'; +"use strict"; +module.exports = require("assert"); - /// - const SymbolPolyfill = typeof Symbol === 'function' && typeof Symbol.iterator === 'symbol' ? - Symbol : - description => `Symbol(${description})`; +/***/ }), - /// - function noop() { - return undefined; - } - function getGlobals() { - if (typeof self !== 'undefined') { - return self; - } - else if (typeof window !== 'undefined') { - return window; - } - else if (typeof global !== 'undefined') { - return global; - } - return undefined; - } - const globals = getGlobals(); +/***/ 6113: +/***/ ((module) => { - function typeIsObject(x) { - return (typeof x === 'object' && x !== null) || typeof x === 'function'; - } - const rethrowAssertionErrorRejection = noop; +"use strict"; +module.exports = require("crypto"); - const originalPromise = Promise; - const originalPromiseThen = Promise.prototype.then; - const originalPromiseResolve = Promise.resolve.bind(originalPromise); - const originalPromiseReject = Promise.reject.bind(originalPromise); - function newPromise(executor) { - return new originalPromise(executor); - } - function promiseResolvedWith(value) { - return originalPromiseResolve(value); - } - function promiseRejectedWith(reason) { - return originalPromiseReject(reason); - } - function PerformPromiseThen(promise, onFulfilled, onRejected) { - // There doesn't appear to be any way to correctly emulate the behaviour from JavaScript, so this is just an - // approximation. 
- return originalPromiseThen.call(promise, onFulfilled, onRejected); - } - function uponPromise(promise, onFulfilled, onRejected) { - PerformPromiseThen(PerformPromiseThen(promise, onFulfilled, onRejected), undefined, rethrowAssertionErrorRejection); - } - function uponFulfillment(promise, onFulfilled) { - uponPromise(promise, onFulfilled); - } - function uponRejection(promise, onRejected) { - uponPromise(promise, undefined, onRejected); - } - function transformPromiseWith(promise, fulfillmentHandler, rejectionHandler) { - return PerformPromiseThen(promise, fulfillmentHandler, rejectionHandler); - } - function setPromiseIsHandledToTrue(promise) { - PerformPromiseThen(promise, undefined, rethrowAssertionErrorRejection); - } - const queueMicrotask = (() => { - const globalQueueMicrotask = globals && globals.queueMicrotask; - if (typeof globalQueueMicrotask === 'function') { - return globalQueueMicrotask; - } - const resolvedPromise = promiseResolvedWith(undefined); - return (fn) => PerformPromiseThen(resolvedPromise, fn); - })(); - function reflectCall(F, V, args) { - if (typeof F !== 'function') { - throw new TypeError('Argument is not a function'); - } - return Function.prototype.apply.call(F, V, args); - } - function promiseCall(F, V, args) { - try { - return promiseResolvedWith(reflectCall(F, V, args)); - } - catch (value) { - return promiseRejectedWith(value); - } - } +/***/ }), - // Original from Chromium - // https://chromium.googlesource.com/chromium/src/+/0aee4434a4dba42a42abaea9bfbc0cd196a63bc1/third_party/blink/renderer/core/streams/SimpleQueue.js - const QUEUE_MAX_ARRAY_SIZE = 16384; - /** - * Simple queue structure. - * - * Avoids scalability issues with using a packed array directly by using - * multiple arrays in a linked list and keeping the array size bounded. - */ - class SimpleQueue { - constructor() { - this._cursor = 0; - this._size = 0; - // _front and _back are always defined. - this._front = { - _elements: [], - _next: undefined - }; - this._back = this._front; - // The cursor is used to avoid calling Array.shift(). - // It contains the index of the front element of the array inside the - // front-most node. It is always in the range [0, QUEUE_MAX_ARRAY_SIZE). - this._cursor = 0; - // When there is only one node, size === elements.length - cursor. - this._size = 0; - } - get length() { - return this._size; - } - // For exception safety, this method is structured in order: - // 1. Read state - // 2. Calculate required state mutations - // 3. Perform state mutations - push(element) { - const oldBack = this._back; - let newBack = oldBack; - if (oldBack._elements.length === QUEUE_MAX_ARRAY_SIZE - 1) { - newBack = { - _elements: [], - _next: undefined - }; - } - // push() is the mutation most likely to throw an exception, so it - // goes first. - oldBack._elements.push(element); - if (newBack !== oldBack) { - this._back = newBack; - oldBack._next = newBack; - } - ++this._size; - } - // Like push(), shift() follows the read -> calculate -> mutate pattern for - // exception safety. - shift() { // must not be called on an empty queue - const oldFront = this._front; - let newFront = oldFront; - const oldCursor = this._cursor; - let newCursor = oldCursor + 1; - const elements = oldFront._elements; - const element = elements[oldCursor]; - if (newCursor === QUEUE_MAX_ARRAY_SIZE) { - newFront = oldFront._next; - newCursor = 0; - } - // No mutations before this point. 
- --this._size; - this._cursor = newCursor; - if (oldFront !== newFront) { - this._front = newFront; - } - // Permit shifted element to be garbage collected. - elements[oldCursor] = undefined; - return element; - } - // The tricky thing about forEach() is that it can be called - // re-entrantly. The queue may be mutated inside the callback. It is easy to - // see that push() within the callback has no negative effects since the end - // of the queue is checked for on every iteration. If shift() is called - // repeatedly within the callback then the next iteration may return an - // element that has been removed. In this case the callback will be called - // with undefined values until we either "catch up" with elements that still - // exist or reach the back of the queue. - forEach(callback) { - let i = this._cursor; - let node = this._front; - let elements = node._elements; - while (i !== elements.length || node._next !== undefined) { - if (i === elements.length) { - node = node._next; - elements = node._elements; - i = 0; - if (elements.length === 0) { - break; - } - } - callback(elements[i]); - ++i; - } - } - // Return the element that would be returned if shift() was called now, - // without modifying the queue. - peek() { // must not be called on an empty queue - const front = this._front; - const cursor = this._cursor; - return front._elements[cursor]; - } - } +/***/ 2361: +/***/ ((module) => { - function ReadableStreamReaderGenericInitialize(reader, stream) { - reader._ownerReadableStream = stream; - stream._reader = reader; - if (stream._state === 'readable') { - defaultReaderClosedPromiseInitialize(reader); - } - else if (stream._state === 'closed') { - defaultReaderClosedPromiseInitializeAsResolved(reader); - } - else { - defaultReaderClosedPromiseInitializeAsRejected(reader, stream._storedError); - } - } - // A client of ReadableStreamDefaultReader and ReadableStreamBYOBReader may use these functions directly to bypass state - // check. - function ReadableStreamReaderGenericCancel(reader, reason) { - const stream = reader._ownerReadableStream; - return ReadableStreamCancel(stream, reason); - } - function ReadableStreamReaderGenericRelease(reader) { - if (reader._ownerReadableStream._state === 'readable') { - defaultReaderClosedPromiseReject(reader, new TypeError(`Reader was released and can no longer be used to monitor the stream's closedness`)); - } - else { - defaultReaderClosedPromiseResetToRejected(reader, new TypeError(`Reader was released and can no longer be used to monitor the stream's closedness`)); - } - reader._ownerReadableStream._reader = undefined; - reader._ownerReadableStream = undefined; - } - // Helper functions for the readers. - function readerLockException(name) { - return new TypeError('Cannot ' + name + ' a stream using a released reader'); - } - // Helper functions for the ReadableStreamDefaultReader. 
- function defaultReaderClosedPromiseInitialize(reader) { - reader._closedPromise = newPromise((resolve, reject) => { - reader._closedPromise_resolve = resolve; - reader._closedPromise_reject = reject; - }); - } - function defaultReaderClosedPromiseInitializeAsRejected(reader, reason) { - defaultReaderClosedPromiseInitialize(reader); - defaultReaderClosedPromiseReject(reader, reason); - } - function defaultReaderClosedPromiseInitializeAsResolved(reader) { - defaultReaderClosedPromiseInitialize(reader); - defaultReaderClosedPromiseResolve(reader); - } - function defaultReaderClosedPromiseReject(reader, reason) { - if (reader._closedPromise_reject === undefined) { - return; - } - setPromiseIsHandledToTrue(reader._closedPromise); - reader._closedPromise_reject(reason); - reader._closedPromise_resolve = undefined; - reader._closedPromise_reject = undefined; - } - function defaultReaderClosedPromiseResetToRejected(reader, reason) { - defaultReaderClosedPromiseInitializeAsRejected(reader, reason); - } - function defaultReaderClosedPromiseResolve(reader) { - if (reader._closedPromise_resolve === undefined) { - return; - } - reader._closedPromise_resolve(undefined); - reader._closedPromise_resolve = undefined; - reader._closedPromise_reject = undefined; - } +"use strict"; +module.exports = require("events"); - const AbortSteps = SymbolPolyfill('[[AbortSteps]]'); - const ErrorSteps = SymbolPolyfill('[[ErrorSteps]]'); - const CancelSteps = SymbolPolyfill('[[CancelSteps]]'); - const PullSteps = SymbolPolyfill('[[PullSteps]]'); +/***/ }), - /// - // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isFinite#Polyfill - const NumberIsFinite = Number.isFinite || function (x) { - return typeof x === 'number' && isFinite(x); - }; +/***/ 7147: +/***/ ((module) => { - /// - // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/trunc#Polyfill - const MathTrunc = Math.trunc || function (v) { - return v < 0 ? Math.ceil(v) : Math.floor(v); - }; +"use strict"; +module.exports = require("fs"); - // https://heycam.github.io/webidl/#idl-dictionaries - function isDictionary(x) { - return typeof x === 'object' || typeof x === 'function'; - } - function assertDictionary(obj, context) { - if (obj !== undefined && !isDictionary(obj)) { - throw new TypeError(`${context} is not an object.`); - } - } - // https://heycam.github.io/webidl/#idl-callback-functions - function assertFunction(x, context) { - if (typeof x !== 'function') { - throw new TypeError(`${context} is not a function.`); - } - } - // https://heycam.github.io/webidl/#idl-object - function isObject(x) { - return (typeof x === 'object' && x !== null) || typeof x === 'function'; - } - function assertObject(x, context) { - if (!isObject(x)) { - throw new TypeError(`${context} is not an object.`); - } - } - function assertRequiredArgument(x, position, context) { - if (x === undefined) { - throw new TypeError(`Parameter ${position} is required in '${context}'.`); - } - } - function assertRequiredField(x, field, context) { - if (x === undefined) { - throw new TypeError(`${field} is required in '${context}'.`); - } - } - // https://heycam.github.io/webidl/#idl-unrestricted-double - function convertUnrestrictedDouble(value) { - return Number(value); - } - function censorNegativeZero(x) { - return x === 0 ? 
0 : x; - } - function integerPart(x) { - return censorNegativeZero(MathTrunc(x)); - } - // https://heycam.github.io/webidl/#idl-unsigned-long-long - function convertUnsignedLongLongWithEnforceRange(value, context) { - const lowerBound = 0; - const upperBound = Number.MAX_SAFE_INTEGER; - let x = Number(value); - x = censorNegativeZero(x); - if (!NumberIsFinite(x)) { - throw new TypeError(`${context} is not a finite number`); - } - x = integerPart(x); - if (x < lowerBound || x > upperBound) { - throw new TypeError(`${context} is outside the accepted range of ${lowerBound} to ${upperBound}, inclusive`); - } - if (!NumberIsFinite(x) || x === 0) { - return 0; - } - // TODO Use BigInt if supported? - // let xBigInt = BigInt(integerPart(x)); - // xBigInt = BigInt.asUintN(64, xBigInt); - // return Number(xBigInt); - return x; - } +/***/ }), - function assertReadableStream(x, context) { - if (!IsReadableStream(x)) { - throw new TypeError(`${context} is not a ReadableStream.`); - } - } +/***/ 3685: +/***/ ((module) => { - // Abstract operations for the ReadableStream. - function AcquireReadableStreamDefaultReader(stream) { - return new ReadableStreamDefaultReader(stream); - } - // ReadableStream API exposed for controllers. - function ReadableStreamAddReadRequest(stream, readRequest) { - stream._reader._readRequests.push(readRequest); - } - function ReadableStreamFulfillReadRequest(stream, chunk, done) { - const reader = stream._reader; - const readRequest = reader._readRequests.shift(); - if (done) { - readRequest._closeSteps(); - } - else { - readRequest._chunkSteps(chunk); - } - } - function ReadableStreamGetNumReadRequests(stream) { - return stream._reader._readRequests.length; - } - function ReadableStreamHasDefaultReader(stream) { - const reader = stream._reader; - if (reader === undefined) { - return false; - } - if (!IsReadableStreamDefaultReader(reader)) { - return false; - } - return true; - } - /** - * A default reader vended by a {@link ReadableStream}. - * - * @public - */ - class ReadableStreamDefaultReader { - constructor(stream) { - assertRequiredArgument(stream, 1, 'ReadableStreamDefaultReader'); - assertReadableStream(stream, 'First parameter'); - if (IsReadableStreamLocked(stream)) { - throw new TypeError('This stream has already been locked for exclusive reading by another reader'); - } - ReadableStreamReaderGenericInitialize(this, stream); - this._readRequests = new SimpleQueue(); - } - /** - * Returns a promise that will be fulfilled when the stream becomes closed, - * or rejected if the stream ever errors or the reader's lock is released before the stream finishes closing. - */ - get closed() { - if (!IsReadableStreamDefaultReader(this)) { - return promiseRejectedWith(defaultReaderBrandCheckException('closed')); - } - return this._closedPromise; - } - /** - * If the reader is active, behaves the same as {@link ReadableStream.cancel | stream.cancel(reason)}. - */ - cancel(reason = undefined) { - if (!IsReadableStreamDefaultReader(this)) { - return promiseRejectedWith(defaultReaderBrandCheckException('cancel')); - } - if (this._ownerReadableStream === undefined) { - return promiseRejectedWith(readerLockException('cancel')); - } - return ReadableStreamReaderGenericCancel(this, reason); - } - /** - * Returns a promise that allows access to the next chunk from the stream's internal queue, if available. - * - * If reading a chunk causes the queue to become empty, more data will be pulled from the underlying source. 
- */ - read() { - if (!IsReadableStreamDefaultReader(this)) { - return promiseRejectedWith(defaultReaderBrandCheckException('read')); - } - if (this._ownerReadableStream === undefined) { - return promiseRejectedWith(readerLockException('read from')); - } - let resolvePromise; - let rejectPromise; - const promise = newPromise((resolve, reject) => { - resolvePromise = resolve; - rejectPromise = reject; - }); - const readRequest = { - _chunkSteps: chunk => resolvePromise({ value: chunk, done: false }), - _closeSteps: () => resolvePromise({ value: undefined, done: true }), - _errorSteps: e => rejectPromise(e) - }; - ReadableStreamDefaultReaderRead(this, readRequest); - return promise; - } - /** - * Releases the reader's lock on the corresponding stream. After the lock is released, the reader is no longer active. - * If the associated stream is errored when the lock is released, the reader will appear errored in the same way - * from now on; otherwise, the reader will appear closed. - * - * A reader's lock cannot be released while it still has a pending read request, i.e., if a promise returned by - * the reader's {@link ReadableStreamDefaultReader.read | read()} method has not yet been settled. Attempting to - * do so will throw a `TypeError` and leave the reader locked to the stream. - */ - releaseLock() { - if (!IsReadableStreamDefaultReader(this)) { - throw defaultReaderBrandCheckException('releaseLock'); - } - if (this._ownerReadableStream === undefined) { - return; - } - if (this._readRequests.length > 0) { - throw new TypeError('Tried to release a reader lock when that reader has pending read() calls un-settled'); - } - ReadableStreamReaderGenericRelease(this); - } - } - Object.defineProperties(ReadableStreamDefaultReader.prototype, { - cancel: { enumerable: true }, - read: { enumerable: true }, - releaseLock: { enumerable: true }, - closed: { enumerable: true } - }); - if (typeof SymbolPolyfill.toStringTag === 'symbol') { - Object.defineProperty(ReadableStreamDefaultReader.prototype, SymbolPolyfill.toStringTag, { - value: 'ReadableStreamDefaultReader', - configurable: true - }); - } - // Abstract operations for the readers. - function IsReadableStreamDefaultReader(x) { - if (!typeIsObject(x)) { - return false; - } - if (!Object.prototype.hasOwnProperty.call(x, '_readRequests')) { - return false; - } - return x instanceof ReadableStreamDefaultReader; - } - function ReadableStreamDefaultReaderRead(reader, readRequest) { - const stream = reader._ownerReadableStream; - stream._disturbed = true; - if (stream._state === 'closed') { - readRequest._closeSteps(); - } - else if (stream._state === 'errored') { - readRequest._errorSteps(stream._storedError); - } - else { - stream._readableStreamController[PullSteps](readRequest); - } - } - // Helper functions for the ReadableStreamDefaultReader. 
- function defaultReaderBrandCheckException(name) { - return new TypeError(`ReadableStreamDefaultReader.prototype.${name} can only be used on a ReadableStreamDefaultReader`); - } +"use strict"; +module.exports = require("http"); - /// - /* eslint-disable @typescript-eslint/no-empty-function */ - const AsyncIteratorPrototype = Object.getPrototypeOf(Object.getPrototypeOf(async function* () { }).prototype); +/***/ }), - /// - class ReadableStreamAsyncIteratorImpl { - constructor(reader, preventCancel) { - this._ongoingPromise = undefined; - this._isFinished = false; - this._reader = reader; - this._preventCancel = preventCancel; - } - next() { - const nextSteps = () => this._nextSteps(); - this._ongoingPromise = this._ongoingPromise ? - transformPromiseWith(this._ongoingPromise, nextSteps, nextSteps) : - nextSteps(); - return this._ongoingPromise; - } - return(value) { - const returnSteps = () => this._returnSteps(value); - return this._ongoingPromise ? - transformPromiseWith(this._ongoingPromise, returnSteps, returnSteps) : - returnSteps(); - } - _nextSteps() { - if (this._isFinished) { - return Promise.resolve({ value: undefined, done: true }); - } - const reader = this._reader; - if (reader._ownerReadableStream === undefined) { - return promiseRejectedWith(readerLockException('iterate')); - } - let resolvePromise; - let rejectPromise; - const promise = newPromise((resolve, reject) => { - resolvePromise = resolve; - rejectPromise = reject; - }); - const readRequest = { - _chunkSteps: chunk => { - this._ongoingPromise = undefined; - // This needs to be delayed by one microtask, otherwise we stop pulling too early which breaks a test. - // FIXME Is this a bug in the specification, or in the test? - queueMicrotask(() => resolvePromise({ value: chunk, done: false })); - }, - _closeSteps: () => { - this._ongoingPromise = undefined; - this._isFinished = true; - ReadableStreamReaderGenericRelease(reader); - resolvePromise({ value: undefined, done: true }); - }, - _errorSteps: reason => { - this._ongoingPromise = undefined; - this._isFinished = true; - ReadableStreamReaderGenericRelease(reader); - rejectPromise(reason); - } - }; - ReadableStreamDefaultReaderRead(reader, readRequest); - return promise; - } - _returnSteps(value) { - if (this._isFinished) { - return Promise.resolve({ value, done: true }); - } - this._isFinished = true; - const reader = this._reader; - if (reader._ownerReadableStream === undefined) { - return promiseRejectedWith(readerLockException('finish iterating')); - } - if (!this._preventCancel) { - const result = ReadableStreamReaderGenericCancel(reader, value); - ReadableStreamReaderGenericRelease(reader); - return transformPromiseWith(result, () => ({ value, done: true })); - } - ReadableStreamReaderGenericRelease(reader); - return promiseResolvedWith({ value, done: true }); - } - } - const ReadableStreamAsyncIteratorPrototype = { - next() { - if (!IsReadableStreamAsyncIterator(this)) { - return promiseRejectedWith(streamAsyncIteratorBrandCheckException('next')); - } - return this._asyncIteratorImpl.next(); - }, - return(value) { - if (!IsReadableStreamAsyncIterator(this)) { - return promiseRejectedWith(streamAsyncIteratorBrandCheckException('return')); - } - return this._asyncIteratorImpl.return(value); - } - }; - if (AsyncIteratorPrototype !== undefined) { - Object.setPrototypeOf(ReadableStreamAsyncIteratorPrototype, AsyncIteratorPrototype); - } - // Abstract operations for the ReadableStream. 
- function AcquireReadableStreamAsyncIterator(stream, preventCancel) { - const reader = AcquireReadableStreamDefaultReader(stream); - const impl = new ReadableStreamAsyncIteratorImpl(reader, preventCancel); - const iterator = Object.create(ReadableStreamAsyncIteratorPrototype); - iterator._asyncIteratorImpl = impl; - return iterator; - } - function IsReadableStreamAsyncIterator(x) { - if (!typeIsObject(x)) { - return false; - } - if (!Object.prototype.hasOwnProperty.call(x, '_asyncIteratorImpl')) { - return false; - } - try { - // noinspection SuspiciousTypeOfGuard - return x._asyncIteratorImpl instanceof - ReadableStreamAsyncIteratorImpl; - } - catch (_a) { - return false; - } - } - // Helper functions for the ReadableStream. - function streamAsyncIteratorBrandCheckException(name) { - return new TypeError(`ReadableStreamAsyncIterator.${name} can only be used on a ReadableSteamAsyncIterator`); - } +/***/ 5687: +/***/ ((module) => { - /// - // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isNaN#Polyfill - const NumberIsNaN = Number.isNaN || function (x) { - // eslint-disable-next-line no-self-compare - return x !== x; - }; +"use strict"; +module.exports = require("https"); - function CreateArrayFromList(elements) { - // We use arrays to represent lists, so this is basically a no-op. - // Do a slice though just in case we happen to depend on the unique-ness. - return elements.slice(); - } - function CopyDataBlockBytes(dest, destOffset, src, srcOffset, n) { - new Uint8Array(dest).set(new Uint8Array(src, srcOffset, n), destOffset); - } - // Not implemented correctly - function TransferArrayBuffer(O) { - return O; - } - // Not implemented correctly - // eslint-disable-next-line @typescript-eslint/no-unused-vars - function IsDetachedBuffer(O) { - return false; - } - function ArrayBufferSlice(buffer, begin, end) { - // ArrayBuffer.prototype.slice is not available on IE10 - // https://www.caniuse.com/mdn-javascript_builtins_arraybuffer_slice - if (buffer.slice) { - return buffer.slice(begin, end); - } - const length = end - begin; - const slice = new ArrayBuffer(length); - CopyDataBlockBytes(slice, 0, buffer, begin, length); - return slice; - } +/***/ }), - function IsNonNegativeNumber(v) { - if (typeof v !== 'number') { - return false; - } - if (NumberIsNaN(v)) { - return false; - } - if (v < 0) { - return false; - } - return true; - } - function CloneAsUint8Array(O) { - const buffer = ArrayBufferSlice(O.buffer, O.byteOffset, O.byteOffset + O.byteLength); - return new Uint8Array(buffer); - } +/***/ 1808: +/***/ ((module) => { - function DequeueValue(container) { - const pair = container._queue.shift(); - container._queueTotalSize -= pair.size; - if (container._queueTotalSize < 0) { - container._queueTotalSize = 0; - } - return pair.value; - } - function EnqueueValueWithSize(container, value, size) { - if (!IsNonNegativeNumber(size) || size === Infinity) { - throw new RangeError('Size must be a finite, non-NaN, non-negative number.'); - } - container._queue.push({ value, size }); - container._queueTotalSize += size; - } - function PeekQueueValue(container) { - const pair = container._queue.peek(); - return pair.value; - } - function ResetQueue(container) { - container._queue = new SimpleQueue(); - container._queueTotalSize = 0; - } +"use strict"; +module.exports = require("net"); - /** - * A pull-into request in a {@link ReadableByteStreamController}. 
- * - * @public - */ - class ReadableStreamBYOBRequest { - constructor() { - throw new TypeError('Illegal constructor'); - } - /** - * Returns the view for writing in to, or `null` if the BYOB request has already been responded to. - */ - get view() { - if (!IsReadableStreamBYOBRequest(this)) { - throw byobRequestBrandCheckException('view'); - } - return this._view; - } - respond(bytesWritten) { - if (!IsReadableStreamBYOBRequest(this)) { - throw byobRequestBrandCheckException('respond'); - } - assertRequiredArgument(bytesWritten, 1, 'respond'); - bytesWritten = convertUnsignedLongLongWithEnforceRange(bytesWritten, 'First parameter'); - if (this._associatedReadableByteStreamController === undefined) { - throw new TypeError('This BYOB request has been invalidated'); - } - if (IsDetachedBuffer(this._view.buffer)) ; - ReadableByteStreamControllerRespond(this._associatedReadableByteStreamController, bytesWritten); - } - respondWithNewView(view) { - if (!IsReadableStreamBYOBRequest(this)) { - throw byobRequestBrandCheckException('respondWithNewView'); - } - assertRequiredArgument(view, 1, 'respondWithNewView'); - if (!ArrayBuffer.isView(view)) { - throw new TypeError('You can only respond with array buffer views'); - } - if (this._associatedReadableByteStreamController === undefined) { - throw new TypeError('This BYOB request has been invalidated'); - } - if (IsDetachedBuffer(view.buffer)) ; - ReadableByteStreamControllerRespondWithNewView(this._associatedReadableByteStreamController, view); - } - } - Object.defineProperties(ReadableStreamBYOBRequest.prototype, { - respond: { enumerable: true }, - respondWithNewView: { enumerable: true }, - view: { enumerable: true } - }); - if (typeof SymbolPolyfill.toStringTag === 'symbol') { - Object.defineProperty(ReadableStreamBYOBRequest.prototype, SymbolPolyfill.toStringTag, { - value: 'ReadableStreamBYOBRequest', - configurable: true - }); - } - /** - * Allows control of a {@link ReadableStream | readable byte stream}'s state and internal queue. - * - * @public - */ - class ReadableByteStreamController { - constructor() { - throw new TypeError('Illegal constructor'); - } - /** - * Returns the current BYOB pull request, or `null` if there isn't one. - */ - get byobRequest() { - if (!IsReadableByteStreamController(this)) { - throw byteStreamControllerBrandCheckException('byobRequest'); - } - return ReadableByteStreamControllerGetBYOBRequest(this); - } - /** - * Returns the desired size to fill the controlled stream's internal queue. It can be negative, if the queue is - * over-full. An underlying byte source ought to use this information to determine when and how to apply backpressure. - */ - get desiredSize() { - if (!IsReadableByteStreamController(this)) { - throw byteStreamControllerBrandCheckException('desiredSize'); - } - return ReadableByteStreamControllerGetDesiredSize(this); - } - /** - * Closes the controlled readable stream. Consumers will still be able to read any previously-enqueued chunks from - * the stream, but once those are read, the stream will become closed. 
- */ - close() { - if (!IsReadableByteStreamController(this)) { - throw byteStreamControllerBrandCheckException('close'); - } - if (this._closeRequested) { - throw new TypeError('The stream has already been closed; do not close it again!'); - } - const state = this._controlledReadableByteStream._state; - if (state !== 'readable') { - throw new TypeError(`The stream (in ${state} state) is not in the readable state and cannot be closed`); - } - ReadableByteStreamControllerClose(this); - } - enqueue(chunk) { - if (!IsReadableByteStreamController(this)) { - throw byteStreamControllerBrandCheckException('enqueue'); - } - assertRequiredArgument(chunk, 1, 'enqueue'); - if (!ArrayBuffer.isView(chunk)) { - throw new TypeError('chunk must be an array buffer view'); - } - if (chunk.byteLength === 0) { - throw new TypeError('chunk must have non-zero byteLength'); - } - if (chunk.buffer.byteLength === 0) { - throw new TypeError(`chunk's buffer must have non-zero byteLength`); - } - if (this._closeRequested) { - throw new TypeError('stream is closed or draining'); - } - const state = this._controlledReadableByteStream._state; - if (state !== 'readable') { - throw new TypeError(`The stream (in ${state} state) is not in the readable state and cannot be enqueued to`); - } - ReadableByteStreamControllerEnqueue(this, chunk); - } - /** - * Errors the controlled readable stream, making all future interactions with it fail with the given error `e`. - */ - error(e = undefined) { - if (!IsReadableByteStreamController(this)) { - throw byteStreamControllerBrandCheckException('error'); - } - ReadableByteStreamControllerError(this, e); - } - /** @internal */ - [CancelSteps](reason) { - ReadableByteStreamControllerClearPendingPullIntos(this); - ResetQueue(this); - const result = this._cancelAlgorithm(reason); - ReadableByteStreamControllerClearAlgorithms(this); - return result; - } - /** @internal */ - [PullSteps](readRequest) { - const stream = this._controlledReadableByteStream; - if (this._queueTotalSize > 0) { - const entry = this._queue.shift(); - this._queueTotalSize -= entry.byteLength; - ReadableByteStreamControllerHandleQueueDrain(this); - const view = new Uint8Array(entry.buffer, entry.byteOffset, entry.byteLength); - readRequest._chunkSteps(view); - return; - } - const autoAllocateChunkSize = this._autoAllocateChunkSize; - if (autoAllocateChunkSize !== undefined) { - let buffer; - try { - buffer = new ArrayBuffer(autoAllocateChunkSize); - } - catch (bufferE) { - readRequest._errorSteps(bufferE); - return; - } - const pullIntoDescriptor = { - buffer, - bufferByteLength: autoAllocateChunkSize, - byteOffset: 0, - byteLength: autoAllocateChunkSize, - bytesFilled: 0, - elementSize: 1, - viewConstructor: Uint8Array, - readerType: 'default' - }; - this._pendingPullIntos.push(pullIntoDescriptor); - } - ReadableStreamAddReadRequest(stream, readRequest); - ReadableByteStreamControllerCallPullIfNeeded(this); - } - } - Object.defineProperties(ReadableByteStreamController.prototype, { - close: { enumerable: true }, - enqueue: { enumerable: true }, - error: { enumerable: true }, - byobRequest: { enumerable: true }, - desiredSize: { enumerable: true } - }); - if (typeof SymbolPolyfill.toStringTag === 'symbol') { - Object.defineProperty(ReadableByteStreamController.prototype, SymbolPolyfill.toStringTag, { - value: 'ReadableByteStreamController', - configurable: true - }); - } - // Abstract operations for the ReadableByteStreamController. 
- function IsReadableByteStreamController(x) { - if (!typeIsObject(x)) { - return false; - } - if (!Object.prototype.hasOwnProperty.call(x, '_controlledReadableByteStream')) { - return false; - } - return x instanceof ReadableByteStreamController; - } - function IsReadableStreamBYOBRequest(x) { - if (!typeIsObject(x)) { - return false; - } - if (!Object.prototype.hasOwnProperty.call(x, '_associatedReadableByteStreamController')) { - return false; - } - return x instanceof ReadableStreamBYOBRequest; - } - function ReadableByteStreamControllerCallPullIfNeeded(controller) { - const shouldPull = ReadableByteStreamControllerShouldCallPull(controller); - if (!shouldPull) { - return; - } - if (controller._pulling) { - controller._pullAgain = true; - return; - } - controller._pulling = true; - // TODO: Test controller argument - const pullPromise = controller._pullAlgorithm(); - uponPromise(pullPromise, () => { - controller._pulling = false; - if (controller._pullAgain) { - controller._pullAgain = false; - ReadableByteStreamControllerCallPullIfNeeded(controller); - } - }, e => { - ReadableByteStreamControllerError(controller, e); - }); - } - function ReadableByteStreamControllerClearPendingPullIntos(controller) { - ReadableByteStreamControllerInvalidateBYOBRequest(controller); - controller._pendingPullIntos = new SimpleQueue(); - } - function ReadableByteStreamControllerCommitPullIntoDescriptor(stream, pullIntoDescriptor) { - let done = false; - if (stream._state === 'closed') { - done = true; - } - const filledView = ReadableByteStreamControllerConvertPullIntoDescriptor(pullIntoDescriptor); - if (pullIntoDescriptor.readerType === 'default') { - ReadableStreamFulfillReadRequest(stream, filledView, done); - } - else { - ReadableStreamFulfillReadIntoRequest(stream, filledView, done); - } - } - function ReadableByteStreamControllerConvertPullIntoDescriptor(pullIntoDescriptor) { - const bytesFilled = pullIntoDescriptor.bytesFilled; - const elementSize = pullIntoDescriptor.elementSize; - return new pullIntoDescriptor.viewConstructor(pullIntoDescriptor.buffer, pullIntoDescriptor.byteOffset, bytesFilled / elementSize); - } - function ReadableByteStreamControllerEnqueueChunkToQueue(controller, buffer, byteOffset, byteLength) { - controller._queue.push({ buffer, byteOffset, byteLength }); - controller._queueTotalSize += byteLength; - } - function ReadableByteStreamControllerFillPullIntoDescriptorFromQueue(controller, pullIntoDescriptor) { - const elementSize = pullIntoDescriptor.elementSize; - const currentAlignedBytes = pullIntoDescriptor.bytesFilled - pullIntoDescriptor.bytesFilled % elementSize; - const maxBytesToCopy = Math.min(controller._queueTotalSize, pullIntoDescriptor.byteLength - pullIntoDescriptor.bytesFilled); - const maxBytesFilled = pullIntoDescriptor.bytesFilled + maxBytesToCopy; - const maxAlignedBytes = maxBytesFilled - maxBytesFilled % elementSize; - let totalBytesToCopyRemaining = maxBytesToCopy; - let ready = false; - if (maxAlignedBytes > currentAlignedBytes) { - totalBytesToCopyRemaining = maxAlignedBytes - pullIntoDescriptor.bytesFilled; - ready = true; - } - const queue = controller._queue; - while (totalBytesToCopyRemaining > 0) { - const headOfQueue = queue.peek(); - const bytesToCopy = Math.min(totalBytesToCopyRemaining, headOfQueue.byteLength); - const destStart = pullIntoDescriptor.byteOffset + pullIntoDescriptor.bytesFilled; - CopyDataBlockBytes(pullIntoDescriptor.buffer, destStart, headOfQueue.buffer, headOfQueue.byteOffset, bytesToCopy); - if (headOfQueue.byteLength === 
bytesToCopy) { - queue.shift(); - } - else { - headOfQueue.byteOffset += bytesToCopy; - headOfQueue.byteLength -= bytesToCopy; - } - controller._queueTotalSize -= bytesToCopy; - ReadableByteStreamControllerFillHeadPullIntoDescriptor(controller, bytesToCopy, pullIntoDescriptor); - totalBytesToCopyRemaining -= bytesToCopy; - } - return ready; - } - function ReadableByteStreamControllerFillHeadPullIntoDescriptor(controller, size, pullIntoDescriptor) { - pullIntoDescriptor.bytesFilled += size; - } - function ReadableByteStreamControllerHandleQueueDrain(controller) { - if (controller._queueTotalSize === 0 && controller._closeRequested) { - ReadableByteStreamControllerClearAlgorithms(controller); - ReadableStreamClose(controller._controlledReadableByteStream); - } - else { - ReadableByteStreamControllerCallPullIfNeeded(controller); - } - } - function ReadableByteStreamControllerInvalidateBYOBRequest(controller) { - if (controller._byobRequest === null) { - return; - } - controller._byobRequest._associatedReadableByteStreamController = undefined; - controller._byobRequest._view = null; - controller._byobRequest = null; - } - function ReadableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(controller) { - while (controller._pendingPullIntos.length > 0) { - if (controller._queueTotalSize === 0) { - return; - } - const pullIntoDescriptor = controller._pendingPullIntos.peek(); - if (ReadableByteStreamControllerFillPullIntoDescriptorFromQueue(controller, pullIntoDescriptor)) { - ReadableByteStreamControllerShiftPendingPullInto(controller); - ReadableByteStreamControllerCommitPullIntoDescriptor(controller._controlledReadableByteStream, pullIntoDescriptor); - } - } - } - function ReadableByteStreamControllerPullInto(controller, view, readIntoRequest) { - const stream = controller._controlledReadableByteStream; - let elementSize = 1; - if (view.constructor !== DataView) { - elementSize = view.constructor.BYTES_PER_ELEMENT; - } - const ctor = view.constructor; - // try { - const buffer = TransferArrayBuffer(view.buffer); - // } catch (e) { - // readIntoRequest._errorSteps(e); - // return; - // } - const pullIntoDescriptor = { - buffer, - bufferByteLength: buffer.byteLength, - byteOffset: view.byteOffset, - byteLength: view.byteLength, - bytesFilled: 0, - elementSize, - viewConstructor: ctor, - readerType: 'byob' - }; - if (controller._pendingPullIntos.length > 0) { - controller._pendingPullIntos.push(pullIntoDescriptor); - // No ReadableByteStreamControllerCallPullIfNeeded() call since: - // - No change happens on desiredSize - // - The source has already been notified of that there's at least 1 pending read(view) - ReadableStreamAddReadIntoRequest(stream, readIntoRequest); - return; - } - if (stream._state === 'closed') { - const emptyView = new ctor(pullIntoDescriptor.buffer, pullIntoDescriptor.byteOffset, 0); - readIntoRequest._closeSteps(emptyView); - return; - } - if (controller._queueTotalSize > 0) { - if (ReadableByteStreamControllerFillPullIntoDescriptorFromQueue(controller, pullIntoDescriptor)) { - const filledView = ReadableByteStreamControllerConvertPullIntoDescriptor(pullIntoDescriptor); - ReadableByteStreamControllerHandleQueueDrain(controller); - readIntoRequest._chunkSteps(filledView); - return; - } - if (controller._closeRequested) { - const e = new TypeError('Insufficient bytes to fill elements in the given buffer'); - ReadableByteStreamControllerError(controller, e); - readIntoRequest._errorSteps(e); - return; - } - } - controller._pendingPullIntos.push(pullIntoDescriptor); - 
ReadableStreamAddReadIntoRequest(stream, readIntoRequest); - ReadableByteStreamControllerCallPullIfNeeded(controller); - } - function ReadableByteStreamControllerRespondInClosedState(controller, firstDescriptor) { - const stream = controller._controlledReadableByteStream; - if (ReadableStreamHasBYOBReader(stream)) { - while (ReadableStreamGetNumReadIntoRequests(stream) > 0) { - const pullIntoDescriptor = ReadableByteStreamControllerShiftPendingPullInto(controller); - ReadableByteStreamControllerCommitPullIntoDescriptor(stream, pullIntoDescriptor); - } - } - } - function ReadableByteStreamControllerRespondInReadableState(controller, bytesWritten, pullIntoDescriptor) { - ReadableByteStreamControllerFillHeadPullIntoDescriptor(controller, bytesWritten, pullIntoDescriptor); - if (pullIntoDescriptor.bytesFilled < pullIntoDescriptor.elementSize) { - return; - } - ReadableByteStreamControllerShiftPendingPullInto(controller); - const remainderSize = pullIntoDescriptor.bytesFilled % pullIntoDescriptor.elementSize; - if (remainderSize > 0) { - const end = pullIntoDescriptor.byteOffset + pullIntoDescriptor.bytesFilled; - const remainder = ArrayBufferSlice(pullIntoDescriptor.buffer, end - remainderSize, end); - ReadableByteStreamControllerEnqueueChunkToQueue(controller, remainder, 0, remainder.byteLength); - } - pullIntoDescriptor.bytesFilled -= remainderSize; - ReadableByteStreamControllerCommitPullIntoDescriptor(controller._controlledReadableByteStream, pullIntoDescriptor); - ReadableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(controller); - } - function ReadableByteStreamControllerRespondInternal(controller, bytesWritten) { - const firstDescriptor = controller._pendingPullIntos.peek(); - ReadableByteStreamControllerInvalidateBYOBRequest(controller); - const state = controller._controlledReadableByteStream._state; - if (state === 'closed') { - ReadableByteStreamControllerRespondInClosedState(controller); - } - else { - ReadableByteStreamControllerRespondInReadableState(controller, bytesWritten, firstDescriptor); - } - ReadableByteStreamControllerCallPullIfNeeded(controller); - } - function ReadableByteStreamControllerShiftPendingPullInto(controller) { - const descriptor = controller._pendingPullIntos.shift(); - return descriptor; - } - function ReadableByteStreamControllerShouldCallPull(controller) { - const stream = controller._controlledReadableByteStream; - if (stream._state !== 'readable') { - return false; - } - if (controller._closeRequested) { - return false; - } - if (!controller._started) { - return false; - } - if (ReadableStreamHasDefaultReader(stream) && ReadableStreamGetNumReadRequests(stream) > 0) { - return true; - } - if (ReadableStreamHasBYOBReader(stream) && ReadableStreamGetNumReadIntoRequests(stream) > 0) { - return true; - } - const desiredSize = ReadableByteStreamControllerGetDesiredSize(controller); - if (desiredSize > 0) { - return true; - } - return false; - } - function ReadableByteStreamControllerClearAlgorithms(controller) { - controller._pullAlgorithm = undefined; - controller._cancelAlgorithm = undefined; - } - // A client of ReadableByteStreamController may use these functions directly to bypass state check. 
- function ReadableByteStreamControllerClose(controller) { - const stream = controller._controlledReadableByteStream; - if (controller._closeRequested || stream._state !== 'readable') { - return; - } - if (controller._queueTotalSize > 0) { - controller._closeRequested = true; - return; - } - if (controller._pendingPullIntos.length > 0) { - const firstPendingPullInto = controller._pendingPullIntos.peek(); - if (firstPendingPullInto.bytesFilled > 0) { - const e = new TypeError('Insufficient bytes to fill elements in the given buffer'); - ReadableByteStreamControllerError(controller, e); - throw e; - } - } - ReadableByteStreamControllerClearAlgorithms(controller); - ReadableStreamClose(stream); - } - function ReadableByteStreamControllerEnqueue(controller, chunk) { - const stream = controller._controlledReadableByteStream; - if (controller._closeRequested || stream._state !== 'readable') { - return; - } - const buffer = chunk.buffer; - const byteOffset = chunk.byteOffset; - const byteLength = chunk.byteLength; - const transferredBuffer = TransferArrayBuffer(buffer); - if (controller._pendingPullIntos.length > 0) { - const firstPendingPullInto = controller._pendingPullIntos.peek(); - if (IsDetachedBuffer(firstPendingPullInto.buffer)) ; - firstPendingPullInto.buffer = TransferArrayBuffer(firstPendingPullInto.buffer); - } - ReadableByteStreamControllerInvalidateBYOBRequest(controller); - if (ReadableStreamHasDefaultReader(stream)) { - if (ReadableStreamGetNumReadRequests(stream) === 0) { - ReadableByteStreamControllerEnqueueChunkToQueue(controller, transferredBuffer, byteOffset, byteLength); - } - else { - if (controller._pendingPullIntos.length > 0) { - ReadableByteStreamControllerShiftPendingPullInto(controller); - } - const transferredView = new Uint8Array(transferredBuffer, byteOffset, byteLength); - ReadableStreamFulfillReadRequest(stream, transferredView, false); - } - } - else if (ReadableStreamHasBYOBReader(stream)) { - // TODO: Ideally in this branch detaching should happen only if the buffer is not consumed fully. 
- ReadableByteStreamControllerEnqueueChunkToQueue(controller, transferredBuffer, byteOffset, byteLength); - ReadableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(controller); - } - else { - ReadableByteStreamControllerEnqueueChunkToQueue(controller, transferredBuffer, byteOffset, byteLength); - } - ReadableByteStreamControllerCallPullIfNeeded(controller); - } - function ReadableByteStreamControllerError(controller, e) { - const stream = controller._controlledReadableByteStream; - if (stream._state !== 'readable') { - return; - } - ReadableByteStreamControllerClearPendingPullIntos(controller); - ResetQueue(controller); - ReadableByteStreamControllerClearAlgorithms(controller); - ReadableStreamError(stream, e); - } - function ReadableByteStreamControllerGetBYOBRequest(controller) { - if (controller._byobRequest === null && controller._pendingPullIntos.length > 0) { - const firstDescriptor = controller._pendingPullIntos.peek(); - const view = new Uint8Array(firstDescriptor.buffer, firstDescriptor.byteOffset + firstDescriptor.bytesFilled, firstDescriptor.byteLength - firstDescriptor.bytesFilled); - const byobRequest = Object.create(ReadableStreamBYOBRequest.prototype); - SetUpReadableStreamBYOBRequest(byobRequest, controller, view); - controller._byobRequest = byobRequest; - } - return controller._byobRequest; - } - function ReadableByteStreamControllerGetDesiredSize(controller) { - const state = controller._controlledReadableByteStream._state; - if (state === 'errored') { - return null; - } - if (state === 'closed') { - return 0; - } - return controller._strategyHWM - controller._queueTotalSize; - } - function ReadableByteStreamControllerRespond(controller, bytesWritten) { - const firstDescriptor = controller._pendingPullIntos.peek(); - const state = controller._controlledReadableByteStream._state; - if (state === 'closed') { - if (bytesWritten !== 0) { - throw new TypeError('bytesWritten must be 0 when calling respond() on a closed stream'); - } - } - else { - if (bytesWritten === 0) { - throw new TypeError('bytesWritten must be greater than 0 when calling respond() on a readable stream'); - } - if (firstDescriptor.bytesFilled + bytesWritten > firstDescriptor.byteLength) { - throw new RangeError('bytesWritten out of range'); - } - } - firstDescriptor.buffer = TransferArrayBuffer(firstDescriptor.buffer); - ReadableByteStreamControllerRespondInternal(controller, bytesWritten); - } - function ReadableByteStreamControllerRespondWithNewView(controller, view) { - const firstDescriptor = controller._pendingPullIntos.peek(); - const state = controller._controlledReadableByteStream._state; - if (state === 'closed') { - if (view.byteLength !== 0) { - throw new TypeError('The view\'s length must be 0 when calling respondWithNewView() on a closed stream'); - } - } - else { - if (view.byteLength === 0) { - throw new TypeError('The view\'s length must be greater than 0 when calling respondWithNewView() on a readable stream'); - } - } - if (firstDescriptor.byteOffset + firstDescriptor.bytesFilled !== view.byteOffset) { - throw new RangeError('The region specified by view does not match byobRequest'); - } - if (firstDescriptor.bufferByteLength !== view.buffer.byteLength) { - throw new RangeError('The buffer of view has different capacity than byobRequest'); - } - if (firstDescriptor.bytesFilled + view.byteLength > firstDescriptor.byteLength) { - throw new RangeError('The region specified by view is larger than byobRequest'); - } - const viewByteLength = view.byteLength; - firstDescriptor.buffer = 
TransferArrayBuffer(view.buffer); - ReadableByteStreamControllerRespondInternal(controller, viewByteLength); - } - function SetUpReadableByteStreamController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, autoAllocateChunkSize) { - controller._controlledReadableByteStream = stream; - controller._pullAgain = false; - controller._pulling = false; - controller._byobRequest = null; - // Need to set the slots so that the assert doesn't fire. In the spec the slots already exist implicitly. - controller._queue = controller._queueTotalSize = undefined; - ResetQueue(controller); - controller._closeRequested = false; - controller._started = false; - controller._strategyHWM = highWaterMark; - controller._pullAlgorithm = pullAlgorithm; - controller._cancelAlgorithm = cancelAlgorithm; - controller._autoAllocateChunkSize = autoAllocateChunkSize; - controller._pendingPullIntos = new SimpleQueue(); - stream._readableStreamController = controller; - const startResult = startAlgorithm(); - uponPromise(promiseResolvedWith(startResult), () => { - controller._started = true; - ReadableByteStreamControllerCallPullIfNeeded(controller); - }, r => { - ReadableByteStreamControllerError(controller, r); - }); - } - function SetUpReadableByteStreamControllerFromUnderlyingSource(stream, underlyingByteSource, highWaterMark) { - const controller = Object.create(ReadableByteStreamController.prototype); - let startAlgorithm = () => undefined; - let pullAlgorithm = () => promiseResolvedWith(undefined); - let cancelAlgorithm = () => promiseResolvedWith(undefined); - if (underlyingByteSource.start !== undefined) { - startAlgorithm = () => underlyingByteSource.start(controller); - } - if (underlyingByteSource.pull !== undefined) { - pullAlgorithm = () => underlyingByteSource.pull(controller); - } - if (underlyingByteSource.cancel !== undefined) { - cancelAlgorithm = reason => underlyingByteSource.cancel(reason); - } - const autoAllocateChunkSize = underlyingByteSource.autoAllocateChunkSize; - if (autoAllocateChunkSize === 0) { - throw new TypeError('autoAllocateChunkSize must be greater than 0'); - } - SetUpReadableByteStreamController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, autoAllocateChunkSize); - } - function SetUpReadableStreamBYOBRequest(request, controller, view) { - request._associatedReadableByteStreamController = controller; - request._view = view; - } - // Helper functions for the ReadableStreamBYOBRequest. - function byobRequestBrandCheckException(name) { - return new TypeError(`ReadableStreamBYOBRequest.prototype.${name} can only be used on a ReadableStreamBYOBRequest`); - } - // Helper functions for the ReadableByteStreamController. - function byteStreamControllerBrandCheckException(name) { - return new TypeError(`ReadableByteStreamController.prototype.${name} can only be used on a ReadableByteStreamController`); - } - - // Abstract operations for the ReadableStream. - function AcquireReadableStreamBYOBReader(stream) { - return new ReadableStreamBYOBReader(stream); - } - // ReadableStream API exposed for controllers. 
- function ReadableStreamAddReadIntoRequest(stream, readIntoRequest) { - stream._reader._readIntoRequests.push(readIntoRequest); - } - function ReadableStreamFulfillReadIntoRequest(stream, chunk, done) { - const reader = stream._reader; - const readIntoRequest = reader._readIntoRequests.shift(); - if (done) { - readIntoRequest._closeSteps(chunk); - } - else { - readIntoRequest._chunkSteps(chunk); - } - } - function ReadableStreamGetNumReadIntoRequests(stream) { - return stream._reader._readIntoRequests.length; - } - function ReadableStreamHasBYOBReader(stream) { - const reader = stream._reader; - if (reader === undefined) { - return false; - } - if (!IsReadableStreamBYOBReader(reader)) { - return false; - } - return true; - } - /** - * A BYOB reader vended by a {@link ReadableStream}. - * - * @public - */ - class ReadableStreamBYOBReader { - constructor(stream) { - assertRequiredArgument(stream, 1, 'ReadableStreamBYOBReader'); - assertReadableStream(stream, 'First parameter'); - if (IsReadableStreamLocked(stream)) { - throw new TypeError('This stream has already been locked for exclusive reading by another reader'); - } - if (!IsReadableByteStreamController(stream._readableStreamController)) { - throw new TypeError('Cannot construct a ReadableStreamBYOBReader for a stream not constructed with a byte ' + - 'source'); - } - ReadableStreamReaderGenericInitialize(this, stream); - this._readIntoRequests = new SimpleQueue(); - } - /** - * Returns a promise that will be fulfilled when the stream becomes closed, or rejected if the stream ever errors or - * the reader's lock is released before the stream finishes closing. - */ - get closed() { - if (!IsReadableStreamBYOBReader(this)) { - return promiseRejectedWith(byobReaderBrandCheckException('closed')); - } - return this._closedPromise; - } - /** - * If the reader is active, behaves the same as {@link ReadableStream.cancel | stream.cancel(reason)}. - */ - cancel(reason = undefined) { - if (!IsReadableStreamBYOBReader(this)) { - return promiseRejectedWith(byobReaderBrandCheckException('cancel')); - } - if (this._ownerReadableStream === undefined) { - return promiseRejectedWith(readerLockException('cancel')); - } - return ReadableStreamReaderGenericCancel(this, reason); - } - /** - * Attempts to reads bytes into view, and returns a promise resolved with the result. - * - * If reading a chunk causes the queue to become empty, more data will be pulled from the underlying source. 
- */ - read(view) { - if (!IsReadableStreamBYOBReader(this)) { - return promiseRejectedWith(byobReaderBrandCheckException('read')); - } - if (!ArrayBuffer.isView(view)) { - return promiseRejectedWith(new TypeError('view must be an array buffer view')); - } - if (view.byteLength === 0) { - return promiseRejectedWith(new TypeError('view must have non-zero byteLength')); - } - if (view.buffer.byteLength === 0) { - return promiseRejectedWith(new TypeError(`view's buffer must have non-zero byteLength`)); - } - if (IsDetachedBuffer(view.buffer)) ; - if (this._ownerReadableStream === undefined) { - return promiseRejectedWith(readerLockException('read from')); - } - let resolvePromise; - let rejectPromise; - const promise = newPromise((resolve, reject) => { - resolvePromise = resolve; - rejectPromise = reject; - }); - const readIntoRequest = { - _chunkSteps: chunk => resolvePromise({ value: chunk, done: false }), - _closeSteps: chunk => resolvePromise({ value: chunk, done: true }), - _errorSteps: e => rejectPromise(e) - }; - ReadableStreamBYOBReaderRead(this, view, readIntoRequest); - return promise; - } - /** - * Releases the reader's lock on the corresponding stream. After the lock is released, the reader is no longer active. - * If the associated stream is errored when the lock is released, the reader will appear errored in the same way - * from now on; otherwise, the reader will appear closed. - * - * A reader's lock cannot be released while it still has a pending read request, i.e., if a promise returned by - * the reader's {@link ReadableStreamBYOBReader.read | read()} method has not yet been settled. Attempting to - * do so will throw a `TypeError` and leave the reader locked to the stream. - */ - releaseLock() { - if (!IsReadableStreamBYOBReader(this)) { - throw byobReaderBrandCheckException('releaseLock'); - } - if (this._ownerReadableStream === undefined) { - return; - } - if (this._readIntoRequests.length > 0) { - throw new TypeError('Tried to release a reader lock when that reader has pending read() calls un-settled'); - } - ReadableStreamReaderGenericRelease(this); - } - } - Object.defineProperties(ReadableStreamBYOBReader.prototype, { - cancel: { enumerable: true }, - read: { enumerable: true }, - releaseLock: { enumerable: true }, - closed: { enumerable: true } - }); - if (typeof SymbolPolyfill.toStringTag === 'symbol') { - Object.defineProperty(ReadableStreamBYOBReader.prototype, SymbolPolyfill.toStringTag, { - value: 'ReadableStreamBYOBReader', - configurable: true - }); - } - // Abstract operations for the readers. - function IsReadableStreamBYOBReader(x) { - if (!typeIsObject(x)) { - return false; - } - if (!Object.prototype.hasOwnProperty.call(x, '_readIntoRequests')) { - return false; - } - return x instanceof ReadableStreamBYOBReader; - } - function ReadableStreamBYOBReaderRead(reader, view, readIntoRequest) { - const stream = reader._ownerReadableStream; - stream._disturbed = true; - if (stream._state === 'errored') { - readIntoRequest._errorSteps(stream._storedError); - } - else { - ReadableByteStreamControllerPullInto(stream._readableStreamController, view, readIntoRequest); - } - } - // Helper functions for the ReadableStreamBYOBReader. 
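// Illustrative sketch, assuming a standard WHATWG Streams implementation such as the
// polyfill above: acquiring the ReadableStreamBYOBReader defined above and reading into a
// caller-supplied buffer. The helper name `readIntoBuffer` is hypothetical.
async function readIntoBuffer(stream) {
  const reader = stream.getReader({ mode: 'byob' });
  try {
    const { value, done } = await reader.read(new Uint8Array(16));
    return done ? null : value; // a Uint8Array view over the transferred buffer
  } finally {
    // Safe here: the read above has settled, so no pending read() blocks the release.
    reader.releaseLock();
  }
}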
- function byobReaderBrandCheckException(name) { - return new TypeError(`ReadableStreamBYOBReader.prototype.${name} can only be used on a ReadableStreamBYOBReader`); - } - - function ExtractHighWaterMark(strategy, defaultHWM) { - const { highWaterMark } = strategy; - if (highWaterMark === undefined) { - return defaultHWM; - } - if (NumberIsNaN(highWaterMark) || highWaterMark < 0) { - throw new RangeError('Invalid highWaterMark'); - } - return highWaterMark; - } - function ExtractSizeAlgorithm(strategy) { - const { size } = strategy; - if (!size) { - return () => 1; - } - return size; - } - - function convertQueuingStrategy(init, context) { - assertDictionary(init, context); - const highWaterMark = init === null || init === void 0 ? void 0 : init.highWaterMark; - const size = init === null || init === void 0 ? void 0 : init.size; - return { - highWaterMark: highWaterMark === undefined ? undefined : convertUnrestrictedDouble(highWaterMark), - size: size === undefined ? undefined : convertQueuingStrategySize(size, `${context} has member 'size' that`) - }; - } - function convertQueuingStrategySize(fn, context) { - assertFunction(fn, context); - return chunk => convertUnrestrictedDouble(fn(chunk)); - } - - function convertUnderlyingSink(original, context) { - assertDictionary(original, context); - const abort = original === null || original === void 0 ? void 0 : original.abort; - const close = original === null || original === void 0 ? void 0 : original.close; - const start = original === null || original === void 0 ? void 0 : original.start; - const type = original === null || original === void 0 ? void 0 : original.type; - const write = original === null || original === void 0 ? void 0 : original.write; - return { - abort: abort === undefined ? - undefined : - convertUnderlyingSinkAbortCallback(abort, original, `${context} has member 'abort' that`), - close: close === undefined ? - undefined : - convertUnderlyingSinkCloseCallback(close, original, `${context} has member 'close' that`), - start: start === undefined ? - undefined : - convertUnderlyingSinkStartCallback(start, original, `${context} has member 'start' that`), - write: write === undefined ? - undefined : - convertUnderlyingSinkWriteCallback(write, original, `${context} has member 'write' that`), - type - }; - } - function convertUnderlyingSinkAbortCallback(fn, original, context) { - assertFunction(fn, context); - return (reason) => promiseCall(fn, original, [reason]); - } - function convertUnderlyingSinkCloseCallback(fn, original, context) { - assertFunction(fn, context); - return () => promiseCall(fn, original, []); - } - function convertUnderlyingSinkStartCallback(fn, original, context) { - assertFunction(fn, context); - return (controller) => reflectCall(fn, original, [controller]); - } - function convertUnderlyingSinkWriteCallback(fn, original, context) { - assertFunction(fn, context); - return (chunk, controller) => promiseCall(fn, original, [chunk, controller]); - } - - function assertWritableStream(x, context) { - if (!IsWritableStream(x)) { - throw new TypeError(`${context} is not a WritableStream.`); - } - } - - function isAbortSignal(value) { - if (typeof value !== 'object' || value === null) { - return false; - } - try { - return typeof value.aborted === 'boolean'; - } - catch (_a) { - // AbortSignal.prototype.aborted throws if its brand check fails - return false; - } - } - const supportsAbortController = typeof AbortController === 'function'; - /** - * Construct a new AbortController, if supported by the platform. 
- * - * @internal - */ - function createAbortController() { - if (supportsAbortController) { - return new AbortController(); - } - return undefined; - } - - /** - * A writable stream represents a destination for data, into which you can write. - * - * @public - */ - class WritableStream { - constructor(rawUnderlyingSink = {}, rawStrategy = {}) { - if (rawUnderlyingSink === undefined) { - rawUnderlyingSink = null; - } - else { - assertObject(rawUnderlyingSink, 'First parameter'); - } - const strategy = convertQueuingStrategy(rawStrategy, 'Second parameter'); - const underlyingSink = convertUnderlyingSink(rawUnderlyingSink, 'First parameter'); - InitializeWritableStream(this); - const type = underlyingSink.type; - if (type !== undefined) { - throw new RangeError('Invalid type is specified'); - } - const sizeAlgorithm = ExtractSizeAlgorithm(strategy); - const highWaterMark = ExtractHighWaterMark(strategy, 1); - SetUpWritableStreamDefaultControllerFromUnderlyingSink(this, underlyingSink, highWaterMark, sizeAlgorithm); - } - /** - * Returns whether or not the writable stream is locked to a writer. - */ - get locked() { - if (!IsWritableStream(this)) { - throw streamBrandCheckException$2('locked'); - } - return IsWritableStreamLocked(this); - } - /** - * Aborts the stream, signaling that the producer can no longer successfully write to the stream and it is to be - * immediately moved to an errored state, with any queued-up writes discarded. This will also execute any abort - * mechanism of the underlying sink. - * - * The returned promise will fulfill if the stream shuts down successfully, or reject if the underlying sink signaled - * that there was an error doing so. Additionally, it will reject with a `TypeError` (without attempting to cancel - * the stream) if the stream is currently locked. - */ - abort(reason = undefined) { - if (!IsWritableStream(this)) { - return promiseRejectedWith(streamBrandCheckException$2('abort')); - } - if (IsWritableStreamLocked(this)) { - return promiseRejectedWith(new TypeError('Cannot abort a stream that already has a writer')); - } - return WritableStreamAbort(this, reason); - } - /** - * Closes the stream. The underlying sink will finish processing any previously-written chunks, before invoking its - * close behavior. During this time any further attempts to write will fail (without erroring the stream). - * - * The method returns a promise that will fulfill if all remaining chunks are successfully written and the stream - * successfully closes, or rejects if an error is encountered during this process. Additionally, it will reject with - * a `TypeError` (without attempting to cancel the stream) if the stream is currently locked. - */ - close() { - if (!IsWritableStream(this)) { - return promiseRejectedWith(streamBrandCheckException$2('close')); - } - if (IsWritableStreamLocked(this)) { - return promiseRejectedWith(new TypeError('Cannot close a stream that already has a writer')); - } - if (WritableStreamCloseQueuedOrInFlight(this)) { - return promiseRejectedWith(new TypeError('Cannot close an already-closing stream')); - } - return WritableStreamClose(this); - } - /** - * Creates a {@link WritableStreamDefaultWriter | writer} and locks the stream to the new writer. While the stream - * is locked, no other writer can be acquired until this one is released. - * - * This functionality is especially useful for creating abstractions that desire the ability to write to a stream - * without interruption or interleaving. 
By getting a writer for the stream, you can ensure nobody else can write at - * the same time, which would cause the resulting written data to be unpredictable and probably useless. - */ - getWriter() { - if (!IsWritableStream(this)) { - throw streamBrandCheckException$2('getWriter'); - } - return AcquireWritableStreamDefaultWriter(this); - } - } - Object.defineProperties(WritableStream.prototype, { - abort: { enumerable: true }, - close: { enumerable: true }, - getWriter: { enumerable: true }, - locked: { enumerable: true } - }); - if (typeof SymbolPolyfill.toStringTag === 'symbol') { - Object.defineProperty(WritableStream.prototype, SymbolPolyfill.toStringTag, { - value: 'WritableStream', - configurable: true - }); - } - // Abstract operations for the WritableStream. - function AcquireWritableStreamDefaultWriter(stream) { - return new WritableStreamDefaultWriter(stream); - } - // Throws if and only if startAlgorithm throws. - function CreateWritableStream(startAlgorithm, writeAlgorithm, closeAlgorithm, abortAlgorithm, highWaterMark = 1, sizeAlgorithm = () => 1) { - const stream = Object.create(WritableStream.prototype); - InitializeWritableStream(stream); - const controller = Object.create(WritableStreamDefaultController.prototype); - SetUpWritableStreamDefaultController(stream, controller, startAlgorithm, writeAlgorithm, closeAlgorithm, abortAlgorithm, highWaterMark, sizeAlgorithm); - return stream; - } - function InitializeWritableStream(stream) { - stream._state = 'writable'; - // The error that will be reported by new method calls once the state becomes errored. Only set when [[state]] is - // 'erroring' or 'errored'. May be set to an undefined value. - stream._storedError = undefined; - stream._writer = undefined; - // Initialize to undefined first because the constructor of the controller checks this - // variable to validate the caller. - stream._writableStreamController = undefined; - // This queue is placed here instead of the writer class in order to allow for passing a writer to the next data - // producer without waiting for the queued writes to finish. - stream._writeRequests = new SimpleQueue(); - // Write requests are removed from _writeRequests when write() is called on the underlying sink. This prevents - // them from being erroneously rejected on error. If a write() call is in-flight, the request is stored here. - stream._inFlightWriteRequest = undefined; - // The promise that was returned from writer.close(). Stored here because it may be fulfilled after the writer - // has been detached. - stream._closeRequest = undefined; - // Close request is removed from _closeRequest when close() is called on the underlying sink. This prevents it - // from being erroneously rejected on error. If a close() call is in-flight, the request is stored here. - stream._inFlightCloseRequest = undefined; - // The promise that was returned from writer.abort(). This may also be fulfilled after the writer has detached. - stream._pendingAbortRequest = undefined; - // The backpressure signal set by the controller. 
- stream._backpressure = false; - } - function IsWritableStream(x) { - if (!typeIsObject(x)) { - return false; - } - if (!Object.prototype.hasOwnProperty.call(x, '_writableStreamController')) { - return false; - } - return x instanceof WritableStream; - } - function IsWritableStreamLocked(stream) { - if (stream._writer === undefined) { - return false; - } - return true; - } - function WritableStreamAbort(stream, reason) { - var _a; - if (stream._state === 'closed' || stream._state === 'errored') { - return promiseResolvedWith(undefined); - } - stream._writableStreamController._abortReason = reason; - (_a = stream._writableStreamController._abortController) === null || _a === void 0 ? void 0 : _a.abort(); - // TypeScript narrows the type of `stream._state` down to 'writable' | 'erroring', - // but it doesn't know that signaling abort runs author code that might have changed the state. - // Widen the type again by casting to WritableStreamState. - const state = stream._state; - if (state === 'closed' || state === 'errored') { - return promiseResolvedWith(undefined); - } - if (stream._pendingAbortRequest !== undefined) { - return stream._pendingAbortRequest._promise; - } - let wasAlreadyErroring = false; - if (state === 'erroring') { - wasAlreadyErroring = true; - // reason will not be used, so don't keep a reference to it. - reason = undefined; - } - const promise = newPromise((resolve, reject) => { - stream._pendingAbortRequest = { - _promise: undefined, - _resolve: resolve, - _reject: reject, - _reason: reason, - _wasAlreadyErroring: wasAlreadyErroring - }; - }); - stream._pendingAbortRequest._promise = promise; - if (!wasAlreadyErroring) { - WritableStreamStartErroring(stream, reason); - } - return promise; - } - function WritableStreamClose(stream) { - const state = stream._state; - if (state === 'closed' || state === 'errored') { - return promiseRejectedWith(new TypeError(`The stream (in ${state} state) is not in the writable state and cannot be closed`)); - } - const promise = newPromise((resolve, reject) => { - const closeRequest = { - _resolve: resolve, - _reject: reject - }; - stream._closeRequest = closeRequest; - }); - const writer = stream._writer; - if (writer !== undefined && stream._backpressure && state === 'writable') { - defaultWriterReadyPromiseResolve(writer); - } - WritableStreamDefaultControllerClose(stream._writableStreamController); - return promise; - } - // WritableStream API exposed for controllers. 
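// Illustrative sketch, assuming any standard WritableStream implementation: the locking,
// backpressure, close and abort semantics documented above, seen from the producer side.
// The helper name `writeAll` is hypothetical.
async function writeAll(writableStream, chunks) {
  const writer = writableStream.getWriter(); // locks the stream to this writer
  try {
    for (const chunk of chunks) {
      await writer.ready;      // wait for backpressure to clear
      await writer.write(chunk);
    }
    await writer.close();      // flush queued chunks, then run the sink's close()
  } catch (err) {
    await writer.abort(err);   // discard queued writes and run the sink's abort()
    throw err;
  } finally {
    writer.releaseLock();
  }
}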
- function WritableStreamAddWriteRequest(stream) { - const promise = newPromise((resolve, reject) => { - const writeRequest = { - _resolve: resolve, - _reject: reject - }; - stream._writeRequests.push(writeRequest); - }); - return promise; - } - function WritableStreamDealWithRejection(stream, error) { - const state = stream._state; - if (state === 'writable') { - WritableStreamStartErroring(stream, error); - return; - } - WritableStreamFinishErroring(stream); - } - function WritableStreamStartErroring(stream, reason) { - const controller = stream._writableStreamController; - stream._state = 'erroring'; - stream._storedError = reason; - const writer = stream._writer; - if (writer !== undefined) { - WritableStreamDefaultWriterEnsureReadyPromiseRejected(writer, reason); - } - if (!WritableStreamHasOperationMarkedInFlight(stream) && controller._started) { - WritableStreamFinishErroring(stream); - } - } - function WritableStreamFinishErroring(stream) { - stream._state = 'errored'; - stream._writableStreamController[ErrorSteps](); - const storedError = stream._storedError; - stream._writeRequests.forEach(writeRequest => { - writeRequest._reject(storedError); - }); - stream._writeRequests = new SimpleQueue(); - if (stream._pendingAbortRequest === undefined) { - WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream); - return; - } - const abortRequest = stream._pendingAbortRequest; - stream._pendingAbortRequest = undefined; - if (abortRequest._wasAlreadyErroring) { - abortRequest._reject(storedError); - WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream); - return; - } - const promise = stream._writableStreamController[AbortSteps](abortRequest._reason); - uponPromise(promise, () => { - abortRequest._resolve(); - WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream); - }, (reason) => { - abortRequest._reject(reason); - WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream); - }); - } - function WritableStreamFinishInFlightWrite(stream) { - stream._inFlightWriteRequest._resolve(undefined); - stream._inFlightWriteRequest = undefined; - } - function WritableStreamFinishInFlightWriteWithError(stream, error) { - stream._inFlightWriteRequest._reject(error); - stream._inFlightWriteRequest = undefined; - WritableStreamDealWithRejection(stream, error); - } - function WritableStreamFinishInFlightClose(stream) { - stream._inFlightCloseRequest._resolve(undefined); - stream._inFlightCloseRequest = undefined; - const state = stream._state; - if (state === 'erroring') { - // The error was too late to do anything, so it is ignored. - stream._storedError = undefined; - if (stream._pendingAbortRequest !== undefined) { - stream._pendingAbortRequest._resolve(); - stream._pendingAbortRequest = undefined; - } - } - stream._state = 'closed'; - const writer = stream._writer; - if (writer !== undefined) { - defaultWriterClosedPromiseResolve(writer); - } - } - function WritableStreamFinishInFlightCloseWithError(stream, error) { - stream._inFlightCloseRequest._reject(error); - stream._inFlightCloseRequest = undefined; - // Never execute sink abort() after sink close(). - if (stream._pendingAbortRequest !== undefined) { - stream._pendingAbortRequest._reject(error); - stream._pendingAbortRequest = undefined; - } - WritableStreamDealWithRejection(stream, error); - } - // TODO(ricea): Fix alphabetical order. 
- function WritableStreamCloseQueuedOrInFlight(stream) { - if (stream._closeRequest === undefined && stream._inFlightCloseRequest === undefined) { - return false; - } - return true; - } - function WritableStreamHasOperationMarkedInFlight(stream) { - if (stream._inFlightWriteRequest === undefined && stream._inFlightCloseRequest === undefined) { - return false; - } - return true; - } - function WritableStreamMarkCloseRequestInFlight(stream) { - stream._inFlightCloseRequest = stream._closeRequest; - stream._closeRequest = undefined; - } - function WritableStreamMarkFirstWriteRequestInFlight(stream) { - stream._inFlightWriteRequest = stream._writeRequests.shift(); - } - function WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream) { - if (stream._closeRequest !== undefined) { - stream._closeRequest._reject(stream._storedError); - stream._closeRequest = undefined; - } - const writer = stream._writer; - if (writer !== undefined) { - defaultWriterClosedPromiseReject(writer, stream._storedError); - } - } - function WritableStreamUpdateBackpressure(stream, backpressure) { - const writer = stream._writer; - if (writer !== undefined && backpressure !== stream._backpressure) { - if (backpressure) { - defaultWriterReadyPromiseReset(writer); - } - else { - defaultWriterReadyPromiseResolve(writer); - } - } - stream._backpressure = backpressure; - } - /** - * A default writer vended by a {@link WritableStream}. - * - * @public - */ - class WritableStreamDefaultWriter { - constructor(stream) { - assertRequiredArgument(stream, 1, 'WritableStreamDefaultWriter'); - assertWritableStream(stream, 'First parameter'); - if (IsWritableStreamLocked(stream)) { - throw new TypeError('This stream has already been locked for exclusive writing by another writer'); - } - this._ownerWritableStream = stream; - stream._writer = this; - const state = stream._state; - if (state === 'writable') { - if (!WritableStreamCloseQueuedOrInFlight(stream) && stream._backpressure) { - defaultWriterReadyPromiseInitialize(this); - } - else { - defaultWriterReadyPromiseInitializeAsResolved(this); - } - defaultWriterClosedPromiseInitialize(this); - } - else if (state === 'erroring') { - defaultWriterReadyPromiseInitializeAsRejected(this, stream._storedError); - defaultWriterClosedPromiseInitialize(this); - } - else if (state === 'closed') { - defaultWriterReadyPromiseInitializeAsResolved(this); - defaultWriterClosedPromiseInitializeAsResolved(this); - } - else { - const storedError = stream._storedError; - defaultWriterReadyPromiseInitializeAsRejected(this, storedError); - defaultWriterClosedPromiseInitializeAsRejected(this, storedError); - } - } - /** - * Returns a promise that will be fulfilled when the stream becomes closed, or rejected if the stream ever errors or - * the writer’s lock is released before the stream finishes closing. - */ - get closed() { - if (!IsWritableStreamDefaultWriter(this)) { - return promiseRejectedWith(defaultWriterBrandCheckException('closed')); - } - return this._closedPromise; - } - /** - * Returns the desired size to fill the stream’s internal queue. It can be negative, if the queue is over-full. - * A producer can use this information to determine the right amount of data to write. - * - * It will be `null` if the stream cannot be successfully written to (due to either being errored, or having an abort - * queued up). It will return zero if the stream is closed. And the getter will throw an exception if invoked when - * the writer’s lock is released. 
- */ - get desiredSize() { - if (!IsWritableStreamDefaultWriter(this)) { - throw defaultWriterBrandCheckException('desiredSize'); - } - if (this._ownerWritableStream === undefined) { - throw defaultWriterLockException('desiredSize'); - } - return WritableStreamDefaultWriterGetDesiredSize(this); - } - /** - * Returns a promise that will be fulfilled when the desired size to fill the stream’s internal queue transitions - * from non-positive to positive, signaling that it is no longer applying backpressure. Once the desired size dips - * back to zero or below, the getter will return a new promise that stays pending until the next transition. - * - * If the stream becomes errored or aborted, or the writer’s lock is released, the returned promise will become - * rejected. - */ - get ready() { - if (!IsWritableStreamDefaultWriter(this)) { - return promiseRejectedWith(defaultWriterBrandCheckException('ready')); - } - return this._readyPromise; - } - /** - * If the reader is active, behaves the same as {@link WritableStream.abort | stream.abort(reason)}. - */ - abort(reason = undefined) { - if (!IsWritableStreamDefaultWriter(this)) { - return promiseRejectedWith(defaultWriterBrandCheckException('abort')); - } - if (this._ownerWritableStream === undefined) { - return promiseRejectedWith(defaultWriterLockException('abort')); - } - return WritableStreamDefaultWriterAbort(this, reason); - } - /** - * If the reader is active, behaves the same as {@link WritableStream.close | stream.close()}. - */ - close() { - if (!IsWritableStreamDefaultWriter(this)) { - return promiseRejectedWith(defaultWriterBrandCheckException('close')); - } - const stream = this._ownerWritableStream; - if (stream === undefined) { - return promiseRejectedWith(defaultWriterLockException('close')); - } - if (WritableStreamCloseQueuedOrInFlight(stream)) { - return promiseRejectedWith(new TypeError('Cannot close an already-closing stream')); - } - return WritableStreamDefaultWriterClose(this); - } - /** - * Releases the writer’s lock on the corresponding stream. After the lock is released, the writer is no longer active. - * If the associated stream is errored when the lock is released, the writer will appear errored in the same way from - * now on; otherwise, the writer will appear closed. - * - * Note that the lock can still be released even if some ongoing writes have not yet finished (i.e. even if the - * promises returned from previous calls to {@link WritableStreamDefaultWriter.write | write()} have not yet settled). - * It’s not necessary to hold the lock on the writer for the duration of the write; the lock instead simply prevents - * other producers from writing in an interleaved manner. 
- */ - releaseLock() { - if (!IsWritableStreamDefaultWriter(this)) { - throw defaultWriterBrandCheckException('releaseLock'); - } - const stream = this._ownerWritableStream; - if (stream === undefined) { - return; - } - WritableStreamDefaultWriterRelease(this); - } - write(chunk = undefined) { - if (!IsWritableStreamDefaultWriter(this)) { - return promiseRejectedWith(defaultWriterBrandCheckException('write')); - } - if (this._ownerWritableStream === undefined) { - return promiseRejectedWith(defaultWriterLockException('write to')); - } - return WritableStreamDefaultWriterWrite(this, chunk); - } - } - Object.defineProperties(WritableStreamDefaultWriter.prototype, { - abort: { enumerable: true }, - close: { enumerable: true }, - releaseLock: { enumerable: true }, - write: { enumerable: true }, - closed: { enumerable: true }, - desiredSize: { enumerable: true }, - ready: { enumerable: true } - }); - if (typeof SymbolPolyfill.toStringTag === 'symbol') { - Object.defineProperty(WritableStreamDefaultWriter.prototype, SymbolPolyfill.toStringTag, { - value: 'WritableStreamDefaultWriter', - configurable: true - }); - } - // Abstract operations for the WritableStreamDefaultWriter. - function IsWritableStreamDefaultWriter(x) { - if (!typeIsObject(x)) { - return false; - } - if (!Object.prototype.hasOwnProperty.call(x, '_ownerWritableStream')) { - return false; - } - return x instanceof WritableStreamDefaultWriter; - } - // A client of WritableStreamDefaultWriter may use these functions directly to bypass state check. - function WritableStreamDefaultWriterAbort(writer, reason) { - const stream = writer._ownerWritableStream; - return WritableStreamAbort(stream, reason); - } - function WritableStreamDefaultWriterClose(writer) { - const stream = writer._ownerWritableStream; - return WritableStreamClose(stream); - } - function WritableStreamDefaultWriterCloseWithErrorPropagation(writer) { - const stream = writer._ownerWritableStream; - const state = stream._state; - if (WritableStreamCloseQueuedOrInFlight(stream) || state === 'closed') { - return promiseResolvedWith(undefined); - } - if (state === 'errored') { - return promiseRejectedWith(stream._storedError); - } - return WritableStreamDefaultWriterClose(writer); - } - function WritableStreamDefaultWriterEnsureClosedPromiseRejected(writer, error) { - if (writer._closedPromiseState === 'pending') { - defaultWriterClosedPromiseReject(writer, error); - } - else { - defaultWriterClosedPromiseResetToRejected(writer, error); - } - } - function WritableStreamDefaultWriterEnsureReadyPromiseRejected(writer, error) { - if (writer._readyPromiseState === 'pending') { - defaultWriterReadyPromiseReject(writer, error); - } - else { - defaultWriterReadyPromiseResetToRejected(writer, error); - } - } - function WritableStreamDefaultWriterGetDesiredSize(writer) { - const stream = writer._ownerWritableStream; - const state = stream._state; - if (state === 'errored' || state === 'erroring') { - return null; - } - if (state === 'closed') { - return 0; - } - return WritableStreamDefaultControllerGetDesiredSize(stream._writableStreamController); - } - function WritableStreamDefaultWriterRelease(writer) { - const stream = writer._ownerWritableStream; - const releasedError = new TypeError(`Writer was released and can no longer be used to monitor the stream's closedness`); - WritableStreamDefaultWriterEnsureReadyPromiseRejected(writer, releasedError); - // The state transitions to "errored" before the sink abort() method runs, but the writer.closed promise is not - // rejected 
until afterwards. This means that simply testing state will not work. - WritableStreamDefaultWriterEnsureClosedPromiseRejected(writer, releasedError); - stream._writer = undefined; - writer._ownerWritableStream = undefined; - } - function WritableStreamDefaultWriterWrite(writer, chunk) { - const stream = writer._ownerWritableStream; - const controller = stream._writableStreamController; - const chunkSize = WritableStreamDefaultControllerGetChunkSize(controller, chunk); - if (stream !== writer._ownerWritableStream) { - return promiseRejectedWith(defaultWriterLockException('write to')); - } - const state = stream._state; - if (state === 'errored') { - return promiseRejectedWith(stream._storedError); - } - if (WritableStreamCloseQueuedOrInFlight(stream) || state === 'closed') { - return promiseRejectedWith(new TypeError('The stream is closing or closed and cannot be written to')); - } - if (state === 'erroring') { - return promiseRejectedWith(stream._storedError); - } - const promise = WritableStreamAddWriteRequest(stream); - WritableStreamDefaultControllerWrite(controller, chunk, chunkSize); - return promise; - } - const closeSentinel = {}; - /** - * Allows control of a {@link WritableStream | writable stream}'s state and internal queue. - * - * @public - */ - class WritableStreamDefaultController { - constructor() { - throw new TypeError('Illegal constructor'); - } - /** - * The reason which was passed to `WritableStream.abort(reason)` when the stream was aborted. - * - * @deprecated - * This property has been removed from the specification, see https://github.com/whatwg/streams/pull/1177. - * Use {@link WritableStreamDefaultController.signal}'s `reason` instead. - */ - get abortReason() { - if (!IsWritableStreamDefaultController(this)) { - throw defaultControllerBrandCheckException$2('abortReason'); - } - return this._abortReason; - } - /** - * An `AbortSignal` that can be used to abort the pending write or close operation when the stream is aborted. - */ - get signal() { - if (!IsWritableStreamDefaultController(this)) { - throw defaultControllerBrandCheckException$2('signal'); - } - if (this._abortController === undefined) { - // Older browsers or older Node versions may not support `AbortController` or `AbortSignal`. - // We don't want to bundle and ship an `AbortController` polyfill together with our polyfill, - // so instead we only implement support for `signal` if we find a global `AbortController` constructor. - throw new TypeError('WritableStreamDefaultController.prototype.signal is not supported'); - } - return this._abortController.signal; - } - /** - * Closes the controlled writable stream, making all future interactions with it fail with the given error `e`. - * - * This method is rarely used, since usually it suffices to return a rejected promise from one of the underlying - * sink's methods. However, it can be useful for suddenly shutting down a stream in response to an event outside the - * normal lifecycle of interactions with the underlying sink. - */ - error(e = undefined) { - if (!IsWritableStreamDefaultController(this)) { - throw defaultControllerBrandCheckException$2('error'); - } - const state = this._controlledWritableStream._state; - if (state !== 'writable') { - // The stream is closed, errored or will be soon. The sink can't do anything useful if it gets an error here, so - // just treat it as a no-op. 
- return; - } - WritableStreamDefaultControllerError(this, e); - } - /** @internal */ - [AbortSteps](reason) { - const result = this._abortAlgorithm(reason); - WritableStreamDefaultControllerClearAlgorithms(this); - return result; - } - /** @internal */ - [ErrorSteps]() { - ResetQueue(this); - } - } - Object.defineProperties(WritableStreamDefaultController.prototype, { - abortReason: { enumerable: true }, - signal: { enumerable: true }, - error: { enumerable: true } - }); - if (typeof SymbolPolyfill.toStringTag === 'symbol') { - Object.defineProperty(WritableStreamDefaultController.prototype, SymbolPolyfill.toStringTag, { - value: 'WritableStreamDefaultController', - configurable: true - }); - } - // Abstract operations implementing interface required by the WritableStream. - function IsWritableStreamDefaultController(x) { - if (!typeIsObject(x)) { - return false; - } - if (!Object.prototype.hasOwnProperty.call(x, '_controlledWritableStream')) { - return false; - } - return x instanceof WritableStreamDefaultController; - } - function SetUpWritableStreamDefaultController(stream, controller, startAlgorithm, writeAlgorithm, closeAlgorithm, abortAlgorithm, highWaterMark, sizeAlgorithm) { - controller._controlledWritableStream = stream; - stream._writableStreamController = controller; - // Need to set the slots so that the assert doesn't fire. In the spec the slots already exist implicitly. - controller._queue = undefined; - controller._queueTotalSize = undefined; - ResetQueue(controller); - controller._abortReason = undefined; - controller._abortController = createAbortController(); - controller._started = false; - controller._strategySizeAlgorithm = sizeAlgorithm; - controller._strategyHWM = highWaterMark; - controller._writeAlgorithm = writeAlgorithm; - controller._closeAlgorithm = closeAlgorithm; - controller._abortAlgorithm = abortAlgorithm; - const backpressure = WritableStreamDefaultControllerGetBackpressure(controller); - WritableStreamUpdateBackpressure(stream, backpressure); - const startResult = startAlgorithm(); - const startPromise = promiseResolvedWith(startResult); - uponPromise(startPromise, () => { - controller._started = true; - WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller); - }, r => { - controller._started = true; - WritableStreamDealWithRejection(stream, r); - }); - } - function SetUpWritableStreamDefaultControllerFromUnderlyingSink(stream, underlyingSink, highWaterMark, sizeAlgorithm) { - const controller = Object.create(WritableStreamDefaultController.prototype); - let startAlgorithm = () => undefined; - let writeAlgorithm = () => promiseResolvedWith(undefined); - let closeAlgorithm = () => promiseResolvedWith(undefined); - let abortAlgorithm = () => promiseResolvedWith(undefined); - if (underlyingSink.start !== undefined) { - startAlgorithm = () => underlyingSink.start(controller); - } - if (underlyingSink.write !== undefined) { - writeAlgorithm = chunk => underlyingSink.write(chunk, controller); - } - if (underlyingSink.close !== undefined) { - closeAlgorithm = () => underlyingSink.close(); - } - if (underlyingSink.abort !== undefined) { - abortAlgorithm = reason => underlyingSink.abort(reason); - } - SetUpWritableStreamDefaultController(stream, controller, startAlgorithm, writeAlgorithm, closeAlgorithm, abortAlgorithm, highWaterMark, sizeAlgorithm); - } - // ClearAlgorithms may be called twice. Erroring the same stream in multiple ways will often result in redundant calls. 
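// Illustrative sketch: an underlying sink observing the controller's AbortSignal
// (created by createAbortController above) to cancel an in-flight request when the stream
// is aborted. The variable name `uploadStream` and the URL are placeholders.
const uploadStream = new WritableStream({
  async write(chunk, controller) {
    // controller.signal aborts when writer.abort() / stream.abort() is called.
    await fetch('https://example.invalid/upload', {
      method: 'POST',
      body: chunk,
      signal: controller.signal
    });
  },
  abort(reason) {
    // Runs after pending writes are discarded.
    console.log('upload aborted:', reason);
  }
});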
- function WritableStreamDefaultControllerClearAlgorithms(controller) { - controller._writeAlgorithm = undefined; - controller._closeAlgorithm = undefined; - controller._abortAlgorithm = undefined; - controller._strategySizeAlgorithm = undefined; - } - function WritableStreamDefaultControllerClose(controller) { - EnqueueValueWithSize(controller, closeSentinel, 0); - WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller); - } - function WritableStreamDefaultControllerGetChunkSize(controller, chunk) { - try { - return controller._strategySizeAlgorithm(chunk); - } - catch (chunkSizeE) { - WritableStreamDefaultControllerErrorIfNeeded(controller, chunkSizeE); - return 1; - } - } - function WritableStreamDefaultControllerGetDesiredSize(controller) { - return controller._strategyHWM - controller._queueTotalSize; - } - function WritableStreamDefaultControllerWrite(controller, chunk, chunkSize) { - try { - EnqueueValueWithSize(controller, chunk, chunkSize); - } - catch (enqueueE) { - WritableStreamDefaultControllerErrorIfNeeded(controller, enqueueE); - return; - } - const stream = controller._controlledWritableStream; - if (!WritableStreamCloseQueuedOrInFlight(stream) && stream._state === 'writable') { - const backpressure = WritableStreamDefaultControllerGetBackpressure(controller); - WritableStreamUpdateBackpressure(stream, backpressure); - } - WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller); - } - // Abstract operations for the WritableStreamDefaultController. - function WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller) { - const stream = controller._controlledWritableStream; - if (!controller._started) { - return; - } - if (stream._inFlightWriteRequest !== undefined) { - return; - } - const state = stream._state; - if (state === 'erroring') { - WritableStreamFinishErroring(stream); - return; - } - if (controller._queue.length === 0) { - return; - } - const value = PeekQueueValue(controller); - if (value === closeSentinel) { - WritableStreamDefaultControllerProcessClose(controller); - } - else { - WritableStreamDefaultControllerProcessWrite(controller, value); - } - } - function WritableStreamDefaultControllerErrorIfNeeded(controller, error) { - if (controller._controlledWritableStream._state === 'writable') { - WritableStreamDefaultControllerError(controller, error); - } - } - function WritableStreamDefaultControllerProcessClose(controller) { - const stream = controller._controlledWritableStream; - WritableStreamMarkCloseRequestInFlight(stream); - DequeueValue(controller); - const sinkClosePromise = controller._closeAlgorithm(); - WritableStreamDefaultControllerClearAlgorithms(controller); - uponPromise(sinkClosePromise, () => { - WritableStreamFinishInFlightClose(stream); - }, reason => { - WritableStreamFinishInFlightCloseWithError(stream, reason); - }); - } - function WritableStreamDefaultControllerProcessWrite(controller, chunk) { - const stream = controller._controlledWritableStream; - WritableStreamMarkFirstWriteRequestInFlight(stream); - const sinkWritePromise = controller._writeAlgorithm(chunk); - uponPromise(sinkWritePromise, () => { - WritableStreamFinishInFlightWrite(stream); - const state = stream._state; - DequeueValue(controller); - if (!WritableStreamCloseQueuedOrInFlight(stream) && state === 'writable') { - const backpressure = WritableStreamDefaultControllerGetBackpressure(controller); - WritableStreamUpdateBackpressure(stream, backpressure); - } - WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller); - }, reason => { - if 
(stream._state === 'writable') { - WritableStreamDefaultControllerClearAlgorithms(controller); - } - WritableStreamFinishInFlightWriteWithError(stream, reason); - }); - } - function WritableStreamDefaultControllerGetBackpressure(controller) { - const desiredSize = WritableStreamDefaultControllerGetDesiredSize(controller); - return desiredSize <= 0; - } - // A client of WritableStreamDefaultController may use these functions directly to bypass state check. - function WritableStreamDefaultControllerError(controller, error) { - const stream = controller._controlledWritableStream; - WritableStreamDefaultControllerClearAlgorithms(controller); - WritableStreamStartErroring(stream, error); - } - // Helper functions for the WritableStream. - function streamBrandCheckException$2(name) { - return new TypeError(`WritableStream.prototype.${name} can only be used on a WritableStream`); - } - // Helper functions for the WritableStreamDefaultController. - function defaultControllerBrandCheckException$2(name) { - return new TypeError(`WritableStreamDefaultController.prototype.${name} can only be used on a WritableStreamDefaultController`); - } - // Helper functions for the WritableStreamDefaultWriter. - function defaultWriterBrandCheckException(name) { - return new TypeError(`WritableStreamDefaultWriter.prototype.${name} can only be used on a WritableStreamDefaultWriter`); - } - function defaultWriterLockException(name) { - return new TypeError('Cannot ' + name + ' a stream using a released writer'); - } - function defaultWriterClosedPromiseInitialize(writer) { - writer._closedPromise = newPromise((resolve, reject) => { - writer._closedPromise_resolve = resolve; - writer._closedPromise_reject = reject; - writer._closedPromiseState = 'pending'; - }); - } - function defaultWriterClosedPromiseInitializeAsRejected(writer, reason) { - defaultWriterClosedPromiseInitialize(writer); - defaultWriterClosedPromiseReject(writer, reason); - } - function defaultWriterClosedPromiseInitializeAsResolved(writer) { - defaultWriterClosedPromiseInitialize(writer); - defaultWriterClosedPromiseResolve(writer); - } - function defaultWriterClosedPromiseReject(writer, reason) { - if (writer._closedPromise_reject === undefined) { - return; - } - setPromiseIsHandledToTrue(writer._closedPromise); - writer._closedPromise_reject(reason); - writer._closedPromise_resolve = undefined; - writer._closedPromise_reject = undefined; - writer._closedPromiseState = 'rejected'; - } - function defaultWriterClosedPromiseResetToRejected(writer, reason) { - defaultWriterClosedPromiseInitializeAsRejected(writer, reason); - } - function defaultWriterClosedPromiseResolve(writer) { - if (writer._closedPromise_resolve === undefined) { - return; - } - writer._closedPromise_resolve(undefined); - writer._closedPromise_resolve = undefined; - writer._closedPromise_reject = undefined; - writer._closedPromiseState = 'resolved'; - } - function defaultWriterReadyPromiseInitialize(writer) { - writer._readyPromise = newPromise((resolve, reject) => { - writer._readyPromise_resolve = resolve; - writer._readyPromise_reject = reject; - }); - writer._readyPromiseState = 'pending'; - } - function defaultWriterReadyPromiseInitializeAsRejected(writer, reason) { - defaultWriterReadyPromiseInitialize(writer); - defaultWriterReadyPromiseReject(writer, reason); - } - function defaultWriterReadyPromiseInitializeAsResolved(writer) { - defaultWriterReadyPromiseInitialize(writer); - defaultWriterReadyPromiseResolve(writer); - } - function defaultWriterReadyPromiseReject(writer, 
reason) { - if (writer._readyPromise_reject === undefined) { - return; - } - setPromiseIsHandledToTrue(writer._readyPromise); - writer._readyPromise_reject(reason); - writer._readyPromise_resolve = undefined; - writer._readyPromise_reject = undefined; - writer._readyPromiseState = 'rejected'; - } - function defaultWriterReadyPromiseReset(writer) { - defaultWriterReadyPromiseInitialize(writer); - } - function defaultWriterReadyPromiseResetToRejected(writer, reason) { - defaultWriterReadyPromiseInitializeAsRejected(writer, reason); - } - function defaultWriterReadyPromiseResolve(writer) { - if (writer._readyPromise_resolve === undefined) { - return; - } - writer._readyPromise_resolve(undefined); - writer._readyPromise_resolve = undefined; - writer._readyPromise_reject = undefined; - writer._readyPromiseState = 'fulfilled'; - } - - /// - const NativeDOMException = typeof DOMException !== 'undefined' ? DOMException : undefined; - - /// - function isDOMExceptionConstructor(ctor) { - if (!(typeof ctor === 'function' || typeof ctor === 'object')) { - return false; - } - try { - new ctor(); - return true; - } - catch (_a) { - return false; - } - } - function createDOMExceptionPolyfill() { - // eslint-disable-next-line no-shadow - const ctor = function DOMException(message, name) { - this.message = message || ''; - this.name = name || 'Error'; - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); - } - }; - ctor.prototype = Object.create(Error.prototype); - Object.defineProperty(ctor.prototype, 'constructor', { value: ctor, writable: true, configurable: true }); - return ctor; - } - // eslint-disable-next-line no-redeclare - const DOMException$1 = isDOMExceptionConstructor(NativeDOMException) ? NativeDOMException : createDOMExceptionPolyfill(); - - function ReadableStreamPipeTo(source, dest, preventClose, preventAbort, preventCancel, signal) { - const reader = AcquireReadableStreamDefaultReader(source); - const writer = AcquireWritableStreamDefaultWriter(dest); - source._disturbed = true; - let shuttingDown = false; - // This is used to keep track of the spec's requirement that we wait for ongoing writes during shutdown. 
- let currentWrite = promiseResolvedWith(undefined); - return newPromise((resolve, reject) => { - let abortAlgorithm; - if (signal !== undefined) { - abortAlgorithm = () => { - const error = new DOMException$1('Aborted', 'AbortError'); - const actions = []; - if (!preventAbort) { - actions.push(() => { - if (dest._state === 'writable') { - return WritableStreamAbort(dest, error); - } - return promiseResolvedWith(undefined); - }); - } - if (!preventCancel) { - actions.push(() => { - if (source._state === 'readable') { - return ReadableStreamCancel(source, error); - } - return promiseResolvedWith(undefined); - }); - } - shutdownWithAction(() => Promise.all(actions.map(action => action())), true, error); - }; - if (signal.aborted) { - abortAlgorithm(); - return; - } - signal.addEventListener('abort', abortAlgorithm); - } - // Using reader and writer, read all chunks from this and write them to dest - // - Backpressure must be enforced - // - Shutdown must stop all activity - function pipeLoop() { - return newPromise((resolveLoop, rejectLoop) => { - function next(done) { - if (done) { - resolveLoop(); - } - else { - // Use `PerformPromiseThen` instead of `uponPromise` to avoid - // adding unnecessary `.catch(rethrowAssertionErrorRejection)` handlers - PerformPromiseThen(pipeStep(), next, rejectLoop); - } - } - next(false); - }); - } - function pipeStep() { - if (shuttingDown) { - return promiseResolvedWith(true); - } - return PerformPromiseThen(writer._readyPromise, () => { - return newPromise((resolveRead, rejectRead) => { - ReadableStreamDefaultReaderRead(reader, { - _chunkSteps: chunk => { - currentWrite = PerformPromiseThen(WritableStreamDefaultWriterWrite(writer, chunk), undefined, noop); - resolveRead(false); - }, - _closeSteps: () => resolveRead(true), - _errorSteps: rejectRead - }); - }); - }); - } - // Errors must be propagated forward - isOrBecomesErrored(source, reader._closedPromise, storedError => { - if (!preventAbort) { - shutdownWithAction(() => WritableStreamAbort(dest, storedError), true, storedError); - } - else { - shutdown(true, storedError); - } - }); - // Errors must be propagated backward - isOrBecomesErrored(dest, writer._closedPromise, storedError => { - if (!preventCancel) { - shutdownWithAction(() => ReadableStreamCancel(source, storedError), true, storedError); - } - else { - shutdown(true, storedError); - } - }); - // Closing must be propagated forward - isOrBecomesClosed(source, reader._closedPromise, () => { - if (!preventClose) { - shutdownWithAction(() => WritableStreamDefaultWriterCloseWithErrorPropagation(writer)); - } - else { - shutdown(); - } - }); - // Closing must be propagated backward - if (WritableStreamCloseQueuedOrInFlight(dest) || dest._state === 'closed') { - const destClosed = new TypeError('the destination writable stream closed before all data could be piped to it'); - if (!preventCancel) { - shutdownWithAction(() => ReadableStreamCancel(source, destClosed), true, destClosed); - } - else { - shutdown(true, destClosed); - } - } - setPromiseIsHandledToTrue(pipeLoop()); - function waitForWritesToFinish() { - // Another write may have started while we were waiting on this currentWrite, so we have to be sure to wait - // for that too. - const oldCurrentWrite = currentWrite; - return PerformPromiseThen(currentWrite, () => oldCurrentWrite !== currentWrite ? 
waitForWritesToFinish() : undefined); - } - function isOrBecomesErrored(stream, promise, action) { - if (stream._state === 'errored') { - action(stream._storedError); - } - else { - uponRejection(promise, action); - } - } - function isOrBecomesClosed(stream, promise, action) { - if (stream._state === 'closed') { - action(); - } - else { - uponFulfillment(promise, action); - } - } - function shutdownWithAction(action, originalIsError, originalError) { - if (shuttingDown) { - return; - } - shuttingDown = true; - if (dest._state === 'writable' && !WritableStreamCloseQueuedOrInFlight(dest)) { - uponFulfillment(waitForWritesToFinish(), doTheRest); - } - else { - doTheRest(); - } - function doTheRest() { - uponPromise(action(), () => finalize(originalIsError, originalError), newError => finalize(true, newError)); - } - } - function shutdown(isError, error) { - if (shuttingDown) { - return; - } - shuttingDown = true; - if (dest._state === 'writable' && !WritableStreamCloseQueuedOrInFlight(dest)) { - uponFulfillment(waitForWritesToFinish(), () => finalize(isError, error)); - } - else { - finalize(isError, error); - } - } - function finalize(isError, error) { - WritableStreamDefaultWriterRelease(writer); - ReadableStreamReaderGenericRelease(reader); - if (signal !== undefined) { - signal.removeEventListener('abort', abortAlgorithm); - } - if (isError) { - reject(error); - } - else { - resolve(undefined); - } - } - }); - } - - /** - * Allows control of a {@link ReadableStream | readable stream}'s state and internal queue. - * - * @public - */ - class ReadableStreamDefaultController { - constructor() { - throw new TypeError('Illegal constructor'); - } - /** - * Returns the desired size to fill the controlled stream's internal queue. It can be negative, if the queue is - * over-full. An underlying source ought to use this information to determine when and how to apply backpressure. - */ - get desiredSize() { - if (!IsReadableStreamDefaultController(this)) { - throw defaultControllerBrandCheckException$1('desiredSize'); - } - return ReadableStreamDefaultControllerGetDesiredSize(this); - } - /** - * Closes the controlled readable stream. Consumers will still be able to read any previously-enqueued chunks from - * the stream, but once those are read, the stream will become closed. - */ - close() { - if (!IsReadableStreamDefaultController(this)) { - throw defaultControllerBrandCheckException$1('close'); - } - if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(this)) { - throw new TypeError('The stream is not in a state that permits close'); - } - ReadableStreamDefaultControllerClose(this); - } - enqueue(chunk = undefined) { - if (!IsReadableStreamDefaultController(this)) { - throw defaultControllerBrandCheckException$1('enqueue'); - } - if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(this)) { - throw new TypeError('The stream is not in a state that permits enqueue'); - } - return ReadableStreamDefaultControllerEnqueue(this, chunk); - } - /** - * Errors the controlled readable stream, making all future interactions with it fail with the given error `e`. 
- */ - error(e = undefined) { - if (!IsReadableStreamDefaultController(this)) { - throw defaultControllerBrandCheckException$1('error'); - } - ReadableStreamDefaultControllerError(this, e); - } - /** @internal */ - [CancelSteps](reason) { - ResetQueue(this); - const result = this._cancelAlgorithm(reason); - ReadableStreamDefaultControllerClearAlgorithms(this); - return result; - } - /** @internal */ - [PullSteps](readRequest) { - const stream = this._controlledReadableStream; - if (this._queue.length > 0) { - const chunk = DequeueValue(this); - if (this._closeRequested && this._queue.length === 0) { - ReadableStreamDefaultControllerClearAlgorithms(this); - ReadableStreamClose(stream); - } - else { - ReadableStreamDefaultControllerCallPullIfNeeded(this); - } - readRequest._chunkSteps(chunk); - } - else { - ReadableStreamAddReadRequest(stream, readRequest); - ReadableStreamDefaultControllerCallPullIfNeeded(this); - } - } - } - Object.defineProperties(ReadableStreamDefaultController.prototype, { - close: { enumerable: true }, - enqueue: { enumerable: true }, - error: { enumerable: true }, - desiredSize: { enumerable: true } - }); - if (typeof SymbolPolyfill.toStringTag === 'symbol') { - Object.defineProperty(ReadableStreamDefaultController.prototype, SymbolPolyfill.toStringTag, { - value: 'ReadableStreamDefaultController', - configurable: true - }); - } - // Abstract operations for the ReadableStreamDefaultController. - function IsReadableStreamDefaultController(x) { - if (!typeIsObject(x)) { - return false; - } - if (!Object.prototype.hasOwnProperty.call(x, '_controlledReadableStream')) { - return false; - } - return x instanceof ReadableStreamDefaultController; - } - function ReadableStreamDefaultControllerCallPullIfNeeded(controller) { - const shouldPull = ReadableStreamDefaultControllerShouldCallPull(controller); - if (!shouldPull) { - return; - } - if (controller._pulling) { - controller._pullAgain = true; - return; - } - controller._pulling = true; - const pullPromise = controller._pullAlgorithm(); - uponPromise(pullPromise, () => { - controller._pulling = false; - if (controller._pullAgain) { - controller._pullAgain = false; - ReadableStreamDefaultControllerCallPullIfNeeded(controller); - } - }, e => { - ReadableStreamDefaultControllerError(controller, e); - }); - } - function ReadableStreamDefaultControllerShouldCallPull(controller) { - const stream = controller._controlledReadableStream; - if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(controller)) { - return false; - } - if (!controller._started) { - return false; - } - if (IsReadableStreamLocked(stream) && ReadableStreamGetNumReadRequests(stream) > 0) { - return true; - } - const desiredSize = ReadableStreamDefaultControllerGetDesiredSize(controller); - if (desiredSize > 0) { - return true; - } - return false; - } - function ReadableStreamDefaultControllerClearAlgorithms(controller) { - controller._pullAlgorithm = undefined; - controller._cancelAlgorithm = undefined; - controller._strategySizeAlgorithm = undefined; - } - // A client of ReadableStreamDefaultController may use these functions directly to bypass state check. 
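// Illustrative sketch: the pipe behaviour implemented by ReadableStreamPipeTo above, driven
// through the public pipeTo() API with an external AbortSignal. The helper name
// `pipeWithTimeout` is hypothetical.
async function pipeWithTimeout(readable, writable, ms) {
  const aborter = new AbortController();
  const timer = setTimeout(() => aborter.abort(), ms);
  try {
    await readable.pipeTo(writable, {
      signal: aborter.signal, // on abort, rejects with an 'AbortError' DOMException as above
      preventClose: false,    // defaults shown explicitly: propagate close forward,
      preventAbort: false,    // errors forward,
      preventCancel: false    // and errors backward
    });
  } finally {
    clearTimeout(timer);
  }
}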
- function ReadableStreamDefaultControllerClose(controller) { - if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(controller)) { - return; - } - const stream = controller._controlledReadableStream; - controller._closeRequested = true; - if (controller._queue.length === 0) { - ReadableStreamDefaultControllerClearAlgorithms(controller); - ReadableStreamClose(stream); - } - } - function ReadableStreamDefaultControllerEnqueue(controller, chunk) { - if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(controller)) { - return; - } - const stream = controller._controlledReadableStream; - if (IsReadableStreamLocked(stream) && ReadableStreamGetNumReadRequests(stream) > 0) { - ReadableStreamFulfillReadRequest(stream, chunk, false); - } - else { - let chunkSize; - try { - chunkSize = controller._strategySizeAlgorithm(chunk); - } - catch (chunkSizeE) { - ReadableStreamDefaultControllerError(controller, chunkSizeE); - throw chunkSizeE; - } - try { - EnqueueValueWithSize(controller, chunk, chunkSize); - } - catch (enqueueE) { - ReadableStreamDefaultControllerError(controller, enqueueE); - throw enqueueE; - } - } - ReadableStreamDefaultControllerCallPullIfNeeded(controller); - } - function ReadableStreamDefaultControllerError(controller, e) { - const stream = controller._controlledReadableStream; - if (stream._state !== 'readable') { - return; - } - ResetQueue(controller); - ReadableStreamDefaultControllerClearAlgorithms(controller); - ReadableStreamError(stream, e); - } - function ReadableStreamDefaultControllerGetDesiredSize(controller) { - const state = controller._controlledReadableStream._state; - if (state === 'errored') { - return null; - } - if (state === 'closed') { - return 0; - } - return controller._strategyHWM - controller._queueTotalSize; - } - // This is used in the implementation of TransformStream. 
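// Illustrative sketch: how enqueue()/close() and desiredSize (implemented above) appear to
// an underlying source that respects backpressure. The chunk contents and the variable name
// `countingStream` are arbitrary placeholders.
let nextChunkId = 0;
const countingStream = new ReadableStream({
  pull(controller) {
    // pull() keeps being called only while desiredSize stays positive
    // (see ReadableStreamDefaultControllerShouldCallPull above).
    controller.enqueue(`chunk ${nextChunkId++}`);
    if (nextChunkId >= 10) {
      controller.close(); // remaining queued chunks stay readable, then the stream closes
    }
  }
}, new CountQueuingStrategy({ highWaterMark: 3 }));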
- function ReadableStreamDefaultControllerHasBackpressure(controller) { - if (ReadableStreamDefaultControllerShouldCallPull(controller)) { - return false; - } - return true; - } - function ReadableStreamDefaultControllerCanCloseOrEnqueue(controller) { - const state = controller._controlledReadableStream._state; - if (!controller._closeRequested && state === 'readable') { - return true; - } - return false; - } - function SetUpReadableStreamDefaultController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, sizeAlgorithm) { - controller._controlledReadableStream = stream; - controller._queue = undefined; - controller._queueTotalSize = undefined; - ResetQueue(controller); - controller._started = false; - controller._closeRequested = false; - controller._pullAgain = false; - controller._pulling = false; - controller._strategySizeAlgorithm = sizeAlgorithm; - controller._strategyHWM = highWaterMark; - controller._pullAlgorithm = pullAlgorithm; - controller._cancelAlgorithm = cancelAlgorithm; - stream._readableStreamController = controller; - const startResult = startAlgorithm(); - uponPromise(promiseResolvedWith(startResult), () => { - controller._started = true; - ReadableStreamDefaultControllerCallPullIfNeeded(controller); - }, r => { - ReadableStreamDefaultControllerError(controller, r); - }); - } - function SetUpReadableStreamDefaultControllerFromUnderlyingSource(stream, underlyingSource, highWaterMark, sizeAlgorithm) { - const controller = Object.create(ReadableStreamDefaultController.prototype); - let startAlgorithm = () => undefined; - let pullAlgorithm = () => promiseResolvedWith(undefined); - let cancelAlgorithm = () => promiseResolvedWith(undefined); - if (underlyingSource.start !== undefined) { - startAlgorithm = () => underlyingSource.start(controller); - } - if (underlyingSource.pull !== undefined) { - pullAlgorithm = () => underlyingSource.pull(controller); - } - if (underlyingSource.cancel !== undefined) { - cancelAlgorithm = reason => underlyingSource.cancel(reason); - } - SetUpReadableStreamDefaultController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, sizeAlgorithm); - } - // Helper functions for the ReadableStreamDefaultController. - function defaultControllerBrandCheckException$1(name) { - return new TypeError(`ReadableStreamDefaultController.prototype.${name} can only be used on a ReadableStreamDefaultController`); - } - - function ReadableStreamTee(stream, cloneForBranch2) { - if (IsReadableByteStreamController(stream._readableStreamController)) { - return ReadableByteStreamTee(stream); - } - return ReadableStreamDefaultTee(stream); - } - function ReadableStreamDefaultTee(stream, cloneForBranch2) { - const reader = AcquireReadableStreamDefaultReader(stream); - let reading = false; - let readAgain = false; - let canceled1 = false; - let canceled2 = false; - let reason1; - let reason2; - let branch1; - let branch2; - let resolveCancelPromise; - const cancelPromise = newPromise(resolve => { - resolveCancelPromise = resolve; - }); - function pullAlgorithm() { - if (reading) { - readAgain = true; - return promiseResolvedWith(undefined); - } - reading = true; - const readRequest = { - _chunkSteps: chunk => { - // This needs to be delayed a microtask because it takes at least a microtask to detect errors (using - // reader._closedPromise below), and we want errors in stream to error both branches immediately. 
We cannot let - // successful synchronously-available reads get ahead of asynchronously-available errors. - queueMicrotask(() => { - readAgain = false; - const chunk1 = chunk; - const chunk2 = chunk; - // There is no way to access the cloning code right now in the reference implementation. - // If we add one then we'll need an implementation for serializable objects. - // if (!canceled2 && cloneForBranch2) { - // chunk2 = StructuredDeserialize(StructuredSerialize(chunk2)); - // } - if (!canceled1) { - ReadableStreamDefaultControllerEnqueue(branch1._readableStreamController, chunk1); - } - if (!canceled2) { - ReadableStreamDefaultControllerEnqueue(branch2._readableStreamController, chunk2); - } - reading = false; - if (readAgain) { - pullAlgorithm(); - } - }); - }, - _closeSteps: () => { - reading = false; - if (!canceled1) { - ReadableStreamDefaultControllerClose(branch1._readableStreamController); - } - if (!canceled2) { - ReadableStreamDefaultControllerClose(branch2._readableStreamController); - } - if (!canceled1 || !canceled2) { - resolveCancelPromise(undefined); - } - }, - _errorSteps: () => { - reading = false; - } - }; - ReadableStreamDefaultReaderRead(reader, readRequest); - return promiseResolvedWith(undefined); - } - function cancel1Algorithm(reason) { - canceled1 = true; - reason1 = reason; - if (canceled2) { - const compositeReason = CreateArrayFromList([reason1, reason2]); - const cancelResult = ReadableStreamCancel(stream, compositeReason); - resolveCancelPromise(cancelResult); - } - return cancelPromise; - } - function cancel2Algorithm(reason) { - canceled2 = true; - reason2 = reason; - if (canceled1) { - const compositeReason = CreateArrayFromList([reason1, reason2]); - const cancelResult = ReadableStreamCancel(stream, compositeReason); - resolveCancelPromise(cancelResult); - } - return cancelPromise; - } - function startAlgorithm() { - // do nothing - } - branch1 = CreateReadableStream(startAlgorithm, pullAlgorithm, cancel1Algorithm); - branch2 = CreateReadableStream(startAlgorithm, pullAlgorithm, cancel2Algorithm); - uponRejection(reader._closedPromise, (r) => { - ReadableStreamDefaultControllerError(branch1._readableStreamController, r); - ReadableStreamDefaultControllerError(branch2._readableStreamController, r); - if (!canceled1 || !canceled2) { - resolveCancelPromise(undefined); - } - }); - return [branch1, branch2]; - } - function ReadableByteStreamTee(stream) { - let reader = AcquireReadableStreamDefaultReader(stream); - let reading = false; - let readAgainForBranch1 = false; - let readAgainForBranch2 = false; - let canceled1 = false; - let canceled2 = false; - let reason1; - let reason2; - let branch1; - let branch2; - let resolveCancelPromise; - const cancelPromise = newPromise(resolve => { - resolveCancelPromise = resolve; - }); - function forwardReaderError(thisReader) { - uponRejection(thisReader._closedPromise, r => { - if (thisReader !== reader) { - return; - } - ReadableByteStreamControllerError(branch1._readableStreamController, r); - ReadableByteStreamControllerError(branch2._readableStreamController, r); - if (!canceled1 || !canceled2) { - resolveCancelPromise(undefined); - } - }); - } - function pullWithDefaultReader() { - if (IsReadableStreamBYOBReader(reader)) { - ReadableStreamReaderGenericRelease(reader); - reader = AcquireReadableStreamDefaultReader(stream); - forwardReaderError(reader); - } - const readRequest = { - _chunkSteps: chunk => { - // This needs to be delayed a microtask because it takes at least a microtask to detect errors (using - // 
reader._closedPromise below), and we want errors in stream to error both branches immediately. We cannot let - // successful synchronously-available reads get ahead of asynchronously-available errors. - queueMicrotask(() => { - readAgainForBranch1 = false; - readAgainForBranch2 = false; - const chunk1 = chunk; - let chunk2 = chunk; - if (!canceled1 && !canceled2) { - try { - chunk2 = CloneAsUint8Array(chunk); - } - catch (cloneE) { - ReadableByteStreamControllerError(branch1._readableStreamController, cloneE); - ReadableByteStreamControllerError(branch2._readableStreamController, cloneE); - resolveCancelPromise(ReadableStreamCancel(stream, cloneE)); - return; - } - } - if (!canceled1) { - ReadableByteStreamControllerEnqueue(branch1._readableStreamController, chunk1); - } - if (!canceled2) { - ReadableByteStreamControllerEnqueue(branch2._readableStreamController, chunk2); - } - reading = false; - if (readAgainForBranch1) { - pull1Algorithm(); - } - else if (readAgainForBranch2) { - pull2Algorithm(); - } - }); - }, - _closeSteps: () => { - reading = false; - if (!canceled1) { - ReadableByteStreamControllerClose(branch1._readableStreamController); - } - if (!canceled2) { - ReadableByteStreamControllerClose(branch2._readableStreamController); - } - if (branch1._readableStreamController._pendingPullIntos.length > 0) { - ReadableByteStreamControllerRespond(branch1._readableStreamController, 0); - } - if (branch2._readableStreamController._pendingPullIntos.length > 0) { - ReadableByteStreamControllerRespond(branch2._readableStreamController, 0); - } - if (!canceled1 || !canceled2) { - resolveCancelPromise(undefined); - } - }, - _errorSteps: () => { - reading = false; - } - }; - ReadableStreamDefaultReaderRead(reader, readRequest); - } - function pullWithBYOBReader(view, forBranch2) { - if (IsReadableStreamDefaultReader(reader)) { - ReadableStreamReaderGenericRelease(reader); - reader = AcquireReadableStreamBYOBReader(stream); - forwardReaderError(reader); - } - const byobBranch = forBranch2 ? branch2 : branch1; - const otherBranch = forBranch2 ? branch1 : branch2; - const readIntoRequest = { - _chunkSteps: chunk => { - // This needs to be delayed a microtask because it takes at least a microtask to detect errors (using - // reader._closedPromise below), and we want errors in stream to error both branches immediately. We cannot let - // successful synchronously-available reads get ahead of asynchronously-available errors. - queueMicrotask(() => { - readAgainForBranch1 = false; - readAgainForBranch2 = false; - const byobCanceled = forBranch2 ? canceled2 : canceled1; - const otherCanceled = forBranch2 ? 
canceled1 : canceled2; - if (!otherCanceled) { - let clonedChunk; - try { - clonedChunk = CloneAsUint8Array(chunk); - } - catch (cloneE) { - ReadableByteStreamControllerError(byobBranch._readableStreamController, cloneE); - ReadableByteStreamControllerError(otherBranch._readableStreamController, cloneE); - resolveCancelPromise(ReadableStreamCancel(stream, cloneE)); - return; - } - if (!byobCanceled) { - ReadableByteStreamControllerRespondWithNewView(byobBranch._readableStreamController, chunk); - } - ReadableByteStreamControllerEnqueue(otherBranch._readableStreamController, clonedChunk); - } - else if (!byobCanceled) { - ReadableByteStreamControllerRespondWithNewView(byobBranch._readableStreamController, chunk); - } - reading = false; - if (readAgainForBranch1) { - pull1Algorithm(); - } - else if (readAgainForBranch2) { - pull2Algorithm(); - } - }); - }, - _closeSteps: chunk => { - reading = false; - const byobCanceled = forBranch2 ? canceled2 : canceled1; - const otherCanceled = forBranch2 ? canceled1 : canceled2; - if (!byobCanceled) { - ReadableByteStreamControllerClose(byobBranch._readableStreamController); - } - if (!otherCanceled) { - ReadableByteStreamControllerClose(otherBranch._readableStreamController); - } - if (chunk !== undefined) { - if (!byobCanceled) { - ReadableByteStreamControllerRespondWithNewView(byobBranch._readableStreamController, chunk); - } - if (!otherCanceled && otherBranch._readableStreamController._pendingPullIntos.length > 0) { - ReadableByteStreamControllerRespond(otherBranch._readableStreamController, 0); - } - } - if (!byobCanceled || !otherCanceled) { - resolveCancelPromise(undefined); - } - }, - _errorSteps: () => { - reading = false; - } - }; - ReadableStreamBYOBReaderRead(reader, view, readIntoRequest); - } - function pull1Algorithm() { - if (reading) { - readAgainForBranch1 = true; - return promiseResolvedWith(undefined); - } - reading = true; - const byobRequest = ReadableByteStreamControllerGetBYOBRequest(branch1._readableStreamController); - if (byobRequest === null) { - pullWithDefaultReader(); - } - else { - pullWithBYOBReader(byobRequest._view, false); - } - return promiseResolvedWith(undefined); - } - function pull2Algorithm() { - if (reading) { - readAgainForBranch2 = true; - return promiseResolvedWith(undefined); - } - reading = true; - const byobRequest = ReadableByteStreamControllerGetBYOBRequest(branch2._readableStreamController); - if (byobRequest === null) { - pullWithDefaultReader(); - } - else { - pullWithBYOBReader(byobRequest._view, true); - } - return promiseResolvedWith(undefined); - } - function cancel1Algorithm(reason) { - canceled1 = true; - reason1 = reason; - if (canceled2) { - const compositeReason = CreateArrayFromList([reason1, reason2]); - const cancelResult = ReadableStreamCancel(stream, compositeReason); - resolveCancelPromise(cancelResult); - } - return cancelPromise; - } - function cancel2Algorithm(reason) { - canceled2 = true; - reason2 = reason; - if (canceled1) { - const compositeReason = CreateArrayFromList([reason1, reason2]); - const cancelResult = ReadableStreamCancel(stream, compositeReason); - resolveCancelPromise(cancelResult); - } - return cancelPromise; - } - function startAlgorithm() { - return; - } - branch1 = CreateReadableByteStream(startAlgorithm, pull1Algorithm, cancel1Algorithm); - branch2 = CreateReadableByteStream(startAlgorithm, pull2Algorithm, cancel2Algorithm); - forwardReaderError(reader); - return [branch1, branch2]; - } - - function convertUnderlyingDefaultOrByteSource(source, context) { - 
assertDictionary(source, context); - const original = source; - const autoAllocateChunkSize = original === null || original === void 0 ? void 0 : original.autoAllocateChunkSize; - const cancel = original === null || original === void 0 ? void 0 : original.cancel; - const pull = original === null || original === void 0 ? void 0 : original.pull; - const start = original === null || original === void 0 ? void 0 : original.start; - const type = original === null || original === void 0 ? void 0 : original.type; - return { - autoAllocateChunkSize: autoAllocateChunkSize === undefined ? - undefined : - convertUnsignedLongLongWithEnforceRange(autoAllocateChunkSize, `${context} has member 'autoAllocateChunkSize' that`), - cancel: cancel === undefined ? - undefined : - convertUnderlyingSourceCancelCallback(cancel, original, `${context} has member 'cancel' that`), - pull: pull === undefined ? - undefined : - convertUnderlyingSourcePullCallback(pull, original, `${context} has member 'pull' that`), - start: start === undefined ? - undefined : - convertUnderlyingSourceStartCallback(start, original, `${context} has member 'start' that`), - type: type === undefined ? undefined : convertReadableStreamType(type, `${context} has member 'type' that`) - }; - } - function convertUnderlyingSourceCancelCallback(fn, original, context) { - assertFunction(fn, context); - return (reason) => promiseCall(fn, original, [reason]); - } - function convertUnderlyingSourcePullCallback(fn, original, context) { - assertFunction(fn, context); - return (controller) => promiseCall(fn, original, [controller]); - } - function convertUnderlyingSourceStartCallback(fn, original, context) { - assertFunction(fn, context); - return (controller) => reflectCall(fn, original, [controller]); - } - function convertReadableStreamType(type, context) { - type = `${type}`; - if (type !== 'bytes') { - throw new TypeError(`${context} '${type}' is not a valid enumeration value for ReadableStreamType`); - } - return type; - } - - function convertReaderOptions(options, context) { - assertDictionary(options, context); - const mode = options === null || options === void 0 ? void 0 : options.mode; - return { - mode: mode === undefined ? undefined : convertReadableStreamReaderMode(mode, `${context} has member 'mode' that`) - }; - } - function convertReadableStreamReaderMode(mode, context) { - mode = `${mode}`; - if (mode !== 'byob') { - throw new TypeError(`${context} '${mode}' is not a valid enumeration value for ReadableStreamReaderMode`); - } - return mode; - } - - function convertIteratorOptions(options, context) { - assertDictionary(options, context); - const preventCancel = options === null || options === void 0 ? void 0 : options.preventCancel; - return { preventCancel: Boolean(preventCancel) }; - } - - function convertPipeOptions(options, context) { - assertDictionary(options, context); - const preventAbort = options === null || options === void 0 ? void 0 : options.preventAbort; - const preventCancel = options === null || options === void 0 ? void 0 : options.preventCancel; - const preventClose = options === null || options === void 0 ? void 0 : options.preventClose; - const signal = options === null || options === void 0 ? 
void 0 : options.signal; - if (signal !== undefined) { - assertAbortSignal(signal, `${context} has member 'signal' that`); - } - return { - preventAbort: Boolean(preventAbort), - preventCancel: Boolean(preventCancel), - preventClose: Boolean(preventClose), - signal - }; - } - function assertAbortSignal(signal, context) { - if (!isAbortSignal(signal)) { - throw new TypeError(`${context} is not an AbortSignal.`); - } - } - - function convertReadableWritablePair(pair, context) { - assertDictionary(pair, context); - const readable = pair === null || pair === void 0 ? void 0 : pair.readable; - assertRequiredField(readable, 'readable', 'ReadableWritablePair'); - assertReadableStream(readable, `${context} has member 'readable' that`); - const writable = pair === null || pair === void 0 ? void 0 : pair.writable; - assertRequiredField(writable, 'writable', 'ReadableWritablePair'); - assertWritableStream(writable, `${context} has member 'writable' that`); - return { readable, writable }; - } - - /** - * A readable stream represents a source of data, from which you can read. - * - * @public - */ - class ReadableStream { - constructor(rawUnderlyingSource = {}, rawStrategy = {}) { - if (rawUnderlyingSource === undefined) { - rawUnderlyingSource = null; - } - else { - assertObject(rawUnderlyingSource, 'First parameter'); - } - const strategy = convertQueuingStrategy(rawStrategy, 'Second parameter'); - const underlyingSource = convertUnderlyingDefaultOrByteSource(rawUnderlyingSource, 'First parameter'); - InitializeReadableStream(this); - if (underlyingSource.type === 'bytes') { - if (strategy.size !== undefined) { - throw new RangeError('The strategy for a byte stream cannot have a size function'); - } - const highWaterMark = ExtractHighWaterMark(strategy, 0); - SetUpReadableByteStreamControllerFromUnderlyingSource(this, underlyingSource, highWaterMark); - } - else { - const sizeAlgorithm = ExtractSizeAlgorithm(strategy); - const highWaterMark = ExtractHighWaterMark(strategy, 1); - SetUpReadableStreamDefaultControllerFromUnderlyingSource(this, underlyingSource, highWaterMark, sizeAlgorithm); - } - } - /** - * Whether or not the readable stream is locked to a {@link ReadableStreamDefaultReader | reader}. - */ - get locked() { - if (!IsReadableStream(this)) { - throw streamBrandCheckException$1('locked'); - } - return IsReadableStreamLocked(this); - } - /** - * Cancels the stream, signaling a loss of interest in the stream by a consumer. - * - * The supplied `reason` argument will be given to the underlying source's {@link UnderlyingSource.cancel | cancel()} - * method, which might or might not use it. 
- */ - cancel(reason = undefined) { - if (!IsReadableStream(this)) { - return promiseRejectedWith(streamBrandCheckException$1('cancel')); - } - if (IsReadableStreamLocked(this)) { - return promiseRejectedWith(new TypeError('Cannot cancel a stream that already has a reader')); - } - return ReadableStreamCancel(this, reason); - } - getReader(rawOptions = undefined) { - if (!IsReadableStream(this)) { - throw streamBrandCheckException$1('getReader'); - } - const options = convertReaderOptions(rawOptions, 'First parameter'); - if (options.mode === undefined) { - return AcquireReadableStreamDefaultReader(this); - } - return AcquireReadableStreamBYOBReader(this); - } - pipeThrough(rawTransform, rawOptions = {}) { - if (!IsReadableStream(this)) { - throw streamBrandCheckException$1('pipeThrough'); - } - assertRequiredArgument(rawTransform, 1, 'pipeThrough'); - const transform = convertReadableWritablePair(rawTransform, 'First parameter'); - const options = convertPipeOptions(rawOptions, 'Second parameter'); - if (IsReadableStreamLocked(this)) { - throw new TypeError('ReadableStream.prototype.pipeThrough cannot be used on a locked ReadableStream'); - } - if (IsWritableStreamLocked(transform.writable)) { - throw new TypeError('ReadableStream.prototype.pipeThrough cannot be used on a locked WritableStream'); - } - const promise = ReadableStreamPipeTo(this, transform.writable, options.preventClose, options.preventAbort, options.preventCancel, options.signal); - setPromiseIsHandledToTrue(promise); - return transform.readable; - } - pipeTo(destination, rawOptions = {}) { - if (!IsReadableStream(this)) { - return promiseRejectedWith(streamBrandCheckException$1('pipeTo')); - } - if (destination === undefined) { - return promiseRejectedWith(`Parameter 1 is required in 'pipeTo'.`); - } - if (!IsWritableStream(destination)) { - return promiseRejectedWith(new TypeError(`ReadableStream.prototype.pipeTo's first argument must be a WritableStream`)); - } - let options; - try { - options = convertPipeOptions(rawOptions, 'Second parameter'); - } - catch (e) { - return promiseRejectedWith(e); - } - if (IsReadableStreamLocked(this)) { - return promiseRejectedWith(new TypeError('ReadableStream.prototype.pipeTo cannot be used on a locked ReadableStream')); - } - if (IsWritableStreamLocked(destination)) { - return promiseRejectedWith(new TypeError('ReadableStream.prototype.pipeTo cannot be used on a locked WritableStream')); - } - return ReadableStreamPipeTo(this, destination, options.preventClose, options.preventAbort, options.preventCancel, options.signal); - } - /** - * Tees this readable stream, returning a two-element array containing the two resulting branches as - * new {@link ReadableStream} instances. - * - * Teeing a stream will lock it, preventing any other consumer from acquiring a reader. - * To cancel the stream, cancel both of the resulting branches; a composite cancellation reason will then be - * propagated to the stream's underlying source. - * - * Note that the chunks seen in each branch will be the same object. If the chunks are not immutable, - * this could allow interference between the two branches. 
- */ - tee() { - if (!IsReadableStream(this)) { - throw streamBrandCheckException$1('tee'); - } - const branches = ReadableStreamTee(this); - return CreateArrayFromList(branches); - } - values(rawOptions = undefined) { - if (!IsReadableStream(this)) { - throw streamBrandCheckException$1('values'); - } - const options = convertIteratorOptions(rawOptions, 'First parameter'); - return AcquireReadableStreamAsyncIterator(this, options.preventCancel); - } - } - Object.defineProperties(ReadableStream.prototype, { - cancel: { enumerable: true }, - getReader: { enumerable: true }, - pipeThrough: { enumerable: true }, - pipeTo: { enumerable: true }, - tee: { enumerable: true }, - values: { enumerable: true }, - locked: { enumerable: true } - }); - if (typeof SymbolPolyfill.toStringTag === 'symbol') { - Object.defineProperty(ReadableStream.prototype, SymbolPolyfill.toStringTag, { - value: 'ReadableStream', - configurable: true - }); - } - if (typeof SymbolPolyfill.asyncIterator === 'symbol') { - Object.defineProperty(ReadableStream.prototype, SymbolPolyfill.asyncIterator, { - value: ReadableStream.prototype.values, - writable: true, - configurable: true - }); - } - // Abstract operations for the ReadableStream. - // Throws if and only if startAlgorithm throws. - function CreateReadableStream(startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark = 1, sizeAlgorithm = () => 1) { - const stream = Object.create(ReadableStream.prototype); - InitializeReadableStream(stream); - const controller = Object.create(ReadableStreamDefaultController.prototype); - SetUpReadableStreamDefaultController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, sizeAlgorithm); - return stream; - } - // Throws if and only if startAlgorithm throws. - function CreateReadableByteStream(startAlgorithm, pullAlgorithm, cancelAlgorithm) { - const stream = Object.create(ReadableStream.prototype); - InitializeReadableStream(stream); - const controller = Object.create(ReadableByteStreamController.prototype); - SetUpReadableByteStreamController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, 0, undefined); - return stream; - } - function InitializeReadableStream(stream) { - stream._state = 'readable'; - stream._reader = undefined; - stream._storedError = undefined; - stream._disturbed = false; - } - function IsReadableStream(x) { - if (!typeIsObject(x)) { - return false; - } - if (!Object.prototype.hasOwnProperty.call(x, '_readableStreamController')) { - return false; - } - return x instanceof ReadableStream; - } - function IsReadableStreamLocked(stream) { - if (stream._reader === undefined) { - return false; - } - return true; - } - // ReadableStream API exposed for controllers. 
- function ReadableStreamCancel(stream, reason) { - stream._disturbed = true; - if (stream._state === 'closed') { - return promiseResolvedWith(undefined); - } - if (stream._state === 'errored') { - return promiseRejectedWith(stream._storedError); - } - ReadableStreamClose(stream); - const reader = stream._reader; - if (reader !== undefined && IsReadableStreamBYOBReader(reader)) { - reader._readIntoRequests.forEach(readIntoRequest => { - readIntoRequest._closeSteps(undefined); - }); - reader._readIntoRequests = new SimpleQueue(); - } - const sourceCancelPromise = stream._readableStreamController[CancelSteps](reason); - return transformPromiseWith(sourceCancelPromise, noop); - } - function ReadableStreamClose(stream) { - stream._state = 'closed'; - const reader = stream._reader; - if (reader === undefined) { - return; - } - defaultReaderClosedPromiseResolve(reader); - if (IsReadableStreamDefaultReader(reader)) { - reader._readRequests.forEach(readRequest => { - readRequest._closeSteps(); - }); - reader._readRequests = new SimpleQueue(); - } - } - function ReadableStreamError(stream, e) { - stream._state = 'errored'; - stream._storedError = e; - const reader = stream._reader; - if (reader === undefined) { - return; - } - defaultReaderClosedPromiseReject(reader, e); - if (IsReadableStreamDefaultReader(reader)) { - reader._readRequests.forEach(readRequest => { - readRequest._errorSteps(e); - }); - reader._readRequests = new SimpleQueue(); - } - else { - reader._readIntoRequests.forEach(readIntoRequest => { - readIntoRequest._errorSteps(e); - }); - reader._readIntoRequests = new SimpleQueue(); - } - } - // Helper functions for the ReadableStream. - function streamBrandCheckException$1(name) { - return new TypeError(`ReadableStream.prototype.${name} can only be used on a ReadableStream`); - } - - function convertQueuingStrategyInit(init, context) { - assertDictionary(init, context); - const highWaterMark = init === null || init === void 0 ? void 0 : init.highWaterMark; - assertRequiredField(highWaterMark, 'highWaterMark', 'QueuingStrategyInit'); - return { - highWaterMark: convertUnrestrictedDouble(highWaterMark) - }; - } - - // The size function must not have a prototype property nor be a constructor - const byteLengthSizeFunction = (chunk) => { - return chunk.byteLength; - }; - try { - Object.defineProperty(byteLengthSizeFunction, 'name', { - value: 'size', - configurable: true - }); - } - catch (_a) { - // This property is non-configurable in older browsers, so ignore if this throws. - // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/name#browser_compatibility - } - /** - * A queuing strategy that counts the number of bytes in each chunk. - * - * @public - */ - class ByteLengthQueuingStrategy { - constructor(options) { - assertRequiredArgument(options, 1, 'ByteLengthQueuingStrategy'); - options = convertQueuingStrategyInit(options, 'First parameter'); - this._byteLengthQueuingStrategyHighWaterMark = options.highWaterMark; - } - /** - * Returns the high water mark provided to the constructor. - */ - get highWaterMark() { - if (!IsByteLengthQueuingStrategy(this)) { - throw byteLengthBrandCheckException('highWaterMark'); - } - return this._byteLengthQueuingStrategyHighWaterMark; - } - /** - * Measures the size of `chunk` by returning the value of its `byteLength` property. 
- */ - get size() { - if (!IsByteLengthQueuingStrategy(this)) { - throw byteLengthBrandCheckException('size'); - } - return byteLengthSizeFunction; - } - } - Object.defineProperties(ByteLengthQueuingStrategy.prototype, { - highWaterMark: { enumerable: true }, - size: { enumerable: true } - }); - if (typeof SymbolPolyfill.toStringTag === 'symbol') { - Object.defineProperty(ByteLengthQueuingStrategy.prototype, SymbolPolyfill.toStringTag, { - value: 'ByteLengthQueuingStrategy', - configurable: true - }); - } - // Helper functions for the ByteLengthQueuingStrategy. - function byteLengthBrandCheckException(name) { - return new TypeError(`ByteLengthQueuingStrategy.prototype.${name} can only be used on a ByteLengthQueuingStrategy`); - } - function IsByteLengthQueuingStrategy(x) { - if (!typeIsObject(x)) { - return false; - } - if (!Object.prototype.hasOwnProperty.call(x, '_byteLengthQueuingStrategyHighWaterMark')) { - return false; - } - return x instanceof ByteLengthQueuingStrategy; - } - - // The size function must not have a prototype property nor be a constructor - const countSizeFunction = () => { - return 1; - }; - try { - Object.defineProperty(countSizeFunction, 'name', { - value: 'size', - configurable: true - }); - } - catch (_a) { - // This property is non-configurable in older browsers, so ignore if this throws. - // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/name#browser_compatibility - } - /** - * A queuing strategy that counts the number of chunks. - * - * @public - */ - class CountQueuingStrategy { - constructor(options) { - assertRequiredArgument(options, 1, 'CountQueuingStrategy'); - options = convertQueuingStrategyInit(options, 'First parameter'); - this._countQueuingStrategyHighWaterMark = options.highWaterMark; - } - /** - * Returns the high water mark provided to the constructor. - */ - get highWaterMark() { - if (!IsCountQueuingStrategy(this)) { - throw countBrandCheckException('highWaterMark'); - } - return this._countQueuingStrategyHighWaterMark; - } - /** - * Measures the size of `chunk` by always returning 1. - * This ensures that the total queue size is a count of the number of chunks in the queue. - */ - get size() { - if (!IsCountQueuingStrategy(this)) { - throw countBrandCheckException('size'); - } - return countSizeFunction; - } - } - Object.defineProperties(CountQueuingStrategy.prototype, { - highWaterMark: { enumerable: true }, - size: { enumerable: true } - }); - if (typeof SymbolPolyfill.toStringTag === 'symbol') { - Object.defineProperty(CountQueuingStrategy.prototype, SymbolPolyfill.toStringTag, { - value: 'CountQueuingStrategy', - configurable: true - }); - } - // Helper functions for the CountQueuingStrategy. - function countBrandCheckException(name) { - return new TypeError(`CountQueuingStrategy.prototype.${name} can only be used on a CountQueuingStrategy`); - } - function IsCountQueuingStrategy(x) { - if (!typeIsObject(x)) { - return false; - } - if (!Object.prototype.hasOwnProperty.call(x, '_countQueuingStrategyHighWaterMark')) { - return false; - } - return x instanceof CountQueuingStrategy; - } - - function convertTransformer(original, context) { - assertDictionary(original, context); - const flush = original === null || original === void 0 ? void 0 : original.flush; - const readableType = original === null || original === void 0 ? void 0 : original.readableType; - const start = original === null || original === void 0 ? void 0 : original.start; - const transform = original === null || original === void 0 ? 
void 0 : original.transform; - const writableType = original === null || original === void 0 ? void 0 : original.writableType; - return { - flush: flush === undefined ? - undefined : - convertTransformerFlushCallback(flush, original, `${context} has member 'flush' that`), - readableType, - start: start === undefined ? - undefined : - convertTransformerStartCallback(start, original, `${context} has member 'start' that`), - transform: transform === undefined ? - undefined : - convertTransformerTransformCallback(transform, original, `${context} has member 'transform' that`), - writableType - }; - } - function convertTransformerFlushCallback(fn, original, context) { - assertFunction(fn, context); - return (controller) => promiseCall(fn, original, [controller]); - } - function convertTransformerStartCallback(fn, original, context) { - assertFunction(fn, context); - return (controller) => reflectCall(fn, original, [controller]); - } - function convertTransformerTransformCallback(fn, original, context) { - assertFunction(fn, context); - return (chunk, controller) => promiseCall(fn, original, [chunk, controller]); - } - - // Class TransformStream - /** - * A transform stream consists of a pair of streams: a {@link WritableStream | writable stream}, - * known as its writable side, and a {@link ReadableStream | readable stream}, known as its readable side. - * In a manner specific to the transform stream in question, writes to the writable side result in new data being - * made available for reading from the readable side. - * - * @public - */ - class TransformStream { - constructor(rawTransformer = {}, rawWritableStrategy = {}, rawReadableStrategy = {}) { - if (rawTransformer === undefined) { - rawTransformer = null; - } - const writableStrategy = convertQueuingStrategy(rawWritableStrategy, 'Second parameter'); - const readableStrategy = convertQueuingStrategy(rawReadableStrategy, 'Third parameter'); - const transformer = convertTransformer(rawTransformer, 'First parameter'); - if (transformer.readableType !== undefined) { - throw new RangeError('Invalid readableType specified'); - } - if (transformer.writableType !== undefined) { - throw new RangeError('Invalid writableType specified'); - } - const readableHighWaterMark = ExtractHighWaterMark(readableStrategy, 0); - const readableSizeAlgorithm = ExtractSizeAlgorithm(readableStrategy); - const writableHighWaterMark = ExtractHighWaterMark(writableStrategy, 1); - const writableSizeAlgorithm = ExtractSizeAlgorithm(writableStrategy); - let startPromise_resolve; - const startPromise = newPromise(resolve => { - startPromise_resolve = resolve; - }); - InitializeTransformStream(this, startPromise, writableHighWaterMark, writableSizeAlgorithm, readableHighWaterMark, readableSizeAlgorithm); - SetUpTransformStreamDefaultControllerFromTransformer(this, transformer); - if (transformer.start !== undefined) { - startPromise_resolve(transformer.start(this._transformStreamController)); - } - else { - startPromise_resolve(undefined); - } - } - /** - * The readable side of the transform stream. - */ - get readable() { - if (!IsTransformStream(this)) { - throw streamBrandCheckException('readable'); - } - return this._readable; - } - /** - * The writable side of the transform stream. 
- */ - get writable() { - if (!IsTransformStream(this)) { - throw streamBrandCheckException('writable'); - } - return this._writable; - } - } - Object.defineProperties(TransformStream.prototype, { - readable: { enumerable: true }, - writable: { enumerable: true } - }); - if (typeof SymbolPolyfill.toStringTag === 'symbol') { - Object.defineProperty(TransformStream.prototype, SymbolPolyfill.toStringTag, { - value: 'TransformStream', - configurable: true - }); - } - function InitializeTransformStream(stream, startPromise, writableHighWaterMark, writableSizeAlgorithm, readableHighWaterMark, readableSizeAlgorithm) { - function startAlgorithm() { - return startPromise; - } - function writeAlgorithm(chunk) { - return TransformStreamDefaultSinkWriteAlgorithm(stream, chunk); - } - function abortAlgorithm(reason) { - return TransformStreamDefaultSinkAbortAlgorithm(stream, reason); - } - function closeAlgorithm() { - return TransformStreamDefaultSinkCloseAlgorithm(stream); - } - stream._writable = CreateWritableStream(startAlgorithm, writeAlgorithm, closeAlgorithm, abortAlgorithm, writableHighWaterMark, writableSizeAlgorithm); - function pullAlgorithm() { - return TransformStreamDefaultSourcePullAlgorithm(stream); - } - function cancelAlgorithm(reason) { - TransformStreamErrorWritableAndUnblockWrite(stream, reason); - return promiseResolvedWith(undefined); - } - stream._readable = CreateReadableStream(startAlgorithm, pullAlgorithm, cancelAlgorithm, readableHighWaterMark, readableSizeAlgorithm); - // The [[backpressure]] slot is set to undefined so that it can be initialised by TransformStreamSetBackpressure. - stream._backpressure = undefined; - stream._backpressureChangePromise = undefined; - stream._backpressureChangePromise_resolve = undefined; - TransformStreamSetBackpressure(stream, true); - stream._transformStreamController = undefined; - } - function IsTransformStream(x) { - if (!typeIsObject(x)) { - return false; - } - if (!Object.prototype.hasOwnProperty.call(x, '_transformStreamController')) { - return false; - } - return x instanceof TransformStream; - } - // This is a no-op if both sides are already errored. - function TransformStreamError(stream, e) { - ReadableStreamDefaultControllerError(stream._readable._readableStreamController, e); - TransformStreamErrorWritableAndUnblockWrite(stream, e); - } - function TransformStreamErrorWritableAndUnblockWrite(stream, e) { - TransformStreamDefaultControllerClearAlgorithms(stream._transformStreamController); - WritableStreamDefaultControllerErrorIfNeeded(stream._writable._writableStreamController, e); - if (stream._backpressure) { - // Pretend that pull() was called to permit any pending write() calls to complete. TransformStreamSetBackpressure() - // cannot be called from enqueue() or pull() once the ReadableStream is errored, so this will will be the final time - // _backpressure is set. - TransformStreamSetBackpressure(stream, false); - } - } - function TransformStreamSetBackpressure(stream, backpressure) { - // Passes also when called during construction. - if (stream._backpressureChangePromise !== undefined) { - stream._backpressureChangePromise_resolve(); - } - stream._backpressureChangePromise = newPromise(resolve => { - stream._backpressureChangePromise_resolve = resolve; - }); - stream._backpressure = backpressure; - } - // Class TransformStreamDefaultController - /** - * Allows control of the {@link ReadableStream} and {@link WritableStream} of the associated {@link TransformStream}. 
- * - * @public - */ - class TransformStreamDefaultController { - constructor() { - throw new TypeError('Illegal constructor'); - } - /** - * Returns the desired size to fill the readable side’s internal queue. It can be negative, if the queue is over-full. - */ - get desiredSize() { - if (!IsTransformStreamDefaultController(this)) { - throw defaultControllerBrandCheckException('desiredSize'); - } - const readableController = this._controlledTransformStream._readable._readableStreamController; - return ReadableStreamDefaultControllerGetDesiredSize(readableController); - } - enqueue(chunk = undefined) { - if (!IsTransformStreamDefaultController(this)) { - throw defaultControllerBrandCheckException('enqueue'); - } - TransformStreamDefaultControllerEnqueue(this, chunk); - } - /** - * Errors both the readable side and the writable side of the controlled transform stream, making all future - * interactions with it fail with the given error `e`. Any chunks queued for transformation will be discarded. - */ - error(reason = undefined) { - if (!IsTransformStreamDefaultController(this)) { - throw defaultControllerBrandCheckException('error'); - } - TransformStreamDefaultControllerError(this, reason); - } - /** - * Closes the readable side and errors the writable side of the controlled transform stream. This is useful when the - * transformer only needs to consume a portion of the chunks written to the writable side. - */ - terminate() { - if (!IsTransformStreamDefaultController(this)) { - throw defaultControllerBrandCheckException('terminate'); - } - TransformStreamDefaultControllerTerminate(this); - } - } - Object.defineProperties(TransformStreamDefaultController.prototype, { - enqueue: { enumerable: true }, - error: { enumerable: true }, - terminate: { enumerable: true }, - desiredSize: { enumerable: true } - }); - if (typeof SymbolPolyfill.toStringTag === 'symbol') { - Object.defineProperty(TransformStreamDefaultController.prototype, SymbolPolyfill.toStringTag, { - value: 'TransformStreamDefaultController', - configurable: true - }); - } - // Transform Stream Default Controller Abstract Operations - function IsTransformStreamDefaultController(x) { - if (!typeIsObject(x)) { - return false; - } - if (!Object.prototype.hasOwnProperty.call(x, '_controlledTransformStream')) { - return false; - } - return x instanceof TransformStreamDefaultController; - } - function SetUpTransformStreamDefaultController(stream, controller, transformAlgorithm, flushAlgorithm) { - controller._controlledTransformStream = stream; - stream._transformStreamController = controller; - controller._transformAlgorithm = transformAlgorithm; - controller._flushAlgorithm = flushAlgorithm; - } - function SetUpTransformStreamDefaultControllerFromTransformer(stream, transformer) { - const controller = Object.create(TransformStreamDefaultController.prototype); - let transformAlgorithm = (chunk) => { - try { - TransformStreamDefaultControllerEnqueue(controller, chunk); - return promiseResolvedWith(undefined); - } - catch (transformResultE) { - return promiseRejectedWith(transformResultE); - } - }; - let flushAlgorithm = () => promiseResolvedWith(undefined); - if (transformer.transform !== undefined) { - transformAlgorithm = chunk => transformer.transform(chunk, controller); - } - if (transformer.flush !== undefined) { - flushAlgorithm = () => transformer.flush(controller); - } - SetUpTransformStreamDefaultController(stream, controller, transformAlgorithm, flushAlgorithm); - } - function 
TransformStreamDefaultControllerClearAlgorithms(controller) { - controller._transformAlgorithm = undefined; - controller._flushAlgorithm = undefined; - } - function TransformStreamDefaultControllerEnqueue(controller, chunk) { - const stream = controller._controlledTransformStream; - const readableController = stream._readable._readableStreamController; - if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(readableController)) { - throw new TypeError('Readable side is not in a state that permits enqueue'); - } - // We throttle transform invocations based on the backpressure of the ReadableStream, but we still - // accept TransformStreamDefaultControllerEnqueue() calls. - try { - ReadableStreamDefaultControllerEnqueue(readableController, chunk); - } - catch (e) { - // This happens when readableStrategy.size() throws. - TransformStreamErrorWritableAndUnblockWrite(stream, e); - throw stream._readable._storedError; - } - const backpressure = ReadableStreamDefaultControllerHasBackpressure(readableController); - if (backpressure !== stream._backpressure) { - TransformStreamSetBackpressure(stream, true); - } - } - function TransformStreamDefaultControllerError(controller, e) { - TransformStreamError(controller._controlledTransformStream, e); - } - function TransformStreamDefaultControllerPerformTransform(controller, chunk) { - const transformPromise = controller._transformAlgorithm(chunk); - return transformPromiseWith(transformPromise, undefined, r => { - TransformStreamError(controller._controlledTransformStream, r); - throw r; - }); - } - function TransformStreamDefaultControllerTerminate(controller) { - const stream = controller._controlledTransformStream; - const readableController = stream._readable._readableStreamController; - ReadableStreamDefaultControllerClose(readableController); - const error = new TypeError('TransformStream terminated'); - TransformStreamErrorWritableAndUnblockWrite(stream, error); - } - // TransformStreamDefaultSink Algorithms - function TransformStreamDefaultSinkWriteAlgorithm(stream, chunk) { - const controller = stream._transformStreamController; - if (stream._backpressure) { - const backpressureChangePromise = stream._backpressureChangePromise; - return transformPromiseWith(backpressureChangePromise, () => { - const writable = stream._writable; - const state = writable._state; - if (state === 'erroring') { - throw writable._storedError; - } - return TransformStreamDefaultControllerPerformTransform(controller, chunk); - }); - } - return TransformStreamDefaultControllerPerformTransform(controller, chunk); - } - function TransformStreamDefaultSinkAbortAlgorithm(stream, reason) { - // abort() is not called synchronously, so it is possible for abort() to be called when the stream is already - // errored. - TransformStreamError(stream, reason); - return promiseResolvedWith(undefined); - } - function TransformStreamDefaultSinkCloseAlgorithm(stream) { - // stream._readable cannot change after construction, so caching it across a call to user code is safe. - const readable = stream._readable; - const controller = stream._transformStreamController; - const flushPromise = controller._flushAlgorithm(); - TransformStreamDefaultControllerClearAlgorithms(controller); - // Return a promise that is fulfilled with undefined on success. 
- return transformPromiseWith(flushPromise, () => { - if (readable._state === 'errored') { - throw readable._storedError; - } - ReadableStreamDefaultControllerClose(readable._readableStreamController); - }, r => { - TransformStreamError(stream, r); - throw readable._storedError; - }); - } - // TransformStreamDefaultSource Algorithms - function TransformStreamDefaultSourcePullAlgorithm(stream) { - // Invariant. Enforced by the promises returned by start() and pull(). - TransformStreamSetBackpressure(stream, false); - // Prevent the next pull() call until there is backpressure. - return stream._backpressureChangePromise; - } - // Helper functions for the TransformStreamDefaultController. - function defaultControllerBrandCheckException(name) { - return new TypeError(`TransformStreamDefaultController.prototype.${name} can only be used on a TransformStreamDefaultController`); - } - // Helper functions for the TransformStream. - function streamBrandCheckException(name) { - return new TypeError(`TransformStream.prototype.${name} can only be used on a TransformStream`); - } - - exports.ByteLengthQueuingStrategy = ByteLengthQueuingStrategy; - exports.CountQueuingStrategy = CountQueuingStrategy; - exports.ReadableByteStreamController = ReadableByteStreamController; - exports.ReadableStream = ReadableStream; - exports.ReadableStreamBYOBReader = ReadableStreamBYOBReader; - exports.ReadableStreamBYOBRequest = ReadableStreamBYOBRequest; - exports.ReadableStreamDefaultController = ReadableStreamDefaultController; - exports.ReadableStreamDefaultReader = ReadableStreamDefaultReader; - exports.TransformStream = TransformStream; - exports.TransformStreamDefaultController = TransformStreamDefaultController; - exports.WritableStream = WritableStream; - exports.WritableStreamDefaultController = WritableStreamDefaultController; - exports.WritableStreamDefaultWriter = WritableStreamDefaultWriter; - - Object.defineProperty(exports, '__esModule', { value: true }); - -}))); -//# sourceMappingURL=ponyfill.es2018.js.map - - -/***/ }), - -/***/ 9491: -/***/ ((module) => { - -"use strict"; -module.exports = require("assert"); - -/***/ }), - -/***/ 4300: -/***/ ((module) => { - -"use strict"; -module.exports = require("buffer"); - -/***/ }), - -/***/ 6113: -/***/ ((module) => { - -"use strict"; -module.exports = require("crypto"); - -/***/ }), - -/***/ 2361: -/***/ ((module) => { - -"use strict"; -module.exports = require("events"); - -/***/ }), - -/***/ 7147: -/***/ ((module) => { - -"use strict"; -module.exports = require("fs"); - -/***/ }), - -/***/ 3685: -/***/ ((module) => { - -"use strict"; -module.exports = require("http"); - -/***/ }), - -/***/ 5687: -/***/ ((module) => { - -"use strict"; -module.exports = require("https"); - -/***/ }), - -/***/ 1808: -/***/ ((module) => { - -"use strict"; -module.exports = require("net"); - -/***/ }), - -/***/ 7742: -/***/ ((module) => { - -"use strict"; -module.exports = require("node:process"); - -/***/ }), - -/***/ 2477: -/***/ ((module) => { - -"use strict"; -module.exports = require("node:stream/web"); - -/***/ }), - -/***/ 2037: -/***/ ((module) => { - -"use strict"; -module.exports = require("os"); - -/***/ }), - -/***/ 1017: -/***/ ((module) => { - -"use strict"; -module.exports = require("path"); - -/***/ }), - -/***/ 4404: -/***/ ((module) => { - -"use strict"; -module.exports = require("tls"); - -/***/ }), - -/***/ 3837: -/***/ ((module) => { - -"use strict"; -module.exports = require("util"); - -/***/ }), - -/***/ 1267: -/***/ ((module) => { - -"use strict"; 
-module.exports = require("worker_threads"); - -/***/ }), - -/***/ 8572: -/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { - -/* c8 ignore start */ -// 64 KiB (same size chrome slice theirs blob into Uint8array's) -const POOL_SIZE = 65536 - -if (!globalThis.ReadableStream) { - // `node:stream/web` got introduced in v16.5.0 as experimental - // and it's preferred over the polyfilled version. So we also - // suppress the warning that gets emitted by NodeJS for using it. - try { - const process = __nccwpck_require__(7742) - const { emitWarning } = process - try { - process.emitWarning = () => {} - Object.assign(globalThis, __nccwpck_require__(2477)) - process.emitWarning = emitWarning - } catch (error) { - process.emitWarning = emitWarning - throw error - } - } catch (error) { - // fallback to polyfill implementation - Object.assign(globalThis, __nccwpck_require__(1452)) - } -} - -try { - // Don't use node: prefix for this, require+node: is not supported until node v14.14 - // Only `import()` can use prefix in 12.20 and later - const { Blob } = __nccwpck_require__(4300) - if (Blob && !Blob.prototype.stream) { - Blob.prototype.stream = function name (params) { - let position = 0 - const blob = this - - return new ReadableStream({ - type: 'bytes', - async pull (ctrl) { - const chunk = blob.slice(position, Math.min(blob.size, position + POOL_SIZE)) - const buffer = await chunk.arrayBuffer() - position += buffer.byteLength - ctrl.enqueue(new Uint8Array(buffer)) - - if (position === blob.size) { - ctrl.close() - } - } - }) - } - } -} catch (error) {} -/* c8 ignore end */ - - -/***/ }), - -/***/ 3213: -/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __nccwpck_require__) => { - -"use strict"; -/* harmony export */ __nccwpck_require__.d(__webpack_exports__, { -/* harmony export */ "Z": () => (__WEBPACK_DEFAULT_EXPORT__) -/* harmony export */ }); -/* unused harmony export File */ -/* harmony import */ var _index_js__WEBPACK_IMPORTED_MODULE_0__ = __nccwpck_require__(1410); - - -const _File = class File extends _index_js__WEBPACK_IMPORTED_MODULE_0__/* ["default"] */ .Z { - #lastModified = 0 - #name = '' - - /** - * @param {*[]} fileBits - * @param {string} fileName - * @param {{lastModified?: number, type?: string}} options - */// @ts-ignore - constructor (fileBits, fileName, options = {}) { - if (arguments.length < 2) { - throw new TypeError(`Failed to construct 'File': 2 arguments required, but only ${arguments.length} present.`) - } - super(fileBits, options) - - if (options === null) options = {} - - // Simulate WebIDL type casting for NaN value in lastModified option. - const lastModified = options.lastModified === undefined ? 
Date.now() : Number(options.lastModified) - if (!Number.isNaN(lastModified)) { - this.#lastModified = lastModified - } - - this.#name = String(fileName) - } - - get name () { - return this.#name - } - - get lastModified () { - return this.#lastModified - } - - get [Symbol.toStringTag] () { - return 'File' - } - - static [Symbol.hasInstance] (object) { - return !!object && object instanceof _index_js__WEBPACK_IMPORTED_MODULE_0__/* ["default"] */ .Z && - /^(File)$/.test(object[Symbol.toStringTag]) - } -} - -/** @type {typeof globalThis.File} */// @ts-ignore -const File = _File -/* harmony default export */ const __WEBPACK_DEFAULT_EXPORT__ = (File); - - -/***/ }), - -/***/ 2777: -/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __nccwpck_require__) => { - -"use strict"; - -// EXPORTS -__nccwpck_require__.d(__webpack_exports__, { - "t6": () => (/* reexport */ fetch_blob/* default */.Z), - "$B": () => (/* reexport */ file/* default */.Z), - "xB": () => (/* binding */ blobFrom), - "SX": () => (/* binding */ blobFromSync), - "e2": () => (/* binding */ fileFrom), - "RA": () => (/* binding */ fileFromSync) -}); - -// UNUSED EXPORTS: default - -;// CONCATENATED MODULE: external "node:fs" -const external_node_fs_namespaceObject = require("node:fs"); -;// CONCATENATED MODULE: external "node:path" -const external_node_path_namespaceObject = require("node:path"); -// EXTERNAL MODULE: ./node_modules/node-domexception/index.js -var node_domexception = __nccwpck_require__(7760); -// EXTERNAL MODULE: ./node_modules/fetch-blob/file.js -var file = __nccwpck_require__(3213); -// EXTERNAL MODULE: ./node_modules/fetch-blob/index.js -var fetch_blob = __nccwpck_require__(1410); -;// CONCATENATED MODULE: ./node_modules/fetch-blob/from.js - - - - - - - -const { stat } = external_node_fs_namespaceObject.promises - -/** - * @param {string} path filepath on the disk - * @param {string} [type] mimetype to use - */ -const blobFromSync = (path, type) => fromBlob((0,external_node_fs_namespaceObject.statSync)(path), path, type) - -/** - * @param {string} path filepath on the disk - * @param {string} [type] mimetype to use - * @returns {Promise} - */ -const blobFrom = (path, type) => stat(path).then(stat => fromBlob(stat, path, type)) - -/** - * @param {string} path filepath on the disk - * @param {string} [type] mimetype to use - * @returns {Promise} - */ -const fileFrom = (path, type) => stat(path).then(stat => fromFile(stat, path, type)) - -/** - * @param {string} path filepath on the disk - * @param {string} [type] mimetype to use - */ -const fileFromSync = (path, type) => fromFile((0,external_node_fs_namespaceObject.statSync)(path), path, type) - -// @ts-ignore -const fromBlob = (stat, path, type = '') => new fetch_blob/* default */.Z([new BlobDataItem({ - path, - size: stat.size, - lastModified: stat.mtimeMs, - start: 0 -})], { type }) - -// @ts-ignore -const fromFile = (stat, path, type = '') => new file/* default */.Z([new BlobDataItem({ - path, - size: stat.size, - lastModified: stat.mtimeMs, - start: 0 -})], (0,external_node_path_namespaceObject.basename)(path), { type, lastModified: stat.mtimeMs }) - -/** - * This is a blob backed up by a file on the disk - * with minium requirement. Its wrapped around a Blob as a blobPart - * so you have no direct access to this. 
- * - * @private - */ -class BlobDataItem { - #path - #start - - constructor (options) { - this.#path = options.path - this.#start = options.start - this.size = options.size - this.lastModified = options.lastModified - this.originalSize = options.originalSize === undefined - ? options.size - : options.originalSize - } - - /** - * Slicing arguments is first validated and formatted - * to not be out of range by Blob.prototype.slice - */ - slice (start, end) { - return new BlobDataItem({ - path: this.#path, - lastModified: this.lastModified, - originalSize: this.originalSize, - size: end - start, - start: this.#start + start - }) - } - - async * stream () { - const { mtimeMs, size } = await stat(this.#path) - - if (mtimeMs > this.lastModified || this.originalSize !== size) { - throw new node_domexception('The requested file could not be read, typically due to permission problems that have occurred after a reference to a file was acquired.', 'NotReadableError') - } - - yield * (0,external_node_fs_namespaceObject.createReadStream)(this.#path, { - start: this.#start, - end: this.#start + this.size - 1 - }) - } - - get [Symbol.toStringTag] () { - return 'Blob' - } -} - -/* harmony default export */ const from = ((/* unused pure expression or super */ null && (blobFromSync))); - - - -/***/ }), - -/***/ 1410: -/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __nccwpck_require__) => { - -"use strict"; -/* harmony export */ __nccwpck_require__.d(__webpack_exports__, { -/* harmony export */ "Z": () => (__WEBPACK_DEFAULT_EXPORT__) -/* harmony export */ }); -/* unused harmony export Blob */ -/* harmony import */ var _streams_cjs__WEBPACK_IMPORTED_MODULE_0__ = __nccwpck_require__(8572); -/*! fetch-blob. MIT License. Jimmy Wärting */ - -// TODO (jimmywarting): in the feature use conditional loading with top level await (requires 14.x) -// Node has recently added whatwg stream into core - - - -// 64 KiB (same size chrome slice theirs blob into Uint8array's) -const POOL_SIZE = 65536 - -/** @param {(Blob | Uint8Array)[]} parts */ -async function * toIterator (parts, clone = true) { - for (const part of parts) { - if ('stream' in part) { - yield * (/** @type {AsyncIterableIterator} */ (part.stream())) - } else if (ArrayBuffer.isView(part)) { - if (clone) { - let position = part.byteOffset - const end = part.byteOffset + part.byteLength - while (position !== end) { - const size = Math.min(end - position, POOL_SIZE) - const chunk = part.buffer.slice(position, position + size) - position += chunk.byteLength - yield new Uint8Array(chunk) - } - } else { - yield part - } - /* c8 ignore next 10 */ - } else { - // For blobs that have arrayBuffer but no stream method (nodes buffer.Blob) - let position = 0, b = (/** @type {Blob} */ (part)) - while (position !== b.size) { - const chunk = b.slice(position, Math.min(b.size, position + POOL_SIZE)) - const buffer = await chunk.arrayBuffer() - position += buffer.byteLength - yield new Uint8Array(buffer) - } - } - } -} - -const _Blob = class Blob { - /** @type {Array.<(Blob|Uint8Array)>} */ - #parts = [] - #type = '' - #size = 0 - #endings = 'transparent' - - /** - * The Blob() constructor returns a new Blob object. The content - * of the blob consists of the concatenation of the values given - * in the parameter array. 
- * - * @param {*} blobParts - * @param {{ type?: string, endings?: string }} [options] - */ - constructor (blobParts = [], options = {}) { - if (typeof blobParts !== 'object' || blobParts === null) { - throw new TypeError('Failed to construct \'Blob\': The provided value cannot be converted to a sequence.') - } - - if (typeof blobParts[Symbol.iterator] !== 'function') { - throw new TypeError('Failed to construct \'Blob\': The object must have a callable @@iterator property.') - } - - if (typeof options !== 'object' && typeof options !== 'function') { - throw new TypeError('Failed to construct \'Blob\': parameter 2 cannot convert to dictionary.') - } - - if (options === null) options = {} - - const encoder = new TextEncoder() - for (const element of blobParts) { - let part - if (ArrayBuffer.isView(element)) { - part = new Uint8Array(element.buffer.slice(element.byteOffset, element.byteOffset + element.byteLength)) - } else if (element instanceof ArrayBuffer) { - part = new Uint8Array(element.slice(0)) - } else if (element instanceof Blob) { - part = element - } else { - part = encoder.encode(`${element}`) - } - - const size = ArrayBuffer.isView(part) ? part.byteLength : part.size - // Avoid pushing empty parts into the array to better GC them - if (size) { - this.#size += size - this.#parts.push(part) - } - } - - this.#endings = `${options.endings === undefined ? 'transparent' : options.endings}` - const type = options.type === undefined ? '' : String(options.type) - this.#type = /^[\x20-\x7E]*$/.test(type) ? type : '' - } - - /** - * The Blob interface's size property returns the - * size of the Blob in bytes. - */ - get size () { - return this.#size - } - - /** - * The type property of a Blob object returns the MIME type of the file. - */ - get type () { - return this.#type - } - - /** - * The text() method in the Blob interface returns a Promise - * that resolves with a string containing the contents of - * the blob, interpreted as UTF-8. - * - * @return {Promise} - */ - async text () { - // More optimized than using this.arrayBuffer() - // that requires twice as much ram - const decoder = new TextDecoder() - let str = '' - for await (const part of toIterator(this.#parts, false)) { - str += decoder.decode(part, { stream: true }) - } - // Remaining - str += decoder.decode() - return str - } - - /** - * The arrayBuffer() method in the Blob interface returns a - * Promise that resolves with the contents of the blob as - * binary data contained in an ArrayBuffer. - * - * @return {Promise} - */ - async arrayBuffer () { - // Easier way... Just a unnecessary overhead - // const view = new Uint8Array(this.size); - // await this.stream().getReader({mode: 'byob'}).read(view); - // return view.buffer; - - const data = new Uint8Array(this.size) - let offset = 0 - for await (const chunk of toIterator(this.#parts, false)) { - data.set(chunk, offset) - offset += chunk.length - } - - return data.buffer - } - - stream () { - const it = toIterator(this.#parts, true) - - return new globalThis.ReadableStream({ - // @ts-ignore - type: 'bytes', - async pull (ctrl) { - const chunk = await it.next() - chunk.done ? ctrl.close() : ctrl.enqueue(chunk.value) - }, - - async cancel () { - await it.return() - } - }) - } - - /** - * The Blob interface's slice() method creates and returns a - * new Blob object which contains data from a subset of the - * blob on which it's called. 
- * - * @param {number} [start] - * @param {number} [end] - * @param {string} [type] - */ - slice (start = 0, end = this.size, type = '') { - const { size } = this - - let relativeStart = start < 0 ? Math.max(size + start, 0) : Math.min(start, size) - let relativeEnd = end < 0 ? Math.max(size + end, 0) : Math.min(end, size) - - const span = Math.max(relativeEnd - relativeStart, 0) - const parts = this.#parts - const blobParts = [] - let added = 0 - - for (const part of parts) { - // don't add the overflow to new blobParts - if (added >= span) { - break - } - - const size = ArrayBuffer.isView(part) ? part.byteLength : part.size - if (relativeStart && size <= relativeStart) { - // Skip the beginning and change the relative - // start & end position as we skip the unwanted parts - relativeStart -= size - relativeEnd -= size - } else { - let chunk - if (ArrayBuffer.isView(part)) { - chunk = part.subarray(relativeStart, Math.min(size, relativeEnd)) - added += chunk.byteLength - } else { - chunk = part.slice(relativeStart, Math.min(size, relativeEnd)) - added += chunk.size - } - relativeEnd -= size - blobParts.push(chunk) - relativeStart = 0 // All next sequential parts should start at 0 - } - } - - const blob = new Blob([], { type: String(type).toLowerCase() }) - blob.#size = span - blob.#parts = blobParts - - return blob - } - - get [Symbol.toStringTag] () { - return 'Blob' - } - - static [Symbol.hasInstance] (object) { - return ( - object && - typeof object === 'object' && - typeof object.constructor === 'function' && - ( - typeof object.stream === 'function' || - typeof object.arrayBuffer === 'function' - ) && - /^(Blob|File)$/.test(object[Symbol.toStringTag]) - ) - } -} - -Object.defineProperties(_Blob.prototype, { - size: { enumerable: true }, - type: { enumerable: true }, - slice: { enumerable: true } -}) - -/** @type {typeof globalThis.Blob} */ -const Blob = _Blob -/* harmony default export */ const __WEBPACK_DEFAULT_EXPORT__ = (Blob); - - -/***/ }), - -/***/ 8010: -/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __nccwpck_require__) => { - -"use strict"; -/* harmony export */ __nccwpck_require__.d(__webpack_exports__, { -/* harmony export */ "Ct": () => (/* binding */ FormData), -/* harmony export */ "au": () => (/* binding */ formDataToBlob) -/* harmony export */ }); -/* unused harmony export File */ -/* harmony import */ var fetch_blob__WEBPACK_IMPORTED_MODULE_0__ = __nccwpck_require__(1410); -/* harmony import */ var fetch_blob_file_js__WEBPACK_IMPORTED_MODULE_1__ = __nccwpck_require__(3213); -/*! formdata-polyfill. MIT License. 
Jimmy Wärting */ - - - - -var {toStringTag:t,iterator:i,hasInstance:h}=Symbol, -r=Math.random, -m='append,set,get,getAll,delete,keys,values,entries,forEach,constructor'.split(','), -f=(a,b,c)=>(a+='',/^(Blob|File)$/.test(b && b[t])?[(c=c!==void 0?c+'':b[t]=='File'?b.name:'blob',a),b.name!==c||b[t]=='blob'?new fetch_blob_file_js__WEBPACK_IMPORTED_MODULE_1__/* ["default"] */ .Z([b],c,b):b]:[a,b+'']), -e=(c,f)=>(f?c:c.replace(/\r?\n|\r/g,'\r\n')).replace(/\n/g,'%0A').replace(/\r/g,'%0D').replace(/"/g,'%22'), -x=(n, a, e)=>{if(a.lengthtypeof o[m]!='function')} -append(...a){x('append',arguments,2);this.#d.push(f(...a))} -delete(a){x('delete',arguments,1);a+='';this.#d=this.#d.filter(([b])=>b!==a)} -get(a){x('get',arguments,1);a+='';for(var b=this.#d,l=b.length,c=0;cc[0]===a&&b.push(c[1]));return b} -has(a){x('has',arguments,1);a+='';return this.#d.some(b=>b[0]===a)} -forEach(a,b){x('forEach',arguments,1);for(var [c,d]of this)a.call(b,d,c,this)} -set(...a){x('set',arguments,2);var b=[],c=!0;a=f(...a);this.#d.forEach(d=>{d[0]===a[0]?c&&(c=!b.push(a)):b.push(d)});c&&b.push(a);this.#d=b} -*entries(){yield*this.#d} -*keys(){for(var[a]of this)yield a} -*values(){for(var[,a]of this)yield a}} - -/** @param {FormData} F */ -function formDataToBlob (F,B=fetch_blob__WEBPACK_IMPORTED_MODULE_0__/* ["default"] */ .Z){ -var b=`${r()}${r()}`.replace(/\./g, '').slice(-28).padStart(32, '-'),c=[],p=`--${b}\r\nContent-Disposition: form-data; name="` -F.forEach((v,n)=>typeof v=='string' -?c.push(p+e(n)+`"\r\n\r\n${v.replace(/\r(?!\n)|(? { - -"use strict"; -// ESM COMPAT FLAG -__nccwpck_require__.r(__webpack_exports__); - -// EXPORTS -__nccwpck_require__.d(__webpack_exports__, { - "AbortError": () => (/* reexport */ AbortError), - "Blob": () => (/* reexport */ from/* Blob */.t6), - "FetchError": () => (/* reexport */ FetchError), - "File": () => (/* reexport */ from/* File */.$B), - "FormData": () => (/* reexport */ esm_min/* FormData */.Ct), - "Headers": () => (/* reexport */ Headers), - "Request": () => (/* reexport */ Request), - "Response": () => (/* reexport */ Response), - "blobFrom": () => (/* reexport */ from/* blobFrom */.xB), - "blobFromSync": () => (/* reexport */ from/* blobFromSync */.SX), - "default": () => (/* binding */ fetch), - "fileFrom": () => (/* reexport */ from/* fileFrom */.e2), - "fileFromSync": () => (/* reexport */ from/* fileFromSync */.RA), - "isRedirect": () => (/* reexport */ isRedirect) -}); - -;// CONCATENATED MODULE: external "node:http" -const external_node_http_namespaceObject = require("node:http"); -;// CONCATENATED MODULE: external "node:https" -const external_node_https_namespaceObject = require("node:https"); -;// CONCATENATED MODULE: external "node:zlib" -const external_node_zlib_namespaceObject = require("node:zlib"); -;// CONCATENATED MODULE: external "node:stream" -const external_node_stream_namespaceObject = require("node:stream"); -;// CONCATENATED MODULE: external "node:buffer" -const external_node_buffer_namespaceObject = require("node:buffer"); -;// CONCATENATED MODULE: ./node_modules/data-uri-to-buffer/dist/index.js -/** - * Returns a `Buffer` instance from the given data URI `uri`. 
- * - * @param {String} uri Data URI to turn into a Buffer instance - * @returns {Buffer} Buffer instance from Data URI - * @api public - */ -function dataUriToBuffer(uri) { - if (!/^data:/i.test(uri)) { - throw new TypeError('`uri` does not appear to be a Data URI (must begin with "data:")'); - } - // strip newlines - uri = uri.replace(/\r?\n/g, ''); - // split the URI up into the "metadata" and the "data" portions - const firstComma = uri.indexOf(','); - if (firstComma === -1 || firstComma <= 4) { - throw new TypeError('malformed data: URI'); - } - // remove the "data:" scheme and parse the metadata - const meta = uri.substring(5, firstComma).split(';'); - let charset = ''; - let base64 = false; - const type = meta[0] || 'text/plain'; - let typeFull = type; - for (let i = 1; i < meta.length; i++) { - if (meta[i] === 'base64') { - base64 = true; - } - else { - typeFull += `;${meta[i]}`; - if (meta[i].indexOf('charset=') === 0) { - charset = meta[i].substring(8); - } - } - } - // defaults to US-ASCII only if type is not provided - if (!meta[0] && !charset.length) { - typeFull += ';charset=US-ASCII'; - charset = 'US-ASCII'; - } - // get the encoded data portion and decode URI-encoded chars - const encoding = base64 ? 'base64' : 'ascii'; - const data = unescape(uri.substring(firstComma + 1)); - const buffer = Buffer.from(data, encoding); - // set `.type` and `.typeFull` properties to MIME type - buffer.type = type; - buffer.typeFull = typeFull; - // set the `.charset` property - buffer.charset = charset; - return buffer; -} -/* harmony default export */ const dist = (dataUriToBuffer); -//# sourceMappingURL=index.js.map -;// CONCATENATED MODULE: external "node:util" -const external_node_util_namespaceObject = require("node:util"); -// EXTERNAL MODULE: ./node_modules/fetch-blob/index.js -var fetch_blob = __nccwpck_require__(1410); -// EXTERNAL MODULE: ./node_modules/formdata-polyfill/esm.min.js -var esm_min = __nccwpck_require__(8010); -;// CONCATENATED MODULE: ./node_modules/node-fetch/src/errors/base.js -class FetchBaseError extends Error { - constructor(message, type) { - super(message); - // Hide custom error implementation details from end-users - Error.captureStackTrace(this, this.constructor); - - this.type = type; - } - - get name() { - return this.constructor.name; - } - - get [Symbol.toStringTag]() { - return this.constructor.name; - } -} - -;// CONCATENATED MODULE: ./node_modules/node-fetch/src/errors/fetch-error.js - - - -/** - * @typedef {{ address?: string, code: string, dest?: string, errno: number, info?: object, message: string, path?: string, port?: number, syscall: string}} SystemError -*/ - -/** - * FetchError interface for operational errors - */ -class FetchError extends FetchBaseError { - /** - * @param {string} message - Error message for human - * @param {string} [type] - Error type for machine - * @param {SystemError} [systemError] - For Node.js system error - */ - constructor(message, type, systemError) { - super(message, type); - // When err.type is `system`, err.erroredSysCall contains system error and err.code contains system error code - if (systemError) { - // eslint-disable-next-line no-multi-assign - this.code = this.errno = systemError.code; - this.erroredSysCall = systemError.syscall; - } - } -} - -;// CONCATENATED MODULE: ./node_modules/node-fetch/src/utils/is.js -/** - * Is.js - * - * Object type checks. 
- */
-
-const NAME = Symbol.toStringTag;
-
-/**
- * Check if `obj` is a URLSearchParams object
- * ref: https://github.com/node-fetch/node-fetch/issues/296#issuecomment-307598143
- * @param {*} object - Object to check for
- * @return {boolean}
- */
-const isURLSearchParameters = object => {
-	return (
-		typeof object === 'object' &&
-		typeof object.append === 'function' &&
-		typeof object.delete === 'function' &&
-		typeof object.get === 'function' &&
-		typeof object.getAll === 'function' &&
-		typeof object.has === 'function' &&
-		typeof object.set === 'function' &&
-		typeof object.sort === 'function' &&
-		object[NAME] === 'URLSearchParams'
-	);
-};
-
-/**
- * Check if `object` is a W3C `Blob` object (which `File` inherits from)
- * @param {*} object - Object to check for
- * @return {boolean}
- */
-const isBlob = object => {
-	return (
-		object &&
-		typeof object === 'object' &&
-		typeof object.arrayBuffer === 'function' &&
-		typeof object.type === 'string' &&
-		typeof object.stream === 'function' &&
-		typeof object.constructor === 'function' &&
-		/^(Blob|File)$/.test(object[NAME])
-	);
-};
-
-/**
- * Check if `obj` is an instance of AbortSignal.
- * @param {*} object - Object to check for
- * @return {boolean}
- */
-const isAbortSignal = object => {
-	return (
-		typeof object === 'object' && (
-			object[NAME] === 'AbortSignal' ||
-			object[NAME] === 'EventTarget'
-		)
-	);
-};
-
-/**
- * isDomainOrSubdomain reports whether sub is a subdomain (or exact match) of
- * the parent domain.
- *
- * Both domains must already be in canonical form.
- * @param {string|URL} original
- * @param {string|URL} destination
- */
-const isDomainOrSubdomain = (destination, original) => {
-	const orig = new URL(original).hostname;
-	const dest = new URL(destination).hostname;
-
-	return orig === dest || orig.endsWith(`.${dest}`);
-};
-
-/**
- * isSameProtocol reports whether the two provided URLs use the same protocol.
- *
- * Both domains must already be in canonical form.
- * @param {string|URL} original - * @param {string|URL} destination - */ -const isSameProtocol = (destination, original) => { - const orig = new URL(original).protocol; - const dest = new URL(destination).protocol; - - return orig === dest; -}; - -;// CONCATENATED MODULE: ./node_modules/node-fetch/src/body.js - -/** - * Body.js - * - * Body interface provides common methods for Request and Response - */ - - - - - - - - - - - - -const pipeline = (0,external_node_util_namespaceObject.promisify)(external_node_stream_namespaceObject.pipeline); -const INTERNALS = Symbol('Body internals'); - -/** - * Body mixin - * - * Ref: https://fetch.spec.whatwg.org/#body - * - * @param Stream body Readable stream - * @param Object opts Response options - * @return Void - */ -class Body { - constructor(body, { - size = 0 - } = {}) { - let boundary = null; - - if (body === null) { - // Body is undefined or null - body = null; - } else if (isURLSearchParameters(body)) { - // Body is a URLSearchParams - body = external_node_buffer_namespaceObject.Buffer.from(body.toString()); - } else if (isBlob(body)) { - // Body is blob - } else if (external_node_buffer_namespaceObject.Buffer.isBuffer(body)) { - // Body is Buffer - } else if (external_node_util_namespaceObject.types.isAnyArrayBuffer(body)) { - // Body is ArrayBuffer - body = external_node_buffer_namespaceObject.Buffer.from(body); - } else if (ArrayBuffer.isView(body)) { - // Body is ArrayBufferView - body = external_node_buffer_namespaceObject.Buffer.from(body.buffer, body.byteOffset, body.byteLength); - } else if (body instanceof external_node_stream_namespaceObject) { - // Body is stream - } else if (body instanceof esm_min/* FormData */.Ct) { - // Body is FormData - body = (0,esm_min/* formDataToBlob */.au)(body); - boundary = body.type.split('=')[1]; - } else { - // None of the above - // coerce to string then buffer - body = external_node_buffer_namespaceObject.Buffer.from(String(body)); - } - - let stream = body; - - if (external_node_buffer_namespaceObject.Buffer.isBuffer(body)) { - stream = external_node_stream_namespaceObject.Readable.from(body); - } else if (isBlob(body)) { - stream = external_node_stream_namespaceObject.Readable.from(body.stream()); - } - - this[INTERNALS] = { - body, - stream, - boundary, - disturbed: false, - error: null - }; - this.size = size; - - if (body instanceof external_node_stream_namespaceObject) { - body.on('error', error_ => { - const error = error_ instanceof FetchBaseError ? 
- error_ : - new FetchError(`Invalid response body while trying to fetch ${this.url}: ${error_.message}`, 'system', error_); - this[INTERNALS].error = error; - }); - } - } - - get body() { - return this[INTERNALS].stream; - } - - get bodyUsed() { - return this[INTERNALS].disturbed; - } - - /** - * Decode response as ArrayBuffer - * - * @return Promise - */ - async arrayBuffer() { - const {buffer, byteOffset, byteLength} = await consumeBody(this); - return buffer.slice(byteOffset, byteOffset + byteLength); - } - - async formData() { - const ct = this.headers.get('content-type'); - - if (ct.startsWith('application/x-www-form-urlencoded')) { - const formData = new esm_min/* FormData */.Ct(); - const parameters = new URLSearchParams(await this.text()); - - for (const [name, value] of parameters) { - formData.append(name, value); - } - - return formData; - } - - const {toFormData} = await __nccwpck_require__.e(/* import() */ 37).then(__nccwpck_require__.bind(__nccwpck_require__, 4037)); - return toFormData(this.body, ct); - } - - /** - * Return raw response as Blob - * - * @return Promise - */ - async blob() { - const ct = (this.headers && this.headers.get('content-type')) || (this[INTERNALS].body && this[INTERNALS].body.type) || ''; - const buf = await this.arrayBuffer(); - - return new fetch_blob/* default */.Z([buf], { - type: ct - }); - } - - /** - * Decode response as json - * - * @return Promise - */ - async json() { - const text = await this.text(); - return JSON.parse(text); - } - - /** - * Decode response as text - * - * @return Promise - */ - async text() { - const buffer = await consumeBody(this); - return new TextDecoder().decode(buffer); - } - - /** - * Decode response as buffer (non-spec api) - * - * @return Promise - */ - buffer() { - return consumeBody(this); - } -} - -Body.prototype.buffer = (0,external_node_util_namespaceObject.deprecate)(Body.prototype.buffer, 'Please use \'response.arrayBuffer()\' instead of \'response.buffer()\'', 'node-fetch#buffer'); - -// In browsers, all properties are enumerable. -Object.defineProperties(Body.prototype, { - body: {enumerable: true}, - bodyUsed: {enumerable: true}, - arrayBuffer: {enumerable: true}, - blob: {enumerable: true}, - json: {enumerable: true}, - text: {enumerable: true}, - data: {get: (0,external_node_util_namespaceObject.deprecate)(() => {}, - 'data doesn\'t exist, use json(), text(), arrayBuffer(), or body instead', - 'https://github.com/node-fetch/node-fetch/issues/1000 (response)')} -}); - -/** - * Consume and convert an entire Body to a Buffer. 
- * - * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body - * - * @return Promise - */ -async function consumeBody(data) { - if (data[INTERNALS].disturbed) { - throw new TypeError(`body used already for: ${data.url}`); - } - - data[INTERNALS].disturbed = true; - - if (data[INTERNALS].error) { - throw data[INTERNALS].error; - } - - const {body} = data; - - // Body is null - if (body === null) { - return external_node_buffer_namespaceObject.Buffer.alloc(0); - } - - /* c8 ignore next 3 */ - if (!(body instanceof external_node_stream_namespaceObject)) { - return external_node_buffer_namespaceObject.Buffer.alloc(0); - } - - // Body is stream - // get ready to actually consume the body - const accum = []; - let accumBytes = 0; - - try { - for await (const chunk of body) { - if (data.size > 0 && accumBytes + chunk.length > data.size) { - const error = new FetchError(`content size at ${data.url} over limit: ${data.size}`, 'max-size'); - body.destroy(error); - throw error; - } - - accumBytes += chunk.length; - accum.push(chunk); - } - } catch (error) { - const error_ = error instanceof FetchBaseError ? error : new FetchError(`Invalid response body while trying to fetch ${data.url}: ${error.message}`, 'system', error); - throw error_; - } - - if (body.readableEnded === true || body._readableState.ended === true) { - try { - if (accum.every(c => typeof c === 'string')) { - return external_node_buffer_namespaceObject.Buffer.from(accum.join('')); - } - - return external_node_buffer_namespaceObject.Buffer.concat(accum, accumBytes); - } catch (error) { - throw new FetchError(`Could not create Buffer from response body for ${data.url}: ${error.message}`, 'system', error); - } - } else { - throw new FetchError(`Premature close of server response while trying to fetch ${data.url}`); - } -} - -/** - * Clone body given Res/Req instance - * - * @param Mixed instance Response or Request instance - * @param String highWaterMark highWaterMark for both PassThrough body streams - * @return Mixed - */ -const clone = (instance, highWaterMark) => { - let p1; - let p2; - let {body} = instance[INTERNALS]; - - // Don't allow cloning a used body - if (instance.bodyUsed) { - throw new Error('cannot clone body after it is used'); - } - - // Check that body is a stream and not form-data object - // note: we can't clone the form-data object without having it as a dependency - if ((body instanceof external_node_stream_namespaceObject) && (typeof body.getBoundary !== 'function')) { - // Tee instance body - p1 = new external_node_stream_namespaceObject.PassThrough({highWaterMark}); - p2 = new external_node_stream_namespaceObject.PassThrough({highWaterMark}); - body.pipe(p1); - body.pipe(p2); - // Set instance body to teed body and return the other teed body - instance[INTERNALS].stream = p1; - body = p2; - } - - return body; -}; - -const getNonSpecFormDataBoundary = (0,external_node_util_namespaceObject.deprecate)( - body => body.getBoundary(), - 'form-data doesn\'t follow the spec and requires special treatment. Use alternative package', - 'https://github.com/node-fetch/node-fetch/issues/1167' -); - -/** - * Performs the operation "extract a `Content-Type` value from |object|" as - * specified in the specification: - * https://fetch.spec.whatwg.org/#concept-bodyinit-extract - * - * This function assumes that instance.body is present. 
- * - * @param {any} body Any options.body input - * @returns {string | null} - */ -const extractContentType = (body, request) => { - // Body is null or undefined - if (body === null) { - return null; - } - - // Body is string - if (typeof body === 'string') { - return 'text/plain;charset=UTF-8'; - } - - // Body is a URLSearchParams - if (isURLSearchParameters(body)) { - return 'application/x-www-form-urlencoded;charset=UTF-8'; - } - - // Body is blob - if (isBlob(body)) { - return body.type || null; - } - - // Body is a Buffer (Buffer, ArrayBuffer or ArrayBufferView) - if (external_node_buffer_namespaceObject.Buffer.isBuffer(body) || external_node_util_namespaceObject.types.isAnyArrayBuffer(body) || ArrayBuffer.isView(body)) { - return null; - } - - if (body instanceof esm_min/* FormData */.Ct) { - return `multipart/form-data; boundary=${request[INTERNALS].boundary}`; - } - - // Detect form data input from form-data module - if (body && typeof body.getBoundary === 'function') { - return `multipart/form-data;boundary=${getNonSpecFormDataBoundary(body)}`; - } - - // Body is stream - can't really do much about this - if (body instanceof external_node_stream_namespaceObject) { - return null; - } - - // Body constructor defaults other things to string - return 'text/plain;charset=UTF-8'; -}; - -/** - * The Fetch Standard treats this as if "total bytes" is a property on the body. - * For us, we have to explicitly get it with a function. - * - * ref: https://fetch.spec.whatwg.org/#concept-body-total-bytes - * - * @param {any} obj.body Body object from the Body instance. - * @returns {number | null} - */ -const getTotalBytes = request => { - const {body} = request[INTERNALS]; - - // Body is null or undefined - if (body === null) { - return 0; - } - - // Body is Blob - if (isBlob(body)) { - return body.size; - } - - // Body is Buffer - if (external_node_buffer_namespaceObject.Buffer.isBuffer(body)) { - return body.length; - } - - // Detect form data input from form-data module - if (body && typeof body.getLengthSync === 'function') { - return body.hasKnownLength && body.hasKnownLength() ? body.getLengthSync() : null; - } - - // Body is stream - return null; -}; - -/** - * Write a Body to a Node.js WritableStream (e.g. http.Request) object. - * - * @param {Stream.Writable} dest The stream to write to. - * @param obj.body Body object from the Body instance. - * @returns {Promise} - */ -const writeToStream = async (dest, {body}) => { - if (body === null) { - // Body is null - dest.end(); - } else { - // Body is stream - await pipeline(body, dest); - } -}; - -;// CONCATENATED MODULE: ./node_modules/node-fetch/src/headers.js -/** - * Headers.js - * - * Headers class offers convenient helpers - */ - - - - -/* c8 ignore next 9 */ -const validateHeaderName = typeof external_node_http_namespaceObject.validateHeaderName === 'function' ? - external_node_http_namespaceObject.validateHeaderName : - name => { - if (!/^[\^`\-\w!#$%&'*+.|~]+$/.test(name)) { - const error = new TypeError(`Header name must be a valid HTTP token [${name}]`); - Object.defineProperty(error, 'code', {value: 'ERR_INVALID_HTTP_TOKEN'}); - throw error; - } - }; - -/* c8 ignore next 9 */ -const validateHeaderValue = typeof external_node_http_namespaceObject.validateHeaderValue === 'function' ? 
- external_node_http_namespaceObject.validateHeaderValue : - (name, value) => { - if (/[^\t\u0020-\u007E\u0080-\u00FF]/.test(value)) { - const error = new TypeError(`Invalid character in header content ["${name}"]`); - Object.defineProperty(error, 'code', {value: 'ERR_INVALID_CHAR'}); - throw error; - } - }; - -/** - * @typedef {Headers | Record | Iterable | Iterable>} HeadersInit - */ - -/** - * This Fetch API interface allows you to perform various actions on HTTP request and response headers. - * These actions include retrieving, setting, adding to, and removing. - * A Headers object has an associated header list, which is initially empty and consists of zero or more name and value pairs. - * You can add to this using methods like append() (see Examples.) - * In all methods of this interface, header names are matched by case-insensitive byte sequence. - * - */ -class Headers extends URLSearchParams { - /** - * Headers class - * - * @constructor - * @param {HeadersInit} [init] - Response headers - */ - constructor(init) { - // Validate and normalize init object in [name, value(s)][] - /** @type {string[][]} */ - let result = []; - if (init instanceof Headers) { - const raw = init.raw(); - for (const [name, values] of Object.entries(raw)) { - result.push(...values.map(value => [name, value])); - } - } else if (init == null) { // eslint-disable-line no-eq-null, eqeqeq - // No op - } else if (typeof init === 'object' && !external_node_util_namespaceObject.types.isBoxedPrimitive(init)) { - const method = init[Symbol.iterator]; - // eslint-disable-next-line no-eq-null, eqeqeq - if (method == null) { - // Record - result.push(...Object.entries(init)); - } else { - if (typeof method !== 'function') { - throw new TypeError('Header pairs must be iterable'); - } - - // Sequence> - // Note: per spec we have to first exhaust the lists then process them - result = [...init] - .map(pair => { - if ( - typeof pair !== 'object' || external_node_util_namespaceObject.types.isBoxedPrimitive(pair) - ) { - throw new TypeError('Each header pair must be an iterable object'); - } - - return [...pair]; - }).map(pair => { - if (pair.length !== 2) { - throw new TypeError('Each header pair must be a name/value tuple'); - } - - return [...pair]; - }); - } - } else { - throw new TypeError('Failed to construct \'Headers\': The provided value is not of type \'(sequence> or record)'); - } - - // Validate and lowercase - result = - result.length > 0 ? 
- result.map(([name, value]) => { - validateHeaderName(name); - validateHeaderValue(name, String(value)); - return [String(name).toLowerCase(), String(value)]; - }) : - undefined; - - super(result); - - // Returning a Proxy that will lowercase key names, validate parameters and sort keys - // eslint-disable-next-line no-constructor-return - return new Proxy(this, { - get(target, p, receiver) { - switch (p) { - case 'append': - case 'set': - return (name, value) => { - validateHeaderName(name); - validateHeaderValue(name, String(value)); - return URLSearchParams.prototype[p].call( - target, - String(name).toLowerCase(), - String(value) - ); - }; - - case 'delete': - case 'has': - case 'getAll': - return name => { - validateHeaderName(name); - return URLSearchParams.prototype[p].call( - target, - String(name).toLowerCase() - ); - }; - - case 'keys': - return () => { - target.sort(); - return new Set(URLSearchParams.prototype.keys.call(target)).keys(); - }; - - default: - return Reflect.get(target, p, receiver); - } - } - }); - /* c8 ignore next */ - } - - get [Symbol.toStringTag]() { - return this.constructor.name; - } - - toString() { - return Object.prototype.toString.call(this); - } - - get(name) { - const values = this.getAll(name); - if (values.length === 0) { - return null; - } - - let value = values.join(', '); - if (/^content-encoding$/i.test(name)) { - value = value.toLowerCase(); - } - - return value; - } - - forEach(callback, thisArg = undefined) { - for (const name of this.keys()) { - Reflect.apply(callback, thisArg, [this.get(name), name, this]); - } - } - - * values() { - for (const name of this.keys()) { - yield this.get(name); - } - } - - /** - * @type {() => IterableIterator<[string, string]>} - */ - * entries() { - for (const name of this.keys()) { - yield [name, this.get(name)]; - } - } - - [Symbol.iterator]() { - return this.entries(); - } - - /** - * Node-fetch non-spec method - * returning all headers and their values as array - * @returns {Record} - */ - raw() { - return [...this.keys()].reduce((result, key) => { - result[key] = this.getAll(key); - return result; - }, {}); - } - - /** - * For better console.log(headers) and also to convert Headers into Node.js Request compatible format - */ - [Symbol.for('nodejs.util.inspect.custom')]() { - return [...this.keys()].reduce((result, key) => { - const values = this.getAll(key); - // Http.request() only supports string as Host header. - // This hack makes specifying custom Host header possible. - if (key === 'host') { - result[key] = values[0]; - } else { - result[key] = values.length > 1 ? values : values[0]; - } - - return result; - }, {}); - } -} - -/** - * Re-shaping object for Web IDL tests - * Only need to do it for overridden methods - */ -Object.defineProperties( - Headers.prototype, - ['get', 'entries', 'forEach', 'values'].reduce((result, property) => { - result[property] = {enumerable: true}; - return result; - }, {}) -); - -/** - * Create a Headers object from an http.IncomingMessage.rawHeaders, ignoring those that do - * not conform to HTTP grammar productions. 
- * @param {import('http').IncomingMessage['rawHeaders']} headers - */ -function fromRawHeaders(headers = []) { - return new Headers( - headers - // Split into pairs - .reduce((result, value, index, array) => { - if (index % 2 === 0) { - result.push(array.slice(index, index + 2)); - } - - return result; - }, []) - .filter(([name, value]) => { - try { - validateHeaderName(name); - validateHeaderValue(name, String(value)); - return true; - } catch { - return false; - } - }) - - ); -} - -;// CONCATENATED MODULE: ./node_modules/node-fetch/src/utils/is-redirect.js -const redirectStatus = new Set([301, 302, 303, 307, 308]); - -/** - * Redirect code matching - * - * @param {number} code - Status code - * @return {boolean} - */ -const isRedirect = code => { - return redirectStatus.has(code); -}; - -;// CONCATENATED MODULE: ./node_modules/node-fetch/src/response.js -/** - * Response.js - * - * Response class provides content decoding - */ - - - - - -const response_INTERNALS = Symbol('Response internals'); - -/** - * Response class - * - * Ref: https://fetch.spec.whatwg.org/#response-class - * - * @param Stream body Readable stream - * @param Object opts Response options - * @return Void - */ -class Response extends Body { - constructor(body = null, options = {}) { - super(body, options); - - // eslint-disable-next-line no-eq-null, eqeqeq, no-negated-condition - const status = options.status != null ? options.status : 200; - - const headers = new Headers(options.headers); - - if (body !== null && !headers.has('Content-Type')) { - const contentType = extractContentType(body, this); - if (contentType) { - headers.append('Content-Type', contentType); - } - } - - this[response_INTERNALS] = { - type: 'default', - url: options.url, - status, - statusText: options.statusText || '', - headers, - counter: options.counter, - highWaterMark: options.highWaterMark - }; - } - - get type() { - return this[response_INTERNALS].type; - } - - get url() { - return this[response_INTERNALS].url || ''; - } - - get status() { - return this[response_INTERNALS].status; - } - - /** - * Convenience property representing if the request ended normally - */ - get ok() { - return this[response_INTERNALS].status >= 200 && this[response_INTERNALS].status < 300; - } - - get redirected() { - return this[response_INTERNALS].counter > 0; - } - - get statusText() { - return this[response_INTERNALS].statusText; - } - - get headers() { - return this[response_INTERNALS].headers; - } - - get highWaterMark() { - return this[response_INTERNALS].highWaterMark; - } - - /** - * Clone this response - * - * @return Response - */ - clone() { - return new Response(clone(this, this.highWaterMark), { - type: this.type, - url: this.url, - status: this.status, - statusText: this.statusText, - headers: this.headers, - ok: this.ok, - redirected: this.redirected, - size: this.size, - highWaterMark: this.highWaterMark - }); - } - - /** - * @param {string} url The URL that the new response is to originate from. - * @param {number} status An optional status code for the response (e.g., 302.) - * @returns {Response} A Response object. 
- */ - static redirect(url, status = 302) { - if (!isRedirect(status)) { - throw new RangeError('Failed to execute "redirect" on "response": Invalid status code'); - } - - return new Response(null, { - headers: { - location: new URL(url).toString() - }, - status - }); - } - - static error() { - const response = new Response(null, {status: 0, statusText: ''}); - response[response_INTERNALS].type = 'error'; - return response; - } - - get [Symbol.toStringTag]() { - return 'Response'; - } -} - -Object.defineProperties(Response.prototype, { - type: {enumerable: true}, - url: {enumerable: true}, - status: {enumerable: true}, - ok: {enumerable: true}, - redirected: {enumerable: true}, - statusText: {enumerable: true}, - headers: {enumerable: true}, - clone: {enumerable: true} -}); - -;// CONCATENATED MODULE: external "node:url" -const external_node_url_namespaceObject = require("node:url"); -;// CONCATENATED MODULE: ./node_modules/node-fetch/src/utils/get-search.js -const getSearch = parsedURL => { - if (parsedURL.search) { - return parsedURL.search; - } - - const lastOffset = parsedURL.href.length - 1; - const hash = parsedURL.hash || (parsedURL.href[lastOffset] === '#' ? '#' : ''); - return parsedURL.href[lastOffset - hash.length] === '?' ? '?' : ''; -}; - -;// CONCATENATED MODULE: external "node:net" -const external_node_net_namespaceObject = require("node:net"); -;// CONCATENATED MODULE: ./node_modules/node-fetch/src/utils/referrer.js - - -/** - * @external URL - * @see {@link https://developer.mozilla.org/en-US/docs/Web/API/URL|URL} - */ - -/** - * @module utils/referrer - * @private - */ - -/** - * @see {@link https://w3c.github.io/webappsec-referrer-policy/#strip-url|Referrer Policy §8.4. Strip url for use as a referrer} - * @param {string} URL - * @param {boolean} [originOnly=false] - */ -function stripURLForUseAsAReferrer(url, originOnly = false) { - // 1. If url is null, return no referrer. - if (url == null) { // eslint-disable-line no-eq-null, eqeqeq - return 'no-referrer'; - } - - url = new URL(url); - - // 2. If url's scheme is a local scheme, then return no referrer. - if (/^(about|blob|data):$/.test(url.protocol)) { - return 'no-referrer'; - } - - // 3. Set url's username to the empty string. - url.username = ''; - - // 4. Set url's password to null. - // Note: `null` appears to be a mistake as this actually results in the password being `"null"`. - url.password = ''; - - // 5. Set url's fragment to null. - // Note: `null` appears to be a mistake as this actually results in the fragment being `"#null"`. - url.hash = ''; - - // 6. If the origin-only flag is true, then: - if (originOnly) { - // 6.1. Set url's path to null. - // Note: `null` appears to be a mistake as this actually results in the path being `"/null"`. - url.pathname = ''; - - // 6.2. Set url's query to null. - // Note: `null` appears to be a mistake as this actually results in the query being `"?null"`. - url.search = ''; - } - - // 7. Return url. 
- return url; -} - -/** - * @see {@link https://w3c.github.io/webappsec-referrer-policy/#enumdef-referrerpolicy|enum ReferrerPolicy} - */ -const ReferrerPolicy = new Set([ - '', - 'no-referrer', - 'no-referrer-when-downgrade', - 'same-origin', - 'origin', - 'strict-origin', - 'origin-when-cross-origin', - 'strict-origin-when-cross-origin', - 'unsafe-url' -]); - -/** - * @see {@link https://w3c.github.io/webappsec-referrer-policy/#default-referrer-policy|default referrer policy} - */ -const DEFAULT_REFERRER_POLICY = 'strict-origin-when-cross-origin'; - -/** - * @see {@link https://w3c.github.io/webappsec-referrer-policy/#referrer-policies|Referrer Policy §3. Referrer Policies} - * @param {string} referrerPolicy - * @returns {string} referrerPolicy - */ -function validateReferrerPolicy(referrerPolicy) { - if (!ReferrerPolicy.has(referrerPolicy)) { - throw new TypeError(`Invalid referrerPolicy: ${referrerPolicy}`); - } - - return referrerPolicy; -} - -/** - * @see {@link https://w3c.github.io/webappsec-secure-contexts/#is-origin-trustworthy|Referrer Policy §3.2. Is origin potentially trustworthy?} - * @param {external:URL} url - * @returns `true`: "Potentially Trustworthy", `false`: "Not Trustworthy" - */ -function isOriginPotentiallyTrustworthy(url) { - // 1. If origin is an opaque origin, return "Not Trustworthy". - // Not applicable - - // 2. Assert: origin is a tuple origin. - // Not for implementations - - // 3. If origin's scheme is either "https" or "wss", return "Potentially Trustworthy". - if (/^(http|ws)s:$/.test(url.protocol)) { - return true; - } - - // 4. If origin's host component matches one of the CIDR notations 127.0.0.0/8 or ::1/128 [RFC4632], return "Potentially Trustworthy". - const hostIp = url.host.replace(/(^\[)|(]$)/g, ''); - const hostIPVersion = (0,external_node_net_namespaceObject.isIP)(hostIp); - - if (hostIPVersion === 4 && /^127\./.test(hostIp)) { - return true; - } - - if (hostIPVersion === 6 && /^(((0+:){7})|(::(0+:){0,6}))0*1$/.test(hostIp)) { - return true; - } - - // 5. If origin's host component is "localhost" or falls within ".localhost", and the user agent conforms to the name resolution rules in [let-localhost-be-localhost], return "Potentially Trustworthy". - // We are returning FALSE here because we cannot ensure conformance to - // let-localhost-be-loalhost (https://tools.ietf.org/html/draft-west-let-localhost-be-localhost) - if (url.host === 'localhost' || url.host.endsWith('.localhost')) { - return false; - } - - // 6. If origin's scheme component is file, return "Potentially Trustworthy". - if (url.protocol === 'file:') { - return true; - } - - // 7. If origin's scheme component is one which the user agent considers to be authenticated, return "Potentially Trustworthy". - // Not supported - - // 8. If origin has been configured as a trustworthy origin, return "Potentially Trustworthy". - // Not supported - - // 9. Return "Not Trustworthy". - return false; -} - -/** - * @see {@link https://w3c.github.io/webappsec-secure-contexts/#is-url-trustworthy|Referrer Policy §3.3. Is url potentially trustworthy?} - * @param {external:URL} url - * @returns `true`: "Potentially Trustworthy", `false`: "Not Trustworthy" - */ -function isUrlPotentiallyTrustworthy(url) { - // 1. If url is "about:blank" or "about:srcdoc", return "Potentially Trustworthy". - if (/^about:(blank|srcdoc)$/.test(url)) { - return true; - } - - // 2. If url's scheme is "data", return "Potentially Trustworthy". 
-	if (url.protocol === 'data:') {
-		return true;
-	}
-
-	// Note: The origin of blob: and filesystem: URLs is the origin of the context in which they were
-	// created. Therefore, blobs created in a trustworthy origin will themselves be potentially
-	// trustworthy.
-	if (/^(blob|filesystem):$/.test(url.protocol)) {
-		return true;
-	}
-
-	// 3. Return the result of executing §3.2 Is origin potentially trustworthy? on url's origin.
-	return isOriginPotentiallyTrustworthy(url);
-}
-
-/**
- * Modifies the referrerURL to enforce any extra security policy considerations.
- * @see {@link https://w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer|Referrer Policy §8.3. Determine request's Referrer}, step 7
- * @callback module:utils/referrer~referrerURLCallback
- * @param {external:URL} referrerURL
- * @returns {external:URL} modified referrerURL
- */
-
-/**
- * Modifies the referrerOrigin to enforce any extra security policy considerations.
- * @see {@link https://w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer|Referrer Policy §8.3. Determine request's Referrer}, step 7
- * @callback module:utils/referrer~referrerOriginCallback
- * @param {external:URL} referrerOrigin
- * @returns {external:URL} modified referrerOrigin
- */
-
-/**
- * @see {@link https://w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer|Referrer Policy §8.3. Determine request's Referrer}
- * @param {Request} request
- * @param {object} o
- * @param {module:utils/referrer~referrerURLCallback} o.referrerURLCallback
- * @param {module:utils/referrer~referrerOriginCallback} o.referrerOriginCallback
- * @returns {external:URL} Request's referrer
- */
-function determineRequestsReferrer(request, {referrerURLCallback, referrerOriginCallback} = {}) {
-	// There are 2 notes in the specification about invalid pre-conditions. We return null, here, for
-	// these cases:
-	// > Note: If request's referrer is "no-referrer", Fetch will not call into this algorithm.
-	// > Note: If request's referrer policy is the empty string, Fetch will not call into this
-	// > algorithm.
-	if (request.referrer === 'no-referrer' || request.referrerPolicy === '') {
-		return null;
-	}
-
-	// 1. Let policy be request's associated referrer policy.
-	const policy = request.referrerPolicy;
-
-	// 2. Let environment be request's client.
-	// not applicable to node.js
-
-	// 3. Switch on request's referrer:
-	if (request.referrer === 'about:client') {
-		return 'no-referrer';
-	}
-
-	// "a URL": Let referrerSource be request's referrer.
-	const referrerSource = request.referrer;
-
-	// 4. Let request's referrerURL be the result of stripping referrerSource for use as a referrer.
-	let referrerURL = stripURLForUseAsAReferrer(referrerSource);
-
-	// 5. Let referrerOrigin be the result of stripping referrerSource for use as a referrer, with the
-	// origin-only flag set to true.
-	let referrerOrigin = stripURLForUseAsAReferrer(referrerSource, true);
-
-	// 6. If the result of serializing referrerURL is a string whose length is greater than 4096, set
-	// referrerURL to referrerOrigin.
-	if (referrerURL.toString().length > 4096) {
-		referrerURL = referrerOrigin;
-	}
-
-	// 7. The user agent MAY alter referrerURL or referrerOrigin at this point to enforce arbitrary
-	// policy considerations in the interests of minimizing data leakage. For example, the user
-	// agent could strip the URL down to an origin, modify its host, replace it with an empty
-	// string, etc.
-	if (referrerURLCallback) {
-		referrerURL = referrerURLCallback(referrerURL);
-	}
-
-	if (referrerOriginCallback) {
-		referrerOrigin = referrerOriginCallback(referrerOrigin);
-	}
-
-	// 8.Execute the statements corresponding to the value of policy:
-	const currentURL = new URL(request.url);
-
-	switch (policy) {
-		case 'no-referrer':
-			return 'no-referrer';
-
-		case 'origin':
-			return referrerOrigin;
-
-		case 'unsafe-url':
-			return referrerURL;
-
-		case 'strict-origin':
-			// 1. If referrerURL is a potentially trustworthy URL and request's current URL is not a
-			// potentially trustworthy URL, then return no referrer.
-			if (isUrlPotentiallyTrustworthy(referrerURL) && !isUrlPotentiallyTrustworthy(currentURL)) {
-				return 'no-referrer';
-			}
-
-			// 2. Return referrerOrigin.
-			return referrerOrigin.toString();
-
-		case 'strict-origin-when-cross-origin':
-			// 1. If the origin of referrerURL and the origin of request's current URL are the same, then
-			// return referrerURL.
-			if (referrerURL.origin === currentURL.origin) {
-				return referrerURL;
-			}
-
-			// 2. If referrerURL is a potentially trustworthy URL and request's current URL is not a
-			// potentially trustworthy URL, then return no referrer.
-			if (isUrlPotentiallyTrustworthy(referrerURL) && !isUrlPotentiallyTrustworthy(currentURL)) {
-				return 'no-referrer';
-			}
-
-			// 3. Return referrerOrigin.
-			return referrerOrigin;
-
-		case 'same-origin':
-			// 1. If the origin of referrerURL and the origin of request's current URL are the same, then
-			// return referrerURL.
-			if (referrerURL.origin === currentURL.origin) {
-				return referrerURL;
-			}
-
-			// 2. Return no referrer.
-			return 'no-referrer';
-
-		case 'origin-when-cross-origin':
-			// 1. If the origin of referrerURL and the origin of request's current URL are the same, then
-			// return referrerURL.
-			if (referrerURL.origin === currentURL.origin) {
-				return referrerURL;
-			}
-
-			// Return referrerOrigin.
-			return referrerOrigin;
-
-		case 'no-referrer-when-downgrade':
-			// 1. If referrerURL is a potentially trustworthy URL and request's current URL is not a
-			// potentially trustworthy URL, then return no referrer.
-			if (isUrlPotentiallyTrustworthy(referrerURL) && !isUrlPotentiallyTrustworthy(currentURL)) {
-				return 'no-referrer';
-			}
-
-			// 2. Return referrerURL.
-			return referrerURL;
-
-		default:
-			throw new TypeError(`Invalid referrerPolicy: ${policy}`);
-	}
-}
-
-/**
- * @see {@link https://w3c.github.io/webappsec-referrer-policy/#parse-referrer-policy-from-header|Referrer Policy §8.1. Parse a referrer policy from a Referrer-Policy header}
- * @param {Headers} headers Response headers
- * @returns {string} policy
- */
-function parseReferrerPolicyFromHeader(headers) {
-	// 1. Let policy-tokens be the result of extracting header list values given `Referrer-Policy`
-	// and response’s header list.
-	const policyTokens = (headers.get('referrer-policy') || '').split(/[,\s]+/);
-
-	// 2. Let policy be the empty string.
-	let policy = '';
-
-	// 3. For each token in policy-tokens, if token is a referrer policy and token is not the empty
-	// string, then set policy to token.
-	// Note: This algorithm loops over multiple policy values to allow deployment of new policy
-	// values with fallbacks for older user agents, as described in § 11.1 Unknown Policy Values.
-	for (const token of policyTokens) {
-		if (token && ReferrerPolicy.has(token)) {
-			policy = token;
-		}
-	}
-
-	// 4. Return policy.
- return policy; -} - -;// CONCATENATED MODULE: ./node_modules/node-fetch/src/request.js -/** - * Request.js - * - * Request class contains server only options - * - * All spec algorithm step numbers are based on https://fetch.spec.whatwg.org/commit-snapshots/ae716822cb3a61843226cd090eefc6589446c1d2/. - */ - - - - - - - - - -const request_INTERNALS = Symbol('Request internals'); - -/** - * Check if `obj` is an instance of Request. - * - * @param {*} object - * @return {boolean} - */ -const isRequest = object => { - return ( - typeof object === 'object' && - typeof object[request_INTERNALS] === 'object' - ); -}; - -const doBadDataWarn = (0,external_node_util_namespaceObject.deprecate)(() => {}, - '.data is not a valid RequestInit property, use .body instead', - 'https://github.com/node-fetch/node-fetch/issues/1000 (request)'); - -/** - * Request class - * - * Ref: https://fetch.spec.whatwg.org/#request-class - * - * @param Mixed input Url or Request instance - * @param Object init Custom options - * @return Void - */ -class Request extends Body { - constructor(input, init = {}) { - let parsedURL; - - // Normalize input and force URL to be encoded as UTF-8 (https://github.com/node-fetch/node-fetch/issues/245) - if (isRequest(input)) { - parsedURL = new URL(input.url); - } else { - parsedURL = new URL(input); - input = {}; - } - - if (parsedURL.username !== '' || parsedURL.password !== '') { - throw new TypeError(`${parsedURL} is an url with embedded credentials.`); - } - - let method = init.method || input.method || 'GET'; - if (/^(delete|get|head|options|post|put)$/i.test(method)) { - method = method.toUpperCase(); - } - - if (!isRequest(init) && 'data' in init) { - doBadDataWarn(); - } - - // eslint-disable-next-line no-eq-null, eqeqeq - if ((init.body != null || (isRequest(input) && input.body !== null)) && - (method === 'GET' || method === 'HEAD')) { - throw new TypeError('Request with GET/HEAD method cannot have body'); - } - - const inputBody = init.body ? - init.body : - (isRequest(input) && input.body !== null ? - clone(input) : - null); - - super(inputBody, { - size: init.size || input.size || 0 - }); - - const headers = new Headers(init.headers || input.headers || {}); - - if (inputBody !== null && !headers.has('Content-Type')) { - const contentType = extractContentType(inputBody, this); - if (contentType) { - headers.set('Content-Type', contentType); - } - } - - let signal = isRequest(input) ? - input.signal : - null; - if ('signal' in init) { - signal = init.signal; - } - - // eslint-disable-next-line no-eq-null, eqeqeq - if (signal != null && !isAbortSignal(signal)) { - throw new TypeError('Expected signal to be an instanceof AbortSignal or EventTarget'); - } - - // §5.4, Request constructor steps, step 15.1 - // eslint-disable-next-line no-eq-null, eqeqeq - let referrer = init.referrer == null ? input.referrer : init.referrer; - if (referrer === '') { - // §5.4, Request constructor steps, step 15.2 - referrer = 'no-referrer'; - } else if (referrer) { - // §5.4, Request constructor steps, step 15.3.1, 15.3.2 - const parsedReferrer = new URL(referrer); - // §5.4, Request constructor steps, step 15.3.3, 15.3.4 - referrer = /^about:(\/\/)?client$/.test(parsedReferrer) ? 'client' : parsedReferrer; - } else { - referrer = undefined; - } - - this[request_INTERNALS] = { - method, - redirect: init.redirect || input.redirect || 'follow', - headers, - parsedURL, - signal, - referrer - }; - - // Node-fetch-only options - this.follow = init.follow === undefined ? 
(input.follow === undefined ? 20 : input.follow) : init.follow; - this.compress = init.compress === undefined ? (input.compress === undefined ? true : input.compress) : init.compress; - this.counter = init.counter || input.counter || 0; - this.agent = init.agent || input.agent; - this.highWaterMark = init.highWaterMark || input.highWaterMark || 16384; - this.insecureHTTPParser = init.insecureHTTPParser || input.insecureHTTPParser || false; - - // §5.4, Request constructor steps, step 16. - // Default is empty string per https://fetch.spec.whatwg.org/#concept-request-referrer-policy - this.referrerPolicy = init.referrerPolicy || input.referrerPolicy || ''; - } - - /** @returns {string} */ - get method() { - return this[request_INTERNALS].method; - } - - /** @returns {string} */ - get url() { - return (0,external_node_url_namespaceObject.format)(this[request_INTERNALS].parsedURL); - } - - /** @returns {Headers} */ - get headers() { - return this[request_INTERNALS].headers; - } - - get redirect() { - return this[request_INTERNALS].redirect; - } - - /** @returns {AbortSignal} */ - get signal() { - return this[request_INTERNALS].signal; - } - - // https://fetch.spec.whatwg.org/#dom-request-referrer - get referrer() { - if (this[request_INTERNALS].referrer === 'no-referrer') { - return ''; - } - - if (this[request_INTERNALS].referrer === 'client') { - return 'about:client'; - } - - if (this[request_INTERNALS].referrer) { - return this[request_INTERNALS].referrer.toString(); - } - - return undefined; - } - - get referrerPolicy() { - return this[request_INTERNALS].referrerPolicy; - } - - set referrerPolicy(referrerPolicy) { - this[request_INTERNALS].referrerPolicy = validateReferrerPolicy(referrerPolicy); - } - - /** - * Clone this request - * - * @return Request - */ - clone() { - return new Request(this); - } - - get [Symbol.toStringTag]() { - return 'Request'; - } -} - -Object.defineProperties(Request.prototype, { - method: {enumerable: true}, - url: {enumerable: true}, - headers: {enumerable: true}, - redirect: {enumerable: true}, - clone: {enumerable: true}, - signal: {enumerable: true}, - referrer: {enumerable: true}, - referrerPolicy: {enumerable: true} -}); - -/** - * Convert a Request to Node.js http request options. - * - * @param {Request} request - A Request instance - * @return The options object to be passed to http.request - */ -const getNodeRequestOptions = request => { - const {parsedURL} = request[request_INTERNALS]; - const headers = new Headers(request[request_INTERNALS].headers); - - // Fetch step 1.3 - if (!headers.has('Accept')) { - headers.set('Accept', '*/*'); - } - - // HTTP-network-or-cache fetch steps 2.4-2.7 - let contentLengthValue = null; - if (request.body === null && /^(post|put)$/i.test(request.method)) { - contentLengthValue = '0'; - } - - if (request.body !== null) { - const totalBytes = getTotalBytes(request); - // Set Content-Length if totalBytes is a number (that is not NaN) - if (typeof totalBytes === 'number' && !Number.isNaN(totalBytes)) { - contentLengthValue = String(totalBytes); - } - } - - if (contentLengthValue) { - headers.set('Content-Length', contentLengthValue); - } - - // 4.1. Main fetch, step 2.6 - // > If request's referrer policy is the empty string, then set request's referrer policy to the - // > default referrer policy. - if (request.referrerPolicy === '') { - request.referrerPolicy = DEFAULT_REFERRER_POLICY; - } - - // 4.1. 
Main fetch, step 2.7 - // > If request's referrer is not "no-referrer", set request's referrer to the result of invoking - // > determine request's referrer. - if (request.referrer && request.referrer !== 'no-referrer') { - request[request_INTERNALS].referrer = determineRequestsReferrer(request); - } else { - request[request_INTERNALS].referrer = 'no-referrer'; - } - - // 4.5. HTTP-network-or-cache fetch, step 6.9 - // > If httpRequest's referrer is a URL, then append `Referer`/httpRequest's referrer, serialized - // > and isomorphic encoded, to httpRequest's header list. - if (request[request_INTERNALS].referrer instanceof URL) { - headers.set('Referer', request.referrer); - } - - // HTTP-network-or-cache fetch step 2.11 - if (!headers.has('User-Agent')) { - headers.set('User-Agent', 'node-fetch'); - } - - // HTTP-network-or-cache fetch step 2.15 - if (request.compress && !headers.has('Accept-Encoding')) { - headers.set('Accept-Encoding', 'gzip, deflate, br'); - } - - let {agent} = request; - if (typeof agent === 'function') { - agent = agent(parsedURL); - } - - if (!headers.has('Connection') && !agent) { - headers.set('Connection', 'close'); - } - - // HTTP-network fetch step 4.2 - // chunked encoding is handled by Node.js - - const search = getSearch(parsedURL); - - // Pass the full URL directly to request(), but overwrite the following - // options: - const options = { - // Overwrite search to retain trailing ? (issue #776) - path: parsedURL.pathname + search, - // The following options are not expressed in the URL - method: request.method, - headers: headers[Symbol.for('nodejs.util.inspect.custom')](), - insecureHTTPParser: request.insecureHTTPParser, - agent - }; - - return { - /** @type {URL} */ - parsedURL, - options - }; -}; - -;// CONCATENATED MODULE: ./node_modules/node-fetch/src/errors/abort-error.js - - -/** - * AbortError interface for cancelled requests - */ -class AbortError extends FetchBaseError { - constructor(message, type = 'aborted') { - super(message, type); - } -} - -// EXTERNAL MODULE: ./node_modules/fetch-blob/from.js + 2 modules -var from = __nccwpck_require__(2777); -;// CONCATENATED MODULE: ./node_modules/node-fetch/src/index.js -/** - * Index.js - * - * a request API compatible with window.fetch - * - * All spec algorithm step numbers are based on https://fetch.spec.whatwg.org/commit-snapshots/ae716822cb3a61843226cd090eefc6589446c1d2/. - */ - - - - - - - - - - - - - - - - - - - - - - - - -const supportedSchemas = new Set(['data:', 'http:', 'https:']); - -/** - * Fetch function - * - * @param {string | URL | import('./request').default} url - Absolute url or Request instance - * @param {*} [options_] - Fetch options - * @return {Promise} - */ -async function fetch(url, options_) { - return new Promise((resolve, reject) => { - // Build request object - const request = new Request(url, options_); - const {parsedURL, options} = getNodeRequestOptions(request); - if (!supportedSchemas.has(parsedURL.protocol)) { - throw new TypeError(`node-fetch cannot load ${url}. URL scheme "${parsedURL.protocol.replace(/:$/, '')}" is not supported.`); - } - - if (parsedURL.protocol === 'data:') { - const data = dist(request.url); - const response = new Response(data, {headers: {'Content-Type': data.typeFull}}); - resolve(response); - return; - } - - // Wrap http.request into fetch - const send = (parsedURL.protocol === 'https:' ? 
external_node_https_namespaceObject : external_node_http_namespaceObject).request; - const {signal} = request; - let response = null; - - const abort = () => { - const error = new AbortError('The operation was aborted.'); - reject(error); - if (request.body && request.body instanceof external_node_stream_namespaceObject.Readable) { - request.body.destroy(error); - } - - if (!response || !response.body) { - return; - } - - response.body.emit('error', error); - }; - - if (signal && signal.aborted) { - abort(); - return; - } - - const abortAndFinalize = () => { - abort(); - finalize(); - }; - - // Send request - const request_ = send(parsedURL.toString(), options); - - if (signal) { - signal.addEventListener('abort', abortAndFinalize); - } - - const finalize = () => { - request_.abort(); - if (signal) { - signal.removeEventListener('abort', abortAndFinalize); - } - }; - - request_.on('error', error => { - reject(new FetchError(`request to ${request.url} failed, reason: ${error.message}`, 'system', error)); - finalize(); - }); - - fixResponseChunkedTransferBadEnding(request_, error => { - if (response && response.body) { - response.body.destroy(error); - } - }); - - /* c8 ignore next 18 */ - if (process.version < 'v14') { - // Before Node.js 14, pipeline() does not fully support async iterators and does not always - // properly handle when the socket close/end events are out of order. - request_.on('socket', s => { - let endedWithEventsCount; - s.prependListener('end', () => { - endedWithEventsCount = s._eventsCount; - }); - s.prependListener('close', hadError => { - // if end happened before close but the socket didn't emit an error, do it now - if (response && endedWithEventsCount < s._eventsCount && !hadError) { - const error = new Error('Premature close'); - error.code = 'ERR_STREAM_PREMATURE_CLOSE'; - response.body.emit('error', error); - } - }); - }); - } - - request_.on('response', response_ => { - request_.setTimeout(0); - const headers = fromRawHeaders(response_.rawHeaders); - - // HTTP fetch step 5 - if (isRedirect(response_.statusCode)) { - // HTTP fetch step 5.2 - const location = headers.get('Location'); - - // HTTP fetch step 5.3 - let locationURL = null; - try { - locationURL = location === null ? null : new URL(location, request.url); - } catch { - // error here can only be invalid URL in Location: header - // do not throw when options.redirect == manual - // let the user extract the errorneous redirect URL - if (request.redirect !== 'manual') { - reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')); - finalize(); - return; - } - } - - // HTTP fetch step 5.5 - switch (request.redirect) { - case 'error': - reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); - finalize(); - return; - case 'manual': - // Nothing to do - break; - case 'follow': { - // HTTP-redirect fetch step 2 - if (locationURL === null) { - break; - } - - // HTTP-redirect fetch step 5 - if (request.counter >= request.follow) { - reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); - finalize(); - return; - } - - // HTTP-redirect fetch step 6 (counter increment) - // Create a new Request object. 
- const requestOptions = { - headers: new Headers(request.headers), - follow: request.follow, - counter: request.counter + 1, - agent: request.agent, - compress: request.compress, - method: request.method, - body: clone(request), - signal: request.signal, - size: request.size, - referrer: request.referrer, - referrerPolicy: request.referrerPolicy - }; - - // when forwarding sensitive headers like "Authorization", - // "WWW-Authenticate", and "Cookie" to untrusted targets, - // headers will be ignored when following a redirect to a domain - // that is not a subdomain match or exact match of the initial domain. - // For example, a redirect from "foo.com" to either "foo.com" or "sub.foo.com" - // will forward the sensitive headers, but a redirect to "bar.com" will not. - // headers will also be ignored when following a redirect to a domain using - // a different protocol. For example, a redirect from "https://foo.com" to "http://foo.com" - // will not forward the sensitive headers - if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) { - for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) { - requestOptions.headers.delete(name); - } - } - - // HTTP-redirect fetch step 9 - if (response_.statusCode !== 303 && request.body && options_.body instanceof external_node_stream_namespaceObject.Readable) { - reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); - finalize(); - return; - } - - // HTTP-redirect fetch step 11 - if (response_.statusCode === 303 || ((response_.statusCode === 301 || response_.statusCode === 302) && request.method === 'POST')) { - requestOptions.method = 'GET'; - requestOptions.body = undefined; - requestOptions.headers.delete('content-length'); - } - - // HTTP-redirect fetch step 14 - const responseReferrerPolicy = parseReferrerPolicyFromHeader(headers); - if (responseReferrerPolicy) { - requestOptions.referrerPolicy = responseReferrerPolicy; - } - - // HTTP-redirect fetch step 15 - resolve(fetch(new Request(locationURL, requestOptions))); - finalize(); - return; - } - - default: - return reject(new TypeError(`Redirect option '${request.redirect}' is not a valid value of RequestRedirect`)); - } - } - - // Prepare response - if (signal) { - response_.once('end', () => { - signal.removeEventListener('abort', abortAndFinalize); - }); - } - - let body = (0,external_node_stream_namespaceObject.pipeline)(response_, new external_node_stream_namespaceObject.PassThrough(), error => { - if (error) { - reject(error); - } - }); - // see https://github.com/nodejs/node/pull/29376 - /* c8 ignore next 3 */ - if (process.version < 'v12.10') { - response_.on('aborted', abortAndFinalize); - } - - const responseOptions = { - url: request.url, - status: response_.statusCode, - statusText: response_.statusMessage, - headers, - size: request.size, - counter: request.counter, - highWaterMark: request.highWaterMark - }; - - // HTTP-network fetch step 12.1.1.3 - const codings = headers.get('Content-Encoding'); - - // HTTP-network fetch step 12.1.1.4: handle content codings - - // in following scenarios we ignore compression support - // 1. compression support is disabled - // 2. HEAD request - // 3. no Content-Encoding header - // 4. no content response (204) - // 5. 
content not modified response (304) - if (!request.compress || request.method === 'HEAD' || codings === null || response_.statusCode === 204 || response_.statusCode === 304) { - response = new Response(body, responseOptions); - resolve(response); - return; - } - - // For Node v6+ - // Be less strict when decoding compressed responses, since sometimes - // servers send slightly invalid responses that are still accepted - // by common browsers. - // Always using Z_SYNC_FLUSH is what cURL does. - const zlibOptions = { - flush: external_node_zlib_namespaceObject.Z_SYNC_FLUSH, - finishFlush: external_node_zlib_namespaceObject.Z_SYNC_FLUSH - }; - - // For gzip - if (codings === 'gzip' || codings === 'x-gzip') { - body = (0,external_node_stream_namespaceObject.pipeline)(body, external_node_zlib_namespaceObject.createGunzip(zlibOptions), error => { - if (error) { - reject(error); - } - }); - response = new Response(body, responseOptions); - resolve(response); - return; - } - - // For deflate - if (codings === 'deflate' || codings === 'x-deflate') { - // Handle the infamous raw deflate response from old servers - // a hack for old IIS and Apache servers - const raw = (0,external_node_stream_namespaceObject.pipeline)(response_, new external_node_stream_namespaceObject.PassThrough(), error => { - if (error) { - reject(error); - } - }); - raw.once('data', chunk => { - // See http://stackoverflow.com/questions/37519828 - if ((chunk[0] & 0x0F) === 0x08) { - body = (0,external_node_stream_namespaceObject.pipeline)(body, external_node_zlib_namespaceObject.createInflate(), error => { - if (error) { - reject(error); - } - }); - } else { - body = (0,external_node_stream_namespaceObject.pipeline)(body, external_node_zlib_namespaceObject.createInflateRaw(), error => { - if (error) { - reject(error); - } - }); - } - - response = new Response(body, responseOptions); - resolve(response); - }); - raw.once('end', () => { - // Some old IIS servers return zero-length OK deflate responses, so - // 'data' is never emitted. 
See https://github.com/node-fetch/node-fetch/pull/903 - if (!response) { - response = new Response(body, responseOptions); - resolve(response); - } - }); - return; - } - - // For br - if (codings === 'br') { - body = (0,external_node_stream_namespaceObject.pipeline)(body, external_node_zlib_namespaceObject.createBrotliDecompress(), error => { - if (error) { - reject(error); - } - }); - response = new Response(body, responseOptions); - resolve(response); - return; - } - - // Otherwise, use response as-is - response = new Response(body, responseOptions); - resolve(response); - }); +/***/ }), - // eslint-disable-next-line promise/prefer-await-to-then - writeToStream(request_, request).catch(reject); - }); -} +/***/ 2037: +/***/ ((module) => { -function fixResponseChunkedTransferBadEnding(request, errorCallback) { - const LAST_CHUNK = external_node_buffer_namespaceObject.Buffer.from('0\r\n\r\n'); +"use strict"; +module.exports = require("os"); - let isChunkedTransfer = false; - let properLastChunkReceived = false; - let previousChunk; +/***/ }), - request.on('response', response => { - const {headers} = response; - isChunkedTransfer = headers['transfer-encoding'] === 'chunked' && !headers['content-length']; - }); +/***/ 1017: +/***/ ((module) => { - request.on('socket', socket => { - const onSocketClose = () => { - if (isChunkedTransfer && !properLastChunkReceived) { - const error = new Error('Premature close'); - error.code = 'ERR_STREAM_PREMATURE_CLOSE'; - errorCallback(error); - } - }; +"use strict"; +module.exports = require("path"); - const onData = buf => { - properLastChunkReceived = external_node_buffer_namespaceObject.Buffer.compare(buf.slice(-5), LAST_CHUNK) === 0; +/***/ }), - // Sometimes final 0-length chunk and end of message code are in separate packets - if (!properLastChunkReceived && previousChunk) { - properLastChunkReceived = ( - external_node_buffer_namespaceObject.Buffer.compare(previousChunk.slice(-3), LAST_CHUNK.slice(0, 3)) === 0 && - external_node_buffer_namespaceObject.Buffer.compare(buf.slice(-2), LAST_CHUNK.slice(3)) === 0 - ); - } +/***/ 4404: +/***/ ((module) => { - previousChunk = buf; - }; +"use strict"; +module.exports = require("tls"); - socket.prependListener('close', onSocketClose); - socket.on('data', onData); +/***/ }), - request.on('close', () => { - socket.removeListener('close', onSocketClose); - socket.removeListener('data', onData); - }); - }); -} +/***/ 3837: +/***/ ((module) => { +"use strict"; +module.exports = require("util"); /***/ }), @@ -18023,106 +11019,11 @@ module.exports = JSON.parse('{"$schema":"http://json-schema.org/draft-07/schema# /******/ return module.exports; /******/ } /******/ -/******/ // expose the modules object (__webpack_modules__) -/******/ __nccwpck_require__.m = __webpack_modules__; -/******/ /************************************************************************/ -/******/ /* webpack/runtime/define property getters */ -/******/ (() => { -/******/ // define getter functions for harmony exports -/******/ __nccwpck_require__.d = (exports, definition) => { -/******/ for(var key in definition) { -/******/ if(__nccwpck_require__.o(definition, key) && !__nccwpck_require__.o(exports, key)) { -/******/ Object.defineProperty(exports, key, { enumerable: true, get: definition[key] }); -/******/ } -/******/ } -/******/ }; -/******/ })(); -/******/ -/******/ /* webpack/runtime/ensure chunk */ -/******/ (() => { -/******/ __nccwpck_require__.f = {}; -/******/ // This file contains only the entry chunk. 
-/******/ // The chunk loading function for additional chunks -/******/ __nccwpck_require__.e = (chunkId) => { -/******/ return Promise.all(Object.keys(__nccwpck_require__.f).reduce((promises, key) => { -/******/ __nccwpck_require__.f[key](chunkId, promises); -/******/ return promises; -/******/ }, [])); -/******/ }; -/******/ })(); -/******/ -/******/ /* webpack/runtime/get javascript chunk filename */ -/******/ (() => { -/******/ // This function allow to reference async chunks -/******/ __nccwpck_require__.u = (chunkId) => { -/******/ // return url for filenames based on template -/******/ return "" + chunkId + ".index.js"; -/******/ }; -/******/ })(); -/******/ -/******/ /* webpack/runtime/hasOwnProperty shorthand */ -/******/ (() => { -/******/ __nccwpck_require__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop)) -/******/ })(); -/******/ -/******/ /* webpack/runtime/make namespace object */ -/******/ (() => { -/******/ // define __esModule on exports -/******/ __nccwpck_require__.r = (exports) => { -/******/ if(typeof Symbol !== 'undefined' && Symbol.toStringTag) { -/******/ Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' }); -/******/ } -/******/ Object.defineProperty(exports, '__esModule', { value: true }); -/******/ }; -/******/ })(); -/******/ /******/ /* webpack/runtime/compat */ /******/ /******/ if (typeof __nccwpck_require__ !== 'undefined') __nccwpck_require__.ab = __dirname + "/"; /******/ -/******/ /* webpack/runtime/require chunk loading */ -/******/ (() => { -/******/ // no baseURI -/******/ -/******/ // object to store loaded chunks -/******/ // "1" means "loaded", otherwise not loaded yet -/******/ var installedChunks = { -/******/ 179: 1 -/******/ }; -/******/ -/******/ // no on chunks loaded -/******/ -/******/ var installChunk = (chunk) => { -/******/ var moreModules = chunk.modules, chunkIds = chunk.ids, runtime = chunk.runtime; -/******/ for(var moduleId in moreModules) { -/******/ if(__nccwpck_require__.o(moreModules, moduleId)) { -/******/ __nccwpck_require__.m[moduleId] = moreModules[moduleId]; -/******/ } -/******/ } -/******/ if(runtime) runtime(__nccwpck_require__); -/******/ for(var i = 0; i < chunkIds.length; i++) -/******/ installedChunks[chunkIds[i]] = 1; -/******/ -/******/ }; -/******/ -/******/ // require() chunk loading for javascript -/******/ __nccwpck_require__.f.require = (chunkId, promises) => { -/******/ // "1" is the signal for "already loaded" -/******/ if(!installedChunks[chunkId]) { -/******/ if(true) { // all chunks have JS -/******/ installChunk(require("./" + __nccwpck_require__.u(chunkId))); -/******/ } else installedChunks[chunkId] = 1; -/******/ } -/******/ }; -/******/ -/******/ // no external install chunk -/******/ -/******/ // no HMR -/******/ -/******/ // no HMR manifest -/******/ })(); -/******/ /************************************************************************/ /******/ /******/ // startup diff --git a/dist/index.js.map b/dist/index.js.map index d9b0db5..de7c048 100644 --- a/dist/index.js.map +++ b/dist/index.js.map @@ -1 +1 @@ 
-{"version":3,"file":"index.js","mappings":";;;;;;;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC3BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC3BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACTA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC1DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACpBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;;;;;;;;;AC5EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC9CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChVA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1RA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5lBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1JA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACxrBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;
;;;;;AC9IA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1HA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACXA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1JA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjLA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AClBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AClCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7fA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3HA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACxmBA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACNA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvBA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACLA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACVA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACXA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACXA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACZA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AClIA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACTA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACRA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3FA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACLA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACxBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC7CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACfA;;;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvQA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACl6CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtBA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;ACpBA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACpnIA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;AClDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;;;;;;;;;;AChDA;;ACAA;;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;ACzGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;;;AC7PA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACvCA;;ACAA;;ACAA;;ACAA;;ACAA;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;ACpDA;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;ACzBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;ACtFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AC5YA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AC1QA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;ACVA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AC5IA;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;ACRA;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;ACnVA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AC5TA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;ACTA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAQA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;;;;;;;;;;AChaA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;AChCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;ACRA;AACA;AACA;AACA;AACA;;;;;ACJA;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;;;;ACNA;AACA;;;;ACDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;AErCA;AACA;AACA;AACA","sources":["../webpack://deploying-marker-with-issue/./lib/actions/attach-marker.js","../webpack://deploying-marker-with-issue/./lib/actions/check-marker-attached.js","../webpack://deploying-marker-with-issue/./lib/actions/check-marker-detached-or-assigned-actor.js","../webpack://deploying-marker-with-issue/./lib/actions/check-marker-detached.js","../webpack://deploying-marker-with-issue/./lib/actions/detach-marker.js","../webpack://deploying-marker-with-issue/./lib/common/env.js","../webpack://deploying-marker-with-issue/./lib/common/error.js","../webpack://deploying-marker-with-issue/./lib/common/input.js","../webpack://deploying-marker-with-issue/./lib/common/label.js","../webpack://deploying-marker-with-issue/./lib/common/messages.js","../webpack://deploying-marker-with-issue/./lib/common/validater.js","../webpack://deploying-marker-with-issue/./lib/github/common.js","../webpack://deploying-marker-with-issue/./lib/github/issue-get.js","../webpack://deploying-marker-with-issue/./lib/github/issue-update.js","../webpack://deploying-marker-with-issue/./lib/github/label-create.js","../webpack://deploying-marker-with-issue/./lib/github/label-get.js","../webpack://deploying-marker-with-issue/./lib/github/user-get.js","../webpack://deploying-marker-with-issue/./lib/main.js","../webpack://deploying-marker-with-issue/./node_modules/@actions/core/lib/command.js","../webpack://deploying-marker-with-issue/./node_modules/@actions/core/lib/core.js","../webpack://deploying-marker-with-issue/./node_modules/@actions/core/lib/file-command.js","../webpack://deploying-marker-with-issue/./node_modules/@actions/core/lib/oidc-utils.js","../webpack://deploying-marker-with-issue/./node_modules/@actions/core/lib/path-utils.js","../webpack://deploying-marker-with-issue/./node_modules/@actions/core/lib/summary.js","../webpack://deploying-marker-with-issue/./node_modules/@actions/core/lib/utils.js","../webpack://deploying-marker-with-issue/./node_modules/@actions/http-client/lib/auth.js","../webpack://deploying-marker-with-issue/./node_modules/@actions/http-client/lib/index.js","../webpack://deploying-marker-with-issue/./node_modules/@actions/http-client/lib/proxy.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/ajv.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/compile/codegen/code.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/compile/codegen/index.js","../webpack://
deploying-marker-with-issue/./node_modules/ajv/dist/compile/codegen/scope.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/compile/errors.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/compile/index.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/compile/names.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/compile/ref_error.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/compile/resolve.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/compile/rules.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/compile/util.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/compile/validate/applicability.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/compile/validate/boolSchema.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/compile/validate/dataType.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/compile/validate/defaults.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/compile/validate/index.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/compile/validate/keyword.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/compile/validate/subschema.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/core.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/runtime/equal.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/runtime/ucs2length.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/runtime/uri.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/runtime/validation_error.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/applicator/additionalItems.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/applicator/additionalProperties.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/applicator/allOf.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/applicator/anyOf.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/applicator/contains.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/applicator/dependencies.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/applicator/if.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/applicator/index.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/applicator/items.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/applicator/items2020.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/applicator/not.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/applicator/oneOf.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/applicator/patternProperties.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/applicator/prefixItems.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/applicator/properties.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/applicator/propertyNames.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/applicator/thenElse.js","../webpack://
deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/code.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/core/id.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/core/index.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/core/ref.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/discriminator/index.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/discriminator/types.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/draft7.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/format/format.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/format/index.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/metadata.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/validation/const.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/validation/enum.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/validation/index.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/validation/limitItems.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/validation/limitLength.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/validation/limitNumber.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/validation/limitProperties.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/validation/multipleOf.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/validation/pattern.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/validation/required.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/validation/uniqueItems.js","../webpack://deploying-marker-with-issue/./node_modules/fast-deep-equal/index.js","../webpack://deploying-marker-with-issue/./node_modules/json-schema-traverse/index.js","../webpack://deploying-marker-with-issue/./node_modules/node-domexception/index.js","../webpack://deploying-marker-with-issue/./node_modules/tunnel/index.js","../webpack://deploying-marker-with-issue/./node_modules/tunnel/lib/tunnel.js","../webpack://deploying-marker-with-issue/./node_modules/uri-js/dist/es5/uri.all.js","../webpack://deploying-marker-with-issue/./node_modules/uuid/dist/index.js","../webpack://deploying-marker-with-issue/./node_modules/uuid/dist/md5.js","../webpack://deploying-marker-with-issue/./node_modules/uuid/dist/nil.js","../webpack://deploying-marker-with-issue/./node_modules/uuid/dist/parse.js","../webpack://deploying-marker-with-issue/./node_modules/uuid/dist/regex.js","../webpack://deploying-marker-with-issue/./node_modules/uuid/dist/rng.js","../webpack://deploying-marker-with-issue/./node_modules/uuid/dist/sha1.js","../webpack://deploying-marker-with-issue/./node_modules/uuid/dist/stringify.js","../webpack://deploying-marker-with-issue/./node_modules/uuid/dist/v1.js","../webpack://deploying-marker-with-issue/./node_modules/uuid/dist/v3.js","../webpack://deploying-marker-with-issue/./node_modules/uuid/dist/v35.js","../webpack://deploying-marker-with-issue/./node_modules/uuid/dist/v4.js","../webpack://deploying-marker-with-issue/./node_modules/uuid/dist/v5.js","..
/webpack://deploying-marker-with-issue/./node_modules/uuid/dist/validate.js","../webpack://deploying-marker-with-issue/./node_modules/uuid/dist/version.js","../webpack://deploying-marker-with-issue/./node_modules/web-streams-polyfill/dist/ponyfill.es2018.js","../webpack://deploying-marker-with-issue/external node-commonjs \"assert\"","../webpack://deploying-marker-with-issue/external node-commonjs \"buffer\"","../webpack://deploying-marker-with-issue/external node-commonjs \"crypto\"","../webpack://deploying-marker-with-issue/external node-commonjs \"events\"","../webpack://deploying-marker-with-issue/external node-commonjs \"fs\"","../webpack://deploying-marker-with-issue/external node-commonjs \"http\"","../webpack://deploying-marker-with-issue/external node-commonjs \"https\"","../webpack://deploying-marker-with-issue/external node-commonjs \"net\"","../webpack://deploying-marker-with-issue/external node-commonjs \"node:process\"","../webpack://deploying-marker-with-issue/external node-commonjs \"node:stream/web\"","../webpack://deploying-marker-with-issue/external node-commonjs \"os\"","../webpack://deploying-marker-with-issue/external node-commonjs \"path\"","../webpack://deploying-marker-with-issue/external node-commonjs \"tls\"","../webpack://deploying-marker-with-issue/external node-commonjs \"util\"","../webpack://deploying-marker-with-issue/external node-commonjs \"worker_threads\"","../webpack://deploying-marker-with-issue/./node_modules/fetch-blob/streams.cjs","../webpack://deploying-marker-with-issue/./node_modules/fetch-blob/file.js","../webpack://deploying-marker-with-issue/external node-commonjs \"node:fs\"","../webpack://deploying-marker-with-issue/external node-commonjs \"node:path\"","../webpack://deploying-marker-with-issue/./node_modules/fetch-blob/from.js","../webpack://deploying-marker-with-issue/./node_modules/fetch-blob/index.js","../webpack://deploying-marker-with-issue/./node_modules/formdata-polyfill/esm.min.js","../webpack://deploying-marker-with-issue/external node-commonjs \"node:http\"","../webpack://deploying-marker-with-issue/external node-commonjs \"node:https\"","../webpack://deploying-marker-with-issue/external node-commonjs \"node:zlib\"","../webpack://deploying-marker-with-issue/external node-commonjs \"node:stream\"","../webpack://deploying-marker-with-issue/external node-commonjs \"node:buffer\"","../webpack://deploying-marker-with-issue/./node_modules/data-uri-to-buffer/dist/index.js","../webpack://deploying-marker-with-issue/external node-commonjs \"node:util\"","../webpack://deploying-marker-with-issue/./node_modules/node-fetch/src/errors/base.js","../webpack://deploying-marker-with-issue/./node_modules/node-fetch/src/errors/fetch-error.js","../webpack://deploying-marker-with-issue/./node_modules/node-fetch/src/utils/is.js","../webpack://deploying-marker-with-issue/./node_modules/node-fetch/src/body.js","../webpack://deploying-marker-with-issue/./node_modules/node-fetch/src/headers.js","../webpack://deploying-marker-with-issue/./node_modules/node-fetch/src/utils/is-redirect.js","../webpack://deploying-marker-with-issue/./node_modules/node-fetch/src/response.js","../webpack://deploying-marker-with-issue/external node-commonjs \"node:url\"","../webpack://deploying-marker-with-issue/./node_modules/node-fetch/src/utils/get-search.js","../webpack://deploying-marker-with-issue/external node-commonjs 
\"node:net\"","../webpack://deploying-marker-with-issue/./node_modules/node-fetch/src/utils/referrer.js","../webpack://deploying-marker-with-issue/./node_modules/node-fetch/src/request.js","../webpack://deploying-marker-with-issue/./node_modules/node-fetch/src/errors/abort-error.js","../webpack://deploying-marker-with-issue/./node_modules/node-fetch/src/index.js","../webpack://deploying-marker-with-issue/webpack/bootstrap","../webpack://deploying-marker-with-issue/webpack/runtime/define property getters","../webpack://deploying-marker-with-issue/webpack/runtime/ensure chunk","../webpack://deploying-marker-with-issue/webpack/runtime/get javascript chunk filename","../webpack://deploying-marker-with-issue/webpack/runtime/hasOwnProperty shorthand","../webpack://deploying-marker-with-issue/webpack/runtime/make namespace object","../webpack://deploying-marker-with-issue/webpack/runtime/compat","../webpack://deploying-marker-with-issue/webpack/runtime/require chunk loading","../webpack://deploying-marker-with-issue/webpack/before-startup","../webpack://deploying-marker-with-issue/webpack/startup","../webpack://deploying-marker-with-issue/webpack/after-startup"],"sourcesContent":["\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.attachMarker = void 0;\nconst issue_get_1 = require(\"../github/issue-get\");\nconst issue_update_1 = require(\"../github/issue-update\");\nconst label_get_1 = require(\"../github/label-get\");\nconst label_1 = require(\"../common/label\");\nconst label_create_1 = require(\"../github/label-create\");\nconst error_1 = require(\"../common/error\");\nconst messages_1 = require(\"../common/messages\");\nconst attachMarker = (input) => __awaiter(void 0, void 0, void 0, function* () {\n const { repoOwner, repoName, issueNumber, exitWithError, actorId } = input;\n const attached = yield (0, label_1.attachedMarkerOnIssue)(repoOwner, repoName, issueNumber);\n if (attached) {\n if (exitWithError) {\n (0, error_1.onError)((0, messages_1.getMessage)('error:label_already_attached'));\n }\n return;\n }\n const beforeIssueData = yield (0, issue_get_1.getIssue)({ repoOwner, repoName, issueNumber });\n const { issue } = beforeIssueData.data.organization.repository;\n const labels = (yield (0, label_get_1.getLabels)({ repoOwner, repoName, labelName: label_1.LabelName }))\n .data.organization.repository.labels.nodes;\n const label = labels.find(({ name }) => name === label_1.LabelName);\n let labelId = label === null || label === void 0 ? 
void 0 : label.id;\n if (labelId == null) {\n const createLabelResult = yield (0, label_create_1.createLabel)({\n repoOwner,\n repoName,\n labelName: label_1.LabelName\n });\n labelId = createLabelResult.node_id;\n }\n yield (0, issue_update_1.updateIssue)({\n issueId: issue.id,\n body: issue.body,\n assigneeIds: [actorId],\n labelIds: [labelId]\n });\n});\nexports.attachMarker = attachMarker;\n","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.checkMarkerAttached = void 0;\nconst error_1 = require(\"../common/error\");\nconst label_1 = require(\"../common/label\");\nconst messages_1 = require(\"../common/messages\");\nconst checkMarkerAttached = (input) => __awaiter(void 0, void 0, void 0, function* () {\n const { repoOwner, repoName, issueNumber, exitWithError } = input;\n const attached = yield (0, label_1.attachedMarkerOnIssue)(repoOwner, repoName, issueNumber);\n if (!attached) {\n if (exitWithError) {\n (0, error_1.onError)((0, messages_1.getMessage)('error:label_already_detached'));\n }\n else {\n (0, error_1.onWarning)((0, messages_1.getMessage)('warning:label_already_detached'));\n }\n }\n});\nexports.checkMarkerAttached = checkMarkerAttached;\n","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.checkMarkerDetachedOrAssignedActor = void 0;\nconst label_1 = require(\"../common/label\");\nconst error_1 = require(\"../common/error\");\nconst messages_1 = require(\"../common/messages\");\nconst issue_get_1 = require(\"../github/issue-get\");\nconst checkMarkerDetachedOrAssignedActor = (input) => __awaiter(void 0, void 0, void 0, function* () {\n const { repoOwner, repoName, issueNumber, exitWithError, actorId } = input;\n const attached = yield (0, label_1.attachedMarkerOnIssue)(repoOwner, repoName, issueNumber);\n const { assignees } = (yield (0, issue_get_1.getIssue)({ repoOwner, repoName, issueNumber })).data\n .organization.repository.issue;\n const assigned = assignees.nodes.some(({ id }) => id === actorId);\n if (!attached || assigned) {\n return;\n }\n else if (exitWithError) {\n (0, error_1.onError)((0, messages_1.getMessage)('error:label_already_attached_and_not_assigned_actor'));\n }\n else {\n (0, error_1.onWarning)((0, messages_1.getMessage)('warning:label_already_attached_and_not_assigned_actor'));\n }\n});\nexports.checkMarkerDetachedOrAssignedActor = checkMarkerDetachedOrAssignedActor;\n","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.checkMarkerDetached = void 0;\nconst label_1 = require(\"../common/label\");\nconst error_1 = require(\"../common/error\");\nconst messages_1 = require(\"../common/messages\");\nconst checkMarkerDetached = (input) => __awaiter(void 0, void 0, void 0, function* () {\n const { repoOwner, repoName, issueNumber, exitWithError } = input;\n const attached = yield (0, label_1.attachedMarkerOnIssue)(repoOwner, repoName, issueNumber);\n if (attached) {\n if (exitWithError) {\n (0, error_1.onError)((0, messages_1.getMessage)('error:label_already_attached'));\n }\n else {\n (0, error_1.onWarning)((0, messages_1.getMessage)('warning:label_already_attached'));\n }\n }\n});\nexports.checkMarkerDetached = checkMarkerDetached;\n","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.detachMarker = void 0;\nconst issue_get_1 = require(\"../github/issue-get\");\nconst issue_update_1 = require(\"../github/issue-update\");\nconst label_1 = require(\"../common/label\");\nconst error_1 = require(\"../common/error\");\nconst messages_1 = require(\"../common/messages\");\nconst detachMarker = (input) => __awaiter(void 0, void 0, void 0, function* () {\n const { repoOwner, repoName, issueNumber, exitWithError } = input;\n const attached = yield (0, label_1.attachedMarkerOnIssue)(repoOwner, repoName, issueNumber);\n if (!attached) {\n if (exitWithError) {\n (0, error_1.onError)((0, messages_1.getMessage)('error:label_already_detached'));\n }\n return;\n }\n const issue = yield (0, issue_get_1.getIssue)({ repoOwner, repoName, issueNumber });\n const { id: issueId, body, labels } = issue.data.organization.repository.issue;\n const labelIds = labels.nodes\n .filter(({ name }) => name !== label_1.LabelName)\n .map(({ id }) => id);\n yield (0, issue_update_1.updateIssue)({ issueId, body, assigneeIds: [], labelIds });\n});\nexports.detachMarker = detachMarker;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getEnvVar = void 0;\nconst getEnvVar = (name) => {\n const value = process.env[name];\n if (!value)\n throw new Error(`Missing environment variable ${name}`);\n return value;\n};\nexports.getEnvVar = getEnvVar;\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.onWarning = exports.onError = void 0;\nconst core = __importStar(require(\"@actions/core\"));\nconst onError = (message) => {\n core.setFailed(message);\n};\nexports.onError = onError;\nconst onWarning = (message) => {\n core.warning(message);\n};\nexports.onWarning = onWarning;\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getInput = void 0;\nconst core = __importStar(require(\"@actions/core\"));\nconst user_get_1 = require(\"../github/user-get\");\nconst env_1 = require(\"./env\");\nconst getInput = () => __awaiter(void 0, void 0, void 0, function* () {\n const action = core.getInput('action', { required: true });\n const issueNumber = parseInt(core.getInput('issue-number', { required: true }), 10);\n const exitWithError = core.getBooleanInput('exit-with-error', {\n required: false\n });\n // https://docs.github.com/en/actions/learn-github-actions/environment-variables\n const [repoOwner, repoName] = (0, env_1.getEnvVar)('GITHUB_REPOSITORY').split('/');\n const actor = (0, env_1.getEnvVar)('GITHUB_ACTOR');\n const { id: actorId } = (yield (0, user_get_1.getUser)({ login: actor })).data.user;\n return {\n action,\n issueNumber,\n exitWithError,\n repoOwner,\n repoName,\n actor,\n actorId\n };\n});\nexports.getInput = getInput;\n","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.attachedMarkerOnIssue = exports.DefaultLabelDescription = exports.DefaultLabelColor = exports.LabelName = void 0;\nconst issue_get_1 = require(\"../github/issue-get\");\nexports.LabelName = 'Deploying';\nexports.DefaultLabelColor = 'FF0000';\nexports.DefaultLabelDescription = 'This label indicates that deployment is in progress.';\nconst attachedMarkerOnIssue = (repoOwner, repoName, issueNumber) => __awaiter(void 0, void 0, void 0, function* () {\n const issueData = yield (0, issue_get_1.getIssue)({ repoOwner, repoName, issueNumber });\n const labels = issueData.data.organization.repository.issue.labels.nodes;\n const label = labels.find(({ name }) => name === exports.LabelName);\n return label != null;\n});\nexports.attachedMarkerOnIssue = attachedMarkerOnIssue;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getMessage = void 0;\nconst label_1 = require(\"./label\");\nconst getMessage = (key) => {\n const message = Messages[key];\n if (message != null) {\n return message;\n }\n throw new Error(`Unknown message key: ${key}`);\n};\nexports.getMessage = getMessage;\nconst Messages = {\n 'error:label_already_attached': [\n `ERROR: \"${label_1.LabelName}\" label already attached.`,\n '- Please check latest issue comments',\n `- If to detach \"${label_1.LabelName}\" label is no problem, please detach the label manually.`\n ].join('\\n'),\n 'error:label_already_detached': [\n `ERROR: \"${label_1.LabelName}\" label already detached.`,\n '- Please check latest issue comments',\n `- If to attach \"${label_1.LabelName}\" label is no problem, please attach the label manually.`\n ].join('\\n'),\n 'error:label_already_attached_and_not_assigned_actor': [\n `ERROR: \"${label_1.LabelName}\" label already attached and not assigned actor.`,\n '- Please check latest issue comments',\n `- If to detach \"${label_1.LabelName}\" label is no problem, please attach the label manually.`,\n `- If to remove actor in assignees is no problem, please remove assign manually.`\n ].join('\\n'),\n 'warning:label_already_attached': `WARNING: \"${label_1.LabelName}\" label already attached, but not errored because exit-with-error is false.`,\n 'warning:label_already_detached': `WARNING: \"${label_1.LabelName}\" label already detached, but not errored because exit-with-error is false.`,\n 'warning:label_already_attached_and_not_assigned_actor': `WARNING: \"${label_1.LabelName}\" label already attached and not assigned actor, but not errored because exit-with-error is false.`\n};\n","\"use strict\";\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? 
mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.buildValidator = void 0;\nconst ajv_1 = __importDefault(require(\"ajv\"));\nconst ajv = new ajv_1.default({ allErrors: true });\nconst buildValidator = (schema) => {\n const validate = ajv.compile(schema);\n return (params) => {\n if (validate(params)) {\n return params;\n }\n throw new Error(`Schema Error: ${JSON.stringify(validate.errors)}`);\n };\n};\nexports.buildValidator = buildValidator;\n","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.fetchGitHubApiV3 = exports.fetchGitHubGraphQL = void 0;\nconst node_fetch_1 = __importDefault(require(\"node-fetch\"));\nconst env_1 = require(\"../common/env\");\nconst fetchGitHubGraphQL = (query, variables = {}) => __awaiter(void 0, void 0, void 0, function* () {\n const response = yield (0, node_fetch_1.default)('https://api.github.com/graphql', {\n method: 'POST',\n headers: {\n Authorization: `token ${(0, env_1.getEnvVar)('GITHUB_TOKEN')}`\n },\n body: JSON.stringify({ query, variables })\n });\n if (response.status !== 200)\n throw new Error(`Invalid status code: ${response.status}`);\n const body = yield response.json();\n if (typeof body === 'object' && body != null && 'errors' in body) {\n throw new Error(`Response has error: ${JSON.stringify(body)}`);\n }\n return body;\n});\nexports.fetchGitHubGraphQL = fetchGitHubGraphQL;\nconst fetchGitHubApiV3 = (method, path, body = '') => __awaiter(void 0, void 0, void 0, function* () {\n var _a;\n const response = yield (0, node_fetch_1.default)(`https://api.github.com${path}`, {\n method,\n headers: {\n Accept: 'application/vnd.github.v3+json',\n Authorization: `token ${(0, env_1.getEnvVar)('GITHUB_TOKEN')}`\n },\n body\n });\n if (!response.ok)\n throw new Error(`Invalid status code: ${response.status} => ${(_a = (yield response.text())) !== null && _a !== void 0 ? _a : '(Empty body)'}`);\n return response;\n});\nexports.fetchGitHubApiV3 = fetchGitHubApiV3;\n","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getIssue = void 0;\nconst validater_1 = require(\"../common/validater\");\nconst common_1 = require(\"./common\");\nconst IssueSchema = {\n type: 'object',\n additionalProperties: false,\n required: ['data'],\n properties: {\n data: {\n type: 'object',\n additionalProperties: false,\n required: ['organization'],\n properties: {\n organization: {\n type: 'object',\n additionalProperties: false,\n required: ['repository'],\n properties: {\n repository: {\n type: 'object',\n additionalProperties: false,\n required: ['issue'],\n properties: {\n issue: {\n type: 'object',\n additionalProperties: false,\n required: ['id', 'body', 'assignees', 'labels'],\n properties: {\n id: { type: 'string' },\n body: { type: 'string' },\n assignees: {\n type: 'object',\n additionalProperties: false,\n required: ['nodes'],\n properties: {\n nodes: {\n type: 'array',\n items: {\n type: 'object',\n additionalProperties: false,\n required: ['id', 'login'],\n properties: {\n id: { type: 'string' },\n login: { type: 'string' }\n }\n }\n }\n }\n },\n labels: {\n type: 'object',\n additionalProperties: false,\n required: ['nodes'],\n properties: {\n nodes: {\n type: 'array',\n items: {\n type: 'object',\n additionalProperties: false,\n required: ['id', 'name'],\n properties: {\n id: { type: 'string' },\n name: { type: 'string' }\n }\n }\n }\n }\n }\n }\n }\n }\n }\n }\n }\n }\n }\n }\n};\nconst getIssue = (args) => __awaiter(void 0, void 0, void 0, function* () {\n const result = yield (0, common_1.fetchGitHubGraphQL)(`query ($repoOwner: String!, $repoName: String!, $issueNumber: Int!) {\n organization(login: $repoOwner) {\n repository(name: $repoName) {\n issue(number: $issueNumber) {\n id\n body\n assignees(first: 100) {\n nodes {\n id\n login\n }\n }\n labels(first: 100) {\n nodes {\n id\n name\n }\n }\n }\n }\n }\n }`, Object.assign({}, args));\n return (0, validater_1.buildValidator)(IssueSchema)(result);\n});\nexports.getIssue = getIssue;\n","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.updateIssue = void 0;\nconst common_1 = require(\"./common\");\nconst updateIssue = (args) => __awaiter(void 0, void 0, void 0, function* () {\n yield (0, common_1.fetchGitHubGraphQL)(`mutation ($issueId: ID!, $body: String!, $assigneeIds: [ID!] 
$labelIds: [ID!]) {\n updateIssue(input: {id: $issueId, body: $body, assigneeIds: $assigneeIds, labelIds: $labelIds}) {\n clientMutationId\n }\n }`, Object.assign({}, args));\n});\nexports.updateIssue = updateIssue;\n","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.createLabel = void 0;\nconst common_1 = require(\"./common\");\nconst label_1 = require(\"../common/label\");\nconst validater_1 = require(\"../common/validater\");\n// https://docs.github.com/ja/rest/issues/labels#create-a-label\nconst Schema = {\n title: 'Label',\n description: 'Color-coded labels help you categorize and filter your issues (just like labels in Gmail).',\n type: 'object',\n properties: {\n id: {\n type: 'integer',\n examples: [208045946]\n },\n node_id: {\n type: 'string',\n examples: ['MDU6TGFiZWwyMDgwNDU5NDY=']\n },\n url: {\n description: 'URL for the label',\n type: 'string',\n examples: ['https://api.github.com/repositories/42/labels/bug']\n },\n name: {\n description: 'The name of the label.',\n type: 'string',\n examples: ['bug']\n },\n description: {\n type: ['string', 'null'],\n examples: [\"Something isn't working\"]\n },\n color: {\n description: '6-character hex code, without the leading #, identifying the color',\n type: 'string',\n examples: ['FFFFFF']\n },\n default: {\n type: 'boolean',\n examples: [true]\n }\n },\n required: ['id', 'node_id', 'url', 'name', 'description', 'color', 'default']\n};\nconst createLabel = (args) => __awaiter(void 0, void 0, void 0, function* () {\n const { repoOwner, repoName, labelName, labelColor, labelDescription } = args;\n const response = yield (0, common_1.fetchGitHubApiV3)('POST', `/repos/${repoOwner}/${repoName}/labels`, JSON.stringify({\n name: labelName,\n color: labelColor !== null && labelColor !== void 0 ? labelColor : label_1.DefaultLabelColor,\n description: labelDescription !== null && labelDescription !== void 0 ? labelDescription : label_1.DefaultLabelDescription\n }));\n return (0, validater_1.buildValidator)(Schema)(yield response.json());\n});\nexports.createLabel = createLabel;\n","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getLabels = exports.Schema = void 0;\nconst common_1 = require(\"./common\");\nconst validater_1 = require(\"../common/validater\");\nexports.Schema = {\n type: 'object',\n additionalProperties: false,\n required: ['data'],\n properties: {\n data: {\n type: 'object',\n additionalProperties: false,\n required: ['organization'],\n properties: {\n organization: {\n type: 'object',\n additionalProperties: false,\n required: ['repository'],\n properties: {\n repository: {\n type: 'object',\n additionalProperties: false,\n required: ['labels'],\n properties: {\n labels: {\n type: 'object',\n additionalProperties: false,\n required: ['nodes'],\n properties: {\n nodes: {\n type: 'array',\n items: {\n type: 'object',\n additionalProperties: false,\n required: ['id', 'name'],\n properties: {\n id: { type: 'string' },\n name: { type: 'string' }\n }\n }\n }\n }\n }\n }\n }\n }\n }\n }\n }\n }\n};\nconst getLabels = (args) => __awaiter(void 0, void 0, void 0, function* () {\n const result = yield (0, common_1.fetchGitHubGraphQL)(`query ($repoOwner: String!, $repoName: String!, $labelName: String!) {\n organization(login: $repoOwner) {\n repository(name: $repoName) {\n labels(first: 100, query: $labelName) {\n nodes {\n id\n name\n }\n }\n }\n }\n }`, Object.assign({}, args));\n return (0, validater_1.buildValidator)(exports.Schema)(result);\n});\nexports.getLabels = getLabels;\n","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getUser = void 0;\nconst validater_1 = require(\"../common/validater\");\nconst common_1 = require(\"./common\");\nconst UserSchema = {\n type: 'object',\n additionalProperties: false,\n required: ['data'],\n properties: {\n data: {\n type: 'object',\n additionalProperties: false,\n required: ['user'],\n properties: {\n user: {\n type: 'object',\n additionalProperties: false,\n required: ['id'],\n properties: {\n id: {\n type: 'string'\n }\n }\n }\n }\n }\n }\n};\nconst getUser = (args) => __awaiter(void 0, void 0, void 0, function* () {\n const result = yield (0, common_1.fetchGitHubGraphQL)(`query ($login: String!) {\n user(login: $login) {\n id\n }\n }`, Object.assign({}, args));\n return (0, validater_1.buildValidator)(UserSchema)(result);\n});\nexports.getUser = getUser;\n","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst input_1 = require(\"./common/input\");\nconst attach_marker_1 = require(\"./actions/attach-marker\");\nconst check_marker_attached_1 = require(\"./actions/check-marker-attached\");\nconst check_marker_detached_1 = require(\"./actions/check-marker-detached\");\nconst detach_marker_1 = require(\"./actions/detach-marker\");\nconst error_1 = require(\"./common/error\");\nconst check_marker_detached_or_assigned_actor_1 = require(\"./actions/check-marker-detached-or-assigned-actor\");\nconst run = () => __awaiter(void 0, void 0, void 0, function* () {\n try {\n const input = yield (0, input_1.getInput)();\n switch (input.action) {\n case 'check-marker-attached':\n yield (0, check_marker_attached_1.checkMarkerAttached)(input);\n break;\n case 'check-marker-detached':\n yield (0, check_marker_detached_1.checkMarkerDetached)(input);\n break;\n case 'attach-marker':\n yield (0, attach_marker_1.attachMarker)(input);\n break;\n case 'detach-marker':\n yield (0, detach_marker_1.detachMarker)(input);\n break;\n case 'check-marker-detached-or-assigned-actor':\n yield (0, check_marker_detached_or_assigned_actor_1.checkMarkerDetachedOrAssignedActor)(input);\n break;\n default:\n (0, error_1.onError)(`Undefined action: ${input.action}`);\n }\n }\n catch (error) {\n if (error instanceof Error) {\n (0, error_1.onError)(error.message);\n }\n else {\n (0, error_1.onError)(`ERROR: ${JSON.stringify(error)}`);\n }\n }\n});\nrun();\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.issue = exports.issueCommand = void 0;\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\n/**\n * Commands\n *\n * Command Format:\n * ::name key=value,key=value::message\n *\n * Examples:\n * ::warning::This is the message\n * ::set-env name=MY_VAR::some value\n */\nfunction issueCommand(command, properties, message) {\n const cmd = new Command(command, properties, message);\n process.stdout.write(cmd.toString() + os.EOL);\n}\nexports.issueCommand = issueCommand;\nfunction issue(name, message = '') {\n issueCommand(name, {}, message);\n}\nexports.issue = issue;\nconst CMD_STRING = '::';\nclass Command {\n constructor(command, properties, message) {\n if (!command) {\n command = 'missing.command';\n }\n this.command = command;\n this.properties = properties;\n this.message = message;\n }\n toString() {\n let cmdStr = CMD_STRING + this.command;\n if (this.properties && Object.keys(this.properties).length > 0) {\n cmdStr += ' ';\n let first = true;\n for (const key in this.properties) {\n if (this.properties.hasOwnProperty(key)) {\n const val = this.properties[key];\n if (val) {\n if (first) {\n first = false;\n }\n else {\n cmdStr += ',';\n }\n cmdStr += `${key}=${escapeProperty(val)}`;\n }\n }\n }\n }\n cmdStr += `${CMD_STRING}${escapeData(this.message)}`;\n return cmdStr;\n }\n}\nfunction escapeData(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A');\n}\nfunction escapeProperty(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A')\n .replace(/:/g, '%3A')\n .replace(/,/g, '%2C');\n}\n//# sourceMappingURL=command.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0;\nconst command_1 = require(\"./command\");\nconst file_command_1 = require(\"./file-command\");\nconst utils_1 = require(\"./utils\");\nconst os = __importStar(require(\"os\"));\nconst path = __importStar(require(\"path\"));\nconst uuid_1 = require(\"uuid\");\nconst oidc_utils_1 = require(\"./oidc-utils\");\n/**\n * The code to exit an action\n */\nvar ExitCode;\n(function (ExitCode) {\n /**\n * A code indicating that the action was successful\n */\n ExitCode[ExitCode[\"Success\"] = 0] = \"Success\";\n /**\n * A code indicating that the action was a failure\n */\n ExitCode[ExitCode[\"Failure\"] = 1] = \"Failure\";\n})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));\n//-----------------------------------------------------------------------\n// Variables\n//-----------------------------------------------------------------------\n/**\n * Sets env variable for this action and future actions in the job\n * @param name the name of the variable to set\n * @param val the value of the variable. 
Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction exportVariable(name, val) {\n const convertedVal = utils_1.toCommandValue(val);\n process.env[name] = convertedVal;\n const filePath = process.env['GITHUB_ENV'] || '';\n if (filePath) {\n const delimiter = `ghadelimiter_${uuid_1.v4()}`;\n // These should realistically never happen, but just in case someone finds a way to exploit uuid generation let's not allow keys or values that contain the delimiter.\n if (name.includes(delimiter)) {\n throw new Error(`Unexpected input: name should not contain the delimiter \"${delimiter}\"`);\n }\n if (convertedVal.includes(delimiter)) {\n throw new Error(`Unexpected input: value should not contain the delimiter \"${delimiter}\"`);\n }\n const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`;\n file_command_1.issueCommand('ENV', commandValue);\n }\n else {\n command_1.issueCommand('set-env', { name }, convertedVal);\n }\n}\nexports.exportVariable = exportVariable;\n/**\n * Registers a secret which will get masked from logs\n * @param secret value of the secret\n */\nfunction setSecret(secret) {\n command_1.issueCommand('add-mask', {}, secret);\n}\nexports.setSecret = setSecret;\n/**\n * Prepends inputPath to the PATH (for this action and future actions)\n * @param inputPath\n */\nfunction addPath(inputPath) {\n const filePath = process.env['GITHUB_PATH'] || '';\n if (filePath) {\n file_command_1.issueCommand('PATH', inputPath);\n }\n else {\n command_1.issueCommand('add-path', {}, inputPath);\n }\n process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;\n}\nexports.addPath = addPath;\n/**\n * Gets the value of an input.\n * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed.\n * Returns an empty string if the value is not defined.\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns string\n */\nfunction getInput(name, options) {\n const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';\n if (options && options.required && !val) {\n throw new Error(`Input required and not supplied: ${name}`);\n }\n if (options && options.trimWhitespace === false) {\n return val;\n }\n return val.trim();\n}\nexports.getInput = getInput;\n/**\n * Gets the values of an multiline input. Each value is also trimmed.\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns string[]\n *\n */\nfunction getMultilineInput(name, options) {\n const inputs = getInput(name, options)\n .split('\\n')\n .filter(x => x !== '');\n return inputs;\n}\nexports.getMultilineInput = getMultilineInput;\n/**\n * Gets the input value of the boolean type in the YAML 1.2 \"core schema\" specification.\n * Support boolean input list: `true | True | TRUE | false | False | FALSE` .\n * The return value is also in boolean type.\n * ref: https://yaml.org/spec/1.2/spec.html#id2804923\n *\n * @param name name of the input to get\n * @param options optional. 
See InputOptions.\n * @returns boolean\n */\nfunction getBooleanInput(name, options) {\n const trueValue = ['true', 'True', 'TRUE'];\n const falseValue = ['false', 'False', 'FALSE'];\n const val = getInput(name, options);\n if (trueValue.includes(val))\n return true;\n if (falseValue.includes(val))\n return false;\n throw new TypeError(`Input does not meet YAML 1.2 \"Core Schema\" specification: ${name}\\n` +\n `Support boolean input list: \\`true | True | TRUE | false | False | FALSE\\``);\n}\nexports.getBooleanInput = getBooleanInput;\n/**\n * Sets the value of an output.\n *\n * @param name name of the output to set\n * @param value value to store. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction setOutput(name, value) {\n process.stdout.write(os.EOL);\n command_1.issueCommand('set-output', { name }, value);\n}\nexports.setOutput = setOutput;\n/**\n * Enables or disables the echoing of commands into stdout for the rest of the step.\n * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.\n *\n */\nfunction setCommandEcho(enabled) {\n command_1.issue('echo', enabled ? 'on' : 'off');\n}\nexports.setCommandEcho = setCommandEcho;\n//-----------------------------------------------------------------------\n// Results\n//-----------------------------------------------------------------------\n/**\n * Sets the action status to failed.\n * When the action exits it will be with an exit code of 1\n * @param message add error issue message\n */\nfunction setFailed(message) {\n process.exitCode = ExitCode.Failure;\n error(message);\n}\nexports.setFailed = setFailed;\n//-----------------------------------------------------------------------\n// Logging Commands\n//-----------------------------------------------------------------------\n/**\n * Gets whether Actions Step Debug is on or not\n */\nfunction isDebug() {\n return process.env['RUNNER_DEBUG'] === '1';\n}\nexports.isDebug = isDebug;\n/**\n * Writes debug message to user log\n * @param message debug message\n */\nfunction debug(message) {\n command_1.issueCommand('debug', {}, message);\n}\nexports.debug = debug;\n/**\n * Adds an error issue\n * @param message error issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction error(message, properties = {}) {\n command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);\n}\nexports.error = error;\n/**\n * Adds a warning issue\n * @param message warning issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction warning(message, properties = {}) {\n command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);\n}\nexports.warning = warning;\n/**\n * Adds a notice issue\n * @param message notice issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction notice(message, properties = {}) {\n command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? 
message.toString() : message);\n}\nexports.notice = notice;\n/**\n * Writes info to log with console.log.\n * @param message info message\n */\nfunction info(message) {\n process.stdout.write(message + os.EOL);\n}\nexports.info = info;\n/**\n * Begin an output group.\n *\n * Output until the next `groupEnd` will be foldable in this group\n *\n * @param name The name of the output group\n */\nfunction startGroup(name) {\n command_1.issue('group', name);\n}\nexports.startGroup = startGroup;\n/**\n * End an output group.\n */\nfunction endGroup() {\n command_1.issue('endgroup');\n}\nexports.endGroup = endGroup;\n/**\n * Wrap an asynchronous function call in a group.\n *\n * Returns the same type as the function itself.\n *\n * @param name The name of the group\n * @param fn The function to wrap in the group\n */\nfunction group(name, fn) {\n return __awaiter(this, void 0, void 0, function* () {\n startGroup(name);\n let result;\n try {\n result = yield fn();\n }\n finally {\n endGroup();\n }\n return result;\n });\n}\nexports.group = group;\n//-----------------------------------------------------------------------\n// Wrapper action state\n//-----------------------------------------------------------------------\n/**\n * Saves state for current action, the state can only be retrieved by this action's post job execution.\n *\n * @param name name of the state to store\n * @param value value to store. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction saveState(name, value) {\n command_1.issueCommand('save-state', { name }, value);\n}\nexports.saveState = saveState;\n/**\n * Gets the value of an state set by this action's main execution.\n *\n * @param name name of the state to get\n * @returns string\n */\nfunction getState(name) {\n return process.env[`STATE_${name}`] || '';\n}\nexports.getState = getState;\nfunction getIDToken(aud) {\n return __awaiter(this, void 0, void 0, function* () {\n return yield oidc_utils_1.OidcClient.getIDToken(aud);\n });\n}\nexports.getIDToken = getIDToken;\n/**\n * Summary exports\n */\nvar summary_1 = require(\"./summary\");\nObject.defineProperty(exports, \"summary\", { enumerable: true, get: function () { return summary_1.summary; } });\n/**\n * @deprecated use core.summary\n */\nvar summary_2 = require(\"./summary\");\nObject.defineProperty(exports, \"markdownSummary\", { enumerable: true, get: function () { return summary_2.markdownSummary; } });\n/**\n * Path exports\n */\nvar path_utils_1 = require(\"./path-utils\");\nObject.defineProperty(exports, \"toPosixPath\", { enumerable: true, get: function () { return path_utils_1.toPosixPath; } });\nObject.defineProperty(exports, \"toWin32Path\", { enumerable: true, get: function () { return path_utils_1.toWin32Path; } });\nObject.defineProperty(exports, \"toPlatformPath\", { enumerable: true, get: function () { return path_utils_1.toPlatformPath; } });\n//# sourceMappingURL=core.js.map","\"use strict\";\n// For internal use, subject to change.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.issueCommand = void 0;\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nconst fs = __importStar(require(\"fs\"));\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\nfunction issueCommand(command, message) {\n const filePath = process.env[`GITHUB_${command}`];\n if (!filePath) {\n throw new Error(`Unable to find environment variable for file command ${command}`);\n }\n if (!fs.existsSync(filePath)) {\n throw new Error(`Missing file at path: ${filePath}`);\n }\n fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {\n encoding: 'utf8'\n });\n}\nexports.issueCommand = issueCommand;\n//# sourceMappingURL=file-command.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.OidcClient = void 0;\nconst http_client_1 = require(\"@actions/http-client\");\nconst auth_1 = require(\"@actions/http-client/lib/auth\");\nconst core_1 = require(\"./core\");\nclass OidcClient {\n static createHttpClient(allowRetry = true, maxRetry = 10) {\n const requestOptions = {\n allowRetries: allowRetry,\n maxRetries: maxRetry\n };\n return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions);\n }\n static getRequestToken() {\n const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN'];\n if (!token) {\n throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable');\n }\n return token;\n }\n static getIDTokenUrl() {\n const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL'];\n if (!runtimeUrl) {\n throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable');\n }\n return runtimeUrl;\n }\n static getCall(id_token_url) {\n var _a;\n return __awaiter(this, void 0, void 0, function* () {\n const httpclient = OidcClient.createHttpClient();\n const res = yield httpclient\n .getJson(id_token_url)\n .catch(error => {\n throw new Error(`Failed to get ID Token. \\n \n Error Code : ${error.statusCode}\\n \n Error Message: ${error.result.message}`);\n });\n const id_token = (_a = res.result) === null || _a === void 0 ? 
void 0 : _a.value;\n if (!id_token) {\n throw new Error('Response json body do not have ID Token field');\n }\n return id_token;\n });\n }\n static getIDToken(audience) {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n // New ID Token is requested from action service\n let id_token_url = OidcClient.getIDTokenUrl();\n if (audience) {\n const encodedAudience = encodeURIComponent(audience);\n id_token_url = `${id_token_url}&audience=${encodedAudience}`;\n }\n core_1.debug(`ID token url is ${id_token_url}`);\n const id_token = yield OidcClient.getCall(id_token_url);\n core_1.setSecret(id_token);\n return id_token;\n }\n catch (error) {\n throw new Error(`Error message: ${error.message}`);\n }\n });\n }\n}\nexports.OidcClient = OidcClient;\n//# sourceMappingURL=oidc-utils.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0;\nconst path = __importStar(require(\"path\"));\n/**\n * toPosixPath converts the given path to the posix form. On Windows, \\\\ will be\n * replaced with /.\n *\n * @param pth. Path to transform.\n * @return string Posix path.\n */\nfunction toPosixPath(pth) {\n return pth.replace(/[\\\\]/g, '/');\n}\nexports.toPosixPath = toPosixPath;\n/**\n * toWin32Path converts the given path to the win32 form. On Linux, / will be\n * replaced with \\\\.\n *\n * @param pth. Path to transform.\n * @return string Win32 path.\n */\nfunction toWin32Path(pth) {\n return pth.replace(/[/]/g, '\\\\');\n}\nexports.toWin32Path = toWin32Path;\n/**\n * toPlatformPath converts the given path to a platform-specific path. It does\n * this by replacing instances of / and \\ with the platform-specific path\n * separator.\n *\n * @param pth The path to platformize.\n * @return string The platform-specific path.\n */\nfunction toPlatformPath(pth) {\n return pth.replace(/[/\\\\]/g, path.sep);\n}\nexports.toPlatformPath = toPlatformPath;\n//# sourceMappingURL=path-utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0;\nconst os_1 = require(\"os\");\nconst fs_1 = require(\"fs\");\nconst { access, appendFile, writeFile } = fs_1.promises;\nexports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY';\nexports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary';\nclass Summary {\n constructor() {\n this._buffer = '';\n }\n /**\n * Finds the summary file path from the environment, rejects if env var is not found or file does not exist\n * Also checks r/w permissions.\n *\n * @returns step summary file path\n */\n filePath() {\n return __awaiter(this, void 0, void 0, function* () {\n if (this._filePath) {\n return this._filePath;\n }\n const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR];\n if (!pathFromEnv) {\n throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);\n }\n try {\n yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);\n }\n catch (_a) {\n throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`);\n }\n this._filePath = pathFromEnv;\n return this._filePath;\n });\n }\n /**\n * Wraps content in an HTML tag, adding any HTML attributes\n *\n * @param {string} tag HTML tag to wrap\n * @param {string | null} content content within the tag\n * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add\n *\n * @returns {string} content wrapped in HTML element\n */\n wrap(tag, content, attrs = {}) {\n const htmlAttrs = Object.entries(attrs)\n .map(([key, value]) => ` ${key}=\"${value}\"`)\n .join('');\n if (!content) {\n return `<${tag}${htmlAttrs}>`;\n }\n return `<${tag}${htmlAttrs}>${content}</${tag}>`;\n }\n /**\n * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default.\n *\n * @param {SummaryWriteOptions} [options] (optional) options for write operation\n *\n * @returns {Promise<Summary>} summary instance\n */\n write(options) {\n return __awaiter(this, void 0, void 0, function* () {\n const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite);\n const filePath = yield this.filePath();\n const writeFunc = overwrite ? 
writeFile : appendFile;\n yield writeFunc(filePath, this._buffer, { encoding: 'utf8' });\n return this.emptyBuffer();\n });\n }\n /**\n * Clears the summary buffer and wipes the summary file\n *\n * @returns {Summary} summary instance\n */\n clear() {\n return __awaiter(this, void 0, void 0, function* () {\n return this.emptyBuffer().write({ overwrite: true });\n });\n }\n /**\n * Returns the current summary buffer as a string\n *\n * @returns {string} string of summary buffer\n */\n stringify() {\n return this._buffer;\n }\n /**\n * If the summary buffer is empty\n *\n * @returns {boolen} true if the buffer is empty\n */\n isEmptyBuffer() {\n return this._buffer.length === 0;\n }\n /**\n * Resets the summary buffer without writing to summary file\n *\n * @returns {Summary} summary instance\n */\n emptyBuffer() {\n this._buffer = '';\n return this;\n }\n /**\n * Adds raw text to the summary buffer\n *\n * @param {string} text content to add\n * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false)\n *\n * @returns {Summary} summary instance\n */\n addRaw(text, addEOL = false) {\n this._buffer += text;\n return addEOL ? this.addEOL() : this;\n }\n /**\n * Adds the operating system-specific end-of-line marker to the buffer\n *\n * @returns {Summary} summary instance\n */\n addEOL() {\n return this.addRaw(os_1.EOL);\n }\n /**\n * Adds an HTML codeblock to the summary buffer\n *\n * @param {string} code content to render within fenced code block\n * @param {string} lang (optional) language to syntax highlight code\n *\n * @returns {Summary} summary instance\n */\n addCodeBlock(code, lang) {\n const attrs = Object.assign({}, (lang && { lang }));\n const element = this.wrap('pre', this.wrap('code', code), attrs);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML list to the summary buffer\n *\n * @param {string[]} items list of items to render\n * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false)\n *\n * @returns {Summary} summary instance\n */\n addList(items, ordered = false) {\n const tag = ordered ? 'ol' : 'ul';\n const listItems = items.map(item => this.wrap('li', item)).join('');\n const element = this.wrap(tag, listItems);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML table to the summary buffer\n *\n * @param {SummaryTableCell[]} rows table rows\n *\n * @returns {Summary} summary instance\n */\n addTable(rows) {\n const tableBody = rows\n .map(row => {\n const cells = row\n .map(cell => {\n if (typeof cell === 'string') {\n return this.wrap('td', cell);\n }\n const { header, data, colspan, rowspan } = cell;\n const tag = header ? 
'th' : 'td';\n const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan }));\n return this.wrap(tag, data, attrs);\n })\n .join('');\n return this.wrap('tr', cells);\n })\n .join('');\n const element = this.wrap('table', tableBody);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds a collapsable HTML details element to the summary buffer\n *\n * @param {string} label text for the closed state\n * @param {string} content collapsable content\n *\n * @returns {Summary} summary instance\n */\n addDetails(label, content) {\n const element = this.wrap('details', this.wrap('summary', label) + content);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML image tag to the summary buffer\n *\n * @param {string} src path to the image you to embed\n * @param {string} alt text description of the image\n * @param {SummaryImageOptions} options (optional) addition image attributes\n *\n * @returns {Summary} summary instance\n */\n addImage(src, alt, options) {\n const { width, height } = options || {};\n const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height }));\n const element = this.wrap('img', null, Object.assign({ src, alt }, attrs));\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML section heading element\n *\n * @param {string} text heading text\n * @param {number | string} [level=1] (optional) the heading level, default: 1\n *\n * @returns {Summary} summary instance\n */\n addHeading(text, level) {\n const tag = `h${level}`;\n const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag)\n ? tag\n : 'h1';\n const element = this.wrap(allowedTag, text);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML thematic break (
<hr>) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addSeparator() {\n const element = this.wrap('hr', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML line break (<br>
) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addBreak() {\n const element = this.wrap('br', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML blockquote to the summary buffer\n *\n * @param {string} text quote text\n * @param {string} cite (optional) citation url\n *\n * @returns {Summary} summary instance\n */\n addQuote(text, cite) {\n const attrs = Object.assign({}, (cite && { cite }));\n const element = this.wrap('blockquote', text, attrs);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML anchor tag to the summary buffer\n *\n * @param {string} text link text/content\n * @param {string} href hyperlink\n *\n * @returns {Summary} summary instance\n */\n addLink(text, href) {\n const element = this.wrap('a', text, { href });\n return this.addRaw(element).addEOL();\n }\n}\nconst _summary = new Summary();\n/**\n * @deprecated use `core.summary`\n */\nexports.markdownSummary = _summary;\nexports.summary = _summary;\n//# sourceMappingURL=summary.js.map","\"use strict\";\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.toCommandProperties = exports.toCommandValue = void 0;\n/**\n * Sanitizes an input into a string so it can be passed into issueCommand safely\n * @param input input to sanitize into a string\n */\nfunction toCommandValue(input) {\n if (input === null || input === undefined) {\n return '';\n }\n else if (typeof input === 'string' || input instanceof String) {\n return input;\n }\n return JSON.stringify(input);\n}\nexports.toCommandValue = toCommandValue;\n/**\n *\n * @param annotationProperties\n * @returns The command properties to send with the actual annotation command\n * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646\n */\nfunction toCommandProperties(annotationProperties) {\n if (!Object.keys(annotationProperties).length) {\n return {};\n }\n return {\n title: annotationProperties.title,\n file: annotationProperties.file,\n line: annotationProperties.startLine,\n endLine: annotationProperties.endLine,\n col: annotationProperties.startColumn,\n endColumn: annotationProperties.endColumn\n };\n}\nexports.toCommandProperties = toCommandProperties;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0;\nclass BasicCredentialHandler {\n constructor(username, password) {\n this.username = username;\n this.password = password;\n }\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.BasicCredentialHandler = BasicCredentialHandler;\nclass BearerCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Bearer ${this.token}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.BearerCredentialHandler = BearerCredentialHandler;\nclass PersonalAccessTokenCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;\n//# sourceMappingURL=auth.js.map","\"use strict\";\n/* eslint-disable @typescript-eslint/no-explicit-any */\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0;\nconst http = __importStar(require(\"http\"));\nconst https = __importStar(require(\"https\"));\nconst pm = __importStar(require(\"./proxy\"));\nconst tunnel = __importStar(require(\"tunnel\"));\nvar HttpCodes;\n(function (HttpCodes) {\n HttpCodes[HttpCodes[\"OK\"] = 200] = \"OK\";\n HttpCodes[HttpCodes[\"MultipleChoices\"] = 300] = \"MultipleChoices\";\n HttpCodes[HttpCodes[\"MovedPermanently\"] = 301] = \"MovedPermanently\";\n HttpCodes[HttpCodes[\"ResourceMoved\"] = 302] = \"ResourceMoved\";\n HttpCodes[HttpCodes[\"SeeOther\"] = 303] = \"SeeOther\";\n HttpCodes[HttpCodes[\"NotModified\"] = 304] = \"NotModified\";\n HttpCodes[HttpCodes[\"UseProxy\"] = 305] = \"UseProxy\";\n HttpCodes[HttpCodes[\"SwitchProxy\"] = 306] = \"SwitchProxy\";\n HttpCodes[HttpCodes[\"TemporaryRedirect\"] = 307] = \"TemporaryRedirect\";\n HttpCodes[HttpCodes[\"PermanentRedirect\"] = 308] = \"PermanentRedirect\";\n HttpCodes[HttpCodes[\"BadRequest\"] = 400] = \"BadRequest\";\n HttpCodes[HttpCodes[\"Unauthorized\"] = 401] = \"Unauthorized\";\n HttpCodes[HttpCodes[\"PaymentRequired\"] = 402] = \"PaymentRequired\";\n HttpCodes[HttpCodes[\"Forbidden\"] = 403] = \"Forbidden\";\n HttpCodes[HttpCodes[\"NotFound\"] = 404] = \"NotFound\";\n HttpCodes[HttpCodes[\"MethodNotAllowed\"] = 405] = \"MethodNotAllowed\";\n HttpCodes[HttpCodes[\"NotAcceptable\"] = 406] = \"NotAcceptable\";\n HttpCodes[HttpCodes[\"ProxyAuthenticationRequired\"] = 407] = \"ProxyAuthenticationRequired\";\n HttpCodes[HttpCodes[\"RequestTimeout\"] = 408] = \"RequestTimeout\";\n HttpCodes[HttpCodes[\"Conflict\"] = 409] = \"Conflict\";\n HttpCodes[HttpCodes[\"Gone\"] = 410] = \"Gone\";\n HttpCodes[HttpCodes[\"TooManyRequests\"] = 429] = \"TooManyRequests\";\n HttpCodes[HttpCodes[\"InternalServerError\"] = 500] = \"InternalServerError\";\n HttpCodes[HttpCodes[\"NotImplemented\"] = 501] = \"NotImplemented\";\n HttpCodes[HttpCodes[\"BadGateway\"] = 502] = \"BadGateway\";\n HttpCodes[HttpCodes[\"ServiceUnavailable\"] = 503] = \"ServiceUnavailable\";\n HttpCodes[HttpCodes[\"GatewayTimeout\"] = 504] = \"GatewayTimeout\";\n})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));\nvar Headers;\n(function (Headers) {\n Headers[\"Accept\"] = \"accept\";\n Headers[\"ContentType\"] = \"content-type\";\n})(Headers = exports.Headers || (exports.Headers = {}));\nvar MediaTypes;\n(function (MediaTypes) {\n MediaTypes[\"ApplicationJson\"] = \"application/json\";\n})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));\n/**\n * Returns the proxy URL, depending upon the supplied url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. 
For example, https://api.github.com\n */\nfunction getProxyUrl(serverUrl) {\n const proxyUrl = pm.getProxyUrl(new URL(serverUrl));\n return proxyUrl ? proxyUrl.href : '';\n}\nexports.getProxyUrl = getProxyUrl;\nconst HttpRedirectCodes = [\n HttpCodes.MovedPermanently,\n HttpCodes.ResourceMoved,\n HttpCodes.SeeOther,\n HttpCodes.TemporaryRedirect,\n HttpCodes.PermanentRedirect\n];\nconst HttpResponseRetryCodes = [\n HttpCodes.BadGateway,\n HttpCodes.ServiceUnavailable,\n HttpCodes.GatewayTimeout\n];\nconst RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];\nconst ExponentialBackoffCeiling = 10;\nconst ExponentialBackoffTimeSlice = 5;\nclass HttpClientError extends Error {\n constructor(message, statusCode) {\n super(message);\n this.name = 'HttpClientError';\n this.statusCode = statusCode;\n Object.setPrototypeOf(this, HttpClientError.prototype);\n }\n}\nexports.HttpClientError = HttpClientError;\nclass HttpClientResponse {\n constructor(message) {\n this.message = message;\n }\n readBody() {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {\n let output = Buffer.alloc(0);\n this.message.on('data', (chunk) => {\n output = Buffer.concat([output, chunk]);\n });\n this.message.on('end', () => {\n resolve(output.toString());\n });\n }));\n });\n }\n}\nexports.HttpClientResponse = HttpClientResponse;\nfunction isHttps(requestUrl) {\n const parsedUrl = new URL(requestUrl);\n return parsedUrl.protocol === 'https:';\n}\nexports.isHttps = isHttps;\nclass HttpClient {\n constructor(userAgent, handlers, requestOptions) {\n this._ignoreSslError = false;\n this._allowRedirects = true;\n this._allowRedirectDowngrade = false;\n this._maxRedirects = 50;\n this._allowRetries = false;\n this._maxRetries = 1;\n this._keepAlive = false;\n this._disposed = false;\n this.userAgent = userAgent;\n this.handlers = handlers || [];\n this.requestOptions = requestOptions;\n if (requestOptions) {\n if (requestOptions.ignoreSslError != null) {\n this._ignoreSslError = requestOptions.ignoreSslError;\n }\n this._socketTimeout = requestOptions.socketTimeout;\n if (requestOptions.allowRedirects != null) {\n this._allowRedirects = requestOptions.allowRedirects;\n }\n if (requestOptions.allowRedirectDowngrade != null) {\n this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;\n }\n if (requestOptions.maxRedirects != null) {\n this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);\n }\n if (requestOptions.keepAlive != null) {\n this._keepAlive = requestOptions.keepAlive;\n }\n if (requestOptions.allowRetries != null) {\n this._allowRetries = requestOptions.allowRetries;\n }\n if (requestOptions.maxRetries != null) {\n this._maxRetries = requestOptions.maxRetries;\n }\n }\n }\n options(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});\n });\n }\n get(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('GET', requestUrl, null, additionalHeaders || {});\n });\n }\n del(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('DELETE', requestUrl, null, additionalHeaders || {});\n });\n }\n post(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('POST', requestUrl, data, additionalHeaders || {});\n });\n }\n patch(requestUrl, 
data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('PATCH', requestUrl, data, additionalHeaders || {});\n });\n }\n put(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('PUT', requestUrl, data, additionalHeaders || {});\n });\n }\n head(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('HEAD', requestUrl, null, additionalHeaders || {});\n });\n }\n sendStream(verb, requestUrl, stream, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request(verb, requestUrl, stream, additionalHeaders);\n });\n }\n /**\n * Gets a typed object from an endpoint\n * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise\n */\n getJson(requestUrl, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n const res = yield this.get(requestUrl, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n postJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.post(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n putJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.put(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n patchJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.patch(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n /**\n * Makes a raw http request.\n * All other methods such as get, post, patch, and request ultimately call this.\n * Prefer get, del, post and patch\n */\n request(verb, requestUrl, data, headers) {\n return __awaiter(this, void 0, void 0, function* () {\n if (this._disposed) {\n throw new Error('Client has already been disposed.');\n }\n const parsedUrl = new URL(requestUrl);\n let info = this._prepareRequest(verb, parsedUrl, headers);\n // Only perform retries on reads since writes may not be idempotent.\n const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb)\n ? 
this._maxRetries + 1\n : 1;\n let numTries = 0;\n let response;\n do {\n response = yield this.requestRaw(info, data);\n // Check if it's an authentication challenge\n if (response &&\n response.message &&\n response.message.statusCode === HttpCodes.Unauthorized) {\n let authenticationHandler;\n for (const handler of this.handlers) {\n if (handler.canHandleAuthentication(response)) {\n authenticationHandler = handler;\n break;\n }\n }\n if (authenticationHandler) {\n return authenticationHandler.handleAuthentication(this, info, data);\n }\n else {\n // We have received an unauthorized response but have no handlers to handle it.\n // Let the response return to the caller.\n return response;\n }\n }\n let redirectsRemaining = this._maxRedirects;\n while (response.message.statusCode &&\n HttpRedirectCodes.includes(response.message.statusCode) &&\n this._allowRedirects &&\n redirectsRemaining > 0) {\n const redirectUrl = response.message.headers['location'];\n if (!redirectUrl) {\n // if there's no location to redirect to, we won't\n break;\n }\n const parsedRedirectUrl = new URL(redirectUrl);\n if (parsedUrl.protocol === 'https:' &&\n parsedUrl.protocol !== parsedRedirectUrl.protocol &&\n !this._allowRedirectDowngrade) {\n throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');\n }\n // we need to finish reading the response before reassigning response\n // which will leak the open socket.\n yield response.readBody();\n // strip authorization header if redirected to a different hostname\n if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {\n for (const header in headers) {\n // header names are case insensitive\n if (header.toLowerCase() === 'authorization') {\n delete headers[header];\n }\n }\n }\n // let's make the request with the new redirectUrl\n info = this._prepareRequest(verb, parsedRedirectUrl, headers);\n response = yield this.requestRaw(info, data);\n redirectsRemaining--;\n }\n if (!response.message.statusCode ||\n !HttpResponseRetryCodes.includes(response.message.statusCode)) {\n // If not a retry code, return immediately instead of retrying\n return response;\n }\n numTries += 1;\n if (numTries < maxTries) {\n yield response.readBody();\n yield this._performExponentialBackoff(numTries);\n }\n } while (numTries < maxTries);\n return response;\n });\n }\n /**\n * Needs to be called if keepAlive is set to true in request options.\n */\n dispose() {\n if (this._agent) {\n this._agent.destroy();\n }\n this._disposed = true;\n }\n /**\n * Raw request.\n * @param info\n * @param data\n */\n requestRaw(info, data) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve, reject) => {\n function callbackForResult(err, res) {\n if (err) {\n reject(err);\n }\n else if (!res) {\n // If `err` is not passed, then `res` must be passed.\n reject(new Error('Unknown error'));\n }\n else {\n resolve(res);\n }\n }\n this.requestRawWithCallback(info, data, callbackForResult);\n });\n });\n }\n /**\n * Raw request with callback.\n * @param info\n * @param data\n * @param onResult\n */\n requestRawWithCallback(info, data, onResult) {\n if (typeof data === 'string') {\n if (!info.options.headers) {\n info.options.headers = {};\n }\n info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');\n }\n let callbackCalled = false;\n function handleResult(err, res) {\n if (!callbackCalled) {\n callbackCalled = true;\n 
onResult(err, res);\n }\n }\n const req = info.httpModule.request(info.options, (msg) => {\n const res = new HttpClientResponse(msg);\n handleResult(undefined, res);\n });\n let socket;\n req.on('socket', sock => {\n socket = sock;\n });\n // If we ever get disconnected, we want the socket to timeout eventually\n req.setTimeout(this._socketTimeout || 3 * 60000, () => {\n if (socket) {\n socket.end();\n }\n handleResult(new Error(`Request timeout: ${info.options.path}`));\n });\n req.on('error', function (err) {\n // err has statusCode property\n // res should have headers\n handleResult(err);\n });\n if (data && typeof data === 'string') {\n req.write(data, 'utf8');\n }\n if (data && typeof data !== 'string') {\n data.on('close', function () {\n req.end();\n });\n data.pipe(req);\n }\n else {\n req.end();\n }\n }\n /**\n * Gets an http agent. This function is useful when you need an http agent that handles\n * routing through a proxy server - depending upon the url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com\n */\n getAgent(serverUrl) {\n const parsedUrl = new URL(serverUrl);\n return this._getAgent(parsedUrl);\n }\n _prepareRequest(method, requestUrl, headers) {\n const info = {};\n info.parsedUrl = requestUrl;\n const usingSsl = info.parsedUrl.protocol === 'https:';\n info.httpModule = usingSsl ? https : http;\n const defaultPort = usingSsl ? 443 : 80;\n info.options = {};\n info.options.host = info.parsedUrl.hostname;\n info.options.port = info.parsedUrl.port\n ? parseInt(info.parsedUrl.port)\n : defaultPort;\n info.options.path =\n (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');\n info.options.method = method;\n info.options.headers = this._mergeHeaders(headers);\n if (this.userAgent != null) {\n info.options.headers['user-agent'] = this.userAgent;\n }\n info.options.agent = this._getAgent(info.parsedUrl);\n // gives handlers an opportunity to participate\n if (this.handlers) {\n for (const handler of this.handlers) {\n handler.prepareRequest(info.options);\n }\n }\n return info;\n }\n _mergeHeaders(headers) {\n if (this.requestOptions && this.requestOptions.headers) {\n return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {}));\n }\n return lowercaseKeys(headers || {});\n }\n _getExistingOrDefaultHeader(additionalHeaders, header, _default) {\n let clientHeader;\n if (this.requestOptions && this.requestOptions.headers) {\n clientHeader = lowercaseKeys(this.requestOptions.headers)[header];\n }\n return additionalHeaders[header] || clientHeader || _default;\n }\n _getAgent(parsedUrl) {\n let agent;\n const proxyUrl = pm.getProxyUrl(parsedUrl);\n const useProxy = proxyUrl && proxyUrl.hostname;\n if (this._keepAlive && useProxy) {\n agent = this._proxyAgent;\n }\n if (this._keepAlive && !useProxy) {\n agent = this._agent;\n }\n // if agent is already assigned use that agent.\n if (agent) {\n return agent;\n }\n const usingSsl = parsedUrl.protocol === 'https:';\n let maxSockets = 100;\n if (this.requestOptions) {\n maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;\n }\n // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis.\n if (proxyUrl && proxyUrl.hostname) {\n const agentOptions = {\n maxSockets,\n keepAlive: this._keepAlive,\n proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && {\n proxyAuth: 
`${proxyUrl.username}:${proxyUrl.password}`\n })), { host: proxyUrl.hostname, port: proxyUrl.port })\n };\n let tunnelAgent;\n const overHttps = proxyUrl.protocol === 'https:';\n if (usingSsl) {\n tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;\n }\n else {\n tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;\n }\n agent = tunnelAgent(agentOptions);\n this._proxyAgent = agent;\n }\n // if reusing agent across request and tunneling agent isn't assigned create a new agent\n if (this._keepAlive && !agent) {\n const options = { keepAlive: this._keepAlive, maxSockets };\n agent = usingSsl ? new https.Agent(options) : new http.Agent(options);\n this._agent = agent;\n }\n // if not using private agent and tunnel agent isn't setup then use global agent\n if (!agent) {\n agent = usingSsl ? https.globalAgent : http.globalAgent;\n }\n if (usingSsl && this._ignoreSslError) {\n // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process\n // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options\n // we have to cast it to any and change it directly\n agent.options = Object.assign(agent.options || {}, {\n rejectUnauthorized: false\n });\n }\n return agent;\n }\n _performExponentialBackoff(retryNumber) {\n return __awaiter(this, void 0, void 0, function* () {\n retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);\n const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);\n return new Promise(resolve => setTimeout(() => resolve(), ms));\n });\n }\n _processResponse(res, options) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {\n const statusCode = res.message.statusCode || 0;\n const response = {\n statusCode,\n result: null,\n headers: {}\n };\n // not found leads to null obj returned\n if (statusCode === HttpCodes.NotFound) {\n resolve(response);\n }\n // get the result from the body\n function dateTimeDeserializer(key, value) {\n if (typeof value === 'string') {\n const a = new Date(value);\n if (!isNaN(a.valueOf())) {\n return a;\n }\n }\n return value;\n }\n let obj;\n let contents;\n try {\n contents = yield res.readBody();\n if (contents && contents.length > 0) {\n if (options && options.deserializeDates) {\n obj = JSON.parse(contents, dateTimeDeserializer);\n }\n else {\n obj = JSON.parse(contents);\n }\n response.result = obj;\n }\n response.headers = res.message.headers;\n }\n catch (err) {\n // Invalid resource (contents not json); leaving result obj null\n }\n // note that 3xx redirects are handled by the http layer.\n if (statusCode > 299) {\n let msg;\n // if exception/error in body, attempt to get better error\n if (obj && obj.message) {\n msg = obj.message;\n }\n else if (contents && contents.length > 0) {\n // it may be the case that the exception is in the body message as string\n msg = contents;\n }\n else {\n msg = `Failed request: (${statusCode})`;\n }\n const err = new HttpClientError(msg, statusCode);\n err.result = response.result;\n reject(err);\n }\n else {\n resolve(response);\n }\n }));\n });\n }\n}\nexports.HttpClient = HttpClient;\nconst lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.checkBypass = exports.getProxyUrl = void 0;\nfunction getProxyUrl(reqUrl) {\n const 
usingSsl = reqUrl.protocol === 'https:';\n if (checkBypass(reqUrl)) {\n return undefined;\n }\n const proxyVar = (() => {\n if (usingSsl) {\n return process.env['https_proxy'] || process.env['HTTPS_PROXY'];\n }\n else {\n return process.env['http_proxy'] || process.env['HTTP_PROXY'];\n }\n })();\n if (proxyVar) {\n return new URL(proxyVar);\n }\n else {\n return undefined;\n }\n}\nexports.getProxyUrl = getProxyUrl;\nfunction checkBypass(reqUrl) {\n if (!reqUrl.hostname) {\n return false;\n }\n const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';\n if (!noProxy) {\n return false;\n }\n // Determine the request port\n let reqPort;\n if (reqUrl.port) {\n reqPort = Number(reqUrl.port);\n }\n else if (reqUrl.protocol === 'http:') {\n reqPort = 80;\n }\n else if (reqUrl.protocol === 'https:') {\n reqPort = 443;\n }\n // Format the request hostname and hostname with port\n const upperReqHosts = [reqUrl.hostname.toUpperCase()];\n if (typeof reqPort === 'number') {\n upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);\n }\n // Compare request host against noproxy\n for (const upperNoProxyItem of noProxy\n .split(',')\n .map(x => x.trim().toUpperCase())\n .filter(x => x)) {\n if (upperReqHosts.some(x => x === upperNoProxyItem)) {\n return true;\n }\n }\n return false;\n}\nexports.checkBypass = checkBypass;\n//# sourceMappingURL=proxy.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.CodeGen = exports.Name = exports.nil = exports.stringify = exports.str = exports._ = exports.KeywordCxt = void 0;\nconst core_1 = require(\"./core\");\nconst draft7_1 = require(\"./vocabularies/draft7\");\nconst discriminator_1 = require(\"./vocabularies/discriminator\");\nconst draft7MetaSchema = require(\"./refs/json-schema-draft-07.json\");\nconst META_SUPPORT_DATA = [\"/properties\"];\nconst META_SCHEMA_ID = \"http://json-schema.org/draft-07/schema\";\nclass Ajv extends core_1.default {\n _addVocabularies() {\n super._addVocabularies();\n draft7_1.default.forEach((v) => this.addVocabulary(v));\n if (this.opts.discriminator)\n this.addKeyword(discriminator_1.default);\n }\n _addDefaultMetaSchema() {\n super._addDefaultMetaSchema();\n if (!this.opts.meta)\n return;\n const metaSchema = this.opts.$data\n ? this.$dataMetaSchema(draft7MetaSchema, META_SUPPORT_DATA)\n : draft7MetaSchema;\n this.addMetaSchema(metaSchema, META_SCHEMA_ID, false);\n this.refs[\"http://json-schema.org/schema\"] = META_SCHEMA_ID;\n }\n defaultMeta() {\n return (this.opts.defaultMeta =\n super.defaultMeta() || (this.getSchema(META_SCHEMA_ID) ? 
META_SCHEMA_ID : undefined));\n }\n}\nmodule.exports = exports = Ajv;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.default = Ajv;\nvar validate_1 = require(\"./compile/validate\");\nObject.defineProperty(exports, \"KeywordCxt\", { enumerable: true, get: function () { return validate_1.KeywordCxt; } });\nvar codegen_1 = require(\"./compile/codegen\");\nObject.defineProperty(exports, \"_\", { enumerable: true, get: function () { return codegen_1._; } });\nObject.defineProperty(exports, \"str\", { enumerable: true, get: function () { return codegen_1.str; } });\nObject.defineProperty(exports, \"stringify\", { enumerable: true, get: function () { return codegen_1.stringify; } });\nObject.defineProperty(exports, \"nil\", { enumerable: true, get: function () { return codegen_1.nil; } });\nObject.defineProperty(exports, \"Name\", { enumerable: true, get: function () { return codegen_1.Name; } });\nObject.defineProperty(exports, \"CodeGen\", { enumerable: true, get: function () { return codegen_1.CodeGen; } });\n//# sourceMappingURL=ajv.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.regexpCode = exports.getEsmExportName = exports.getProperty = exports.safeStringify = exports.stringify = exports.strConcat = exports.addCodeArg = exports.str = exports._ = exports.nil = exports._Code = exports.Name = exports.IDENTIFIER = exports._CodeOrName = void 0;\nclass _CodeOrName {\n}\nexports._CodeOrName = _CodeOrName;\nexports.IDENTIFIER = /^[a-z$_][a-z$_0-9]*$/i;\nclass Name extends _CodeOrName {\n constructor(s) {\n super();\n if (!exports.IDENTIFIER.test(s))\n throw new Error(\"CodeGen: name must be a valid identifier\");\n this.str = s;\n }\n toString() {\n return this.str;\n }\n emptyStr() {\n return false;\n }\n get names() {\n return { [this.str]: 1 };\n }\n}\nexports.Name = Name;\nclass _Code extends _CodeOrName {\n constructor(code) {\n super();\n this._items = typeof code === \"string\" ? [code] : code;\n }\n toString() {\n return this.str;\n }\n emptyStr() {\n if (this._items.length > 1)\n return false;\n const item = this._items[0];\n return item === \"\" || item === '\"\"';\n }\n get str() {\n var _a;\n return ((_a = this._str) !== null && _a !== void 0 ? _a : (this._str = this._items.reduce((s, c) => `${s}${c}`, \"\")));\n }\n get names() {\n var _a;\n return ((_a = this._names) !== null && _a !== void 0 ? 
_a : (this._names = this._items.reduce((names, c) => {\n if (c instanceof Name)\n names[c.str] = (names[c.str] || 0) + 1;\n return names;\n }, {})));\n }\n}\nexports._Code = _Code;\nexports.nil = new _Code(\"\");\nfunction _(strs, ...args) {\n const code = [strs[0]];\n let i = 0;\n while (i < args.length) {\n addCodeArg(code, args[i]);\n code.push(strs[++i]);\n }\n return new _Code(code);\n}\nexports._ = _;\nconst plus = new _Code(\"+\");\nfunction str(strs, ...args) {\n const expr = [safeStringify(strs[0])];\n let i = 0;\n while (i < args.length) {\n expr.push(plus);\n addCodeArg(expr, args[i]);\n expr.push(plus, safeStringify(strs[++i]));\n }\n optimize(expr);\n return new _Code(expr);\n}\nexports.str = str;\nfunction addCodeArg(code, arg) {\n if (arg instanceof _Code)\n code.push(...arg._items);\n else if (arg instanceof Name)\n code.push(arg);\n else\n code.push(interpolate(arg));\n}\nexports.addCodeArg = addCodeArg;\nfunction optimize(expr) {\n let i = 1;\n while (i < expr.length - 1) {\n if (expr[i] === plus) {\n const res = mergeExprItems(expr[i - 1], expr[i + 1]);\n if (res !== undefined) {\n expr.splice(i - 1, 3, res);\n continue;\n }\n expr[i++] = \"+\";\n }\n i++;\n }\n}\nfunction mergeExprItems(a, b) {\n if (b === '\"\"')\n return a;\n if (a === '\"\"')\n return b;\n if (typeof a == \"string\") {\n if (b instanceof Name || a[a.length - 1] !== '\"')\n return;\n if (typeof b != \"string\")\n return `${a.slice(0, -1)}${b}\"`;\n if (b[0] === '\"')\n return a.slice(0, -1) + b.slice(1);\n return;\n }\n if (typeof b == \"string\" && b[0] === '\"' && !(a instanceof Name))\n return `\"${a}${b.slice(1)}`;\n return;\n}\nfunction strConcat(c1, c2) {\n return c2.emptyStr() ? c1 : c1.emptyStr() ? c2 : str `${c1}${c2}`;\n}\nexports.strConcat = strConcat;\n// TODO do not allow arrays here\nfunction interpolate(x) {\n return typeof x == \"number\" || typeof x == \"boolean\" || x === null\n ? x\n : safeStringify(Array.isArray(x) ? x.join(\",\") : x);\n}\nfunction stringify(x) {\n return new _Code(safeStringify(x));\n}\nexports.stringify = stringify;\nfunction safeStringify(x) {\n return JSON.stringify(x)\n .replace(/\\u2028/g, \"\\\\u2028\")\n .replace(/\\u2029/g, \"\\\\u2029\");\n}\nexports.safeStringify = safeStringify;\nfunction getProperty(key) {\n return typeof key == \"string\" && exports.IDENTIFIER.test(key) ? 
new _Code(`.${key}`) : _ `[${key}]`;\n}\nexports.getProperty = getProperty;\n//Does best effort to format the name properly\nfunction getEsmExportName(key) {\n if (typeof key == \"string\" && exports.IDENTIFIER.test(key)) {\n return new _Code(`${key}`);\n }\n throw new Error(`CodeGen: invalid export name: ${key}, use explicit $id name mapping`);\n}\nexports.getEsmExportName = getEsmExportName;\nfunction regexpCode(rx) {\n return new _Code(rx.toString());\n}\nexports.regexpCode = regexpCode;\n//# sourceMappingURL=code.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.or = exports.and = exports.not = exports.CodeGen = exports.operators = exports.varKinds = exports.ValueScopeName = exports.ValueScope = exports.Scope = exports.Name = exports.regexpCode = exports.stringify = exports.getProperty = exports.nil = exports.strConcat = exports.str = exports._ = void 0;\nconst code_1 = require(\"./code\");\nconst scope_1 = require(\"./scope\");\nvar code_2 = require(\"./code\");\nObject.defineProperty(exports, \"_\", { enumerable: true, get: function () { return code_2._; } });\nObject.defineProperty(exports, \"str\", { enumerable: true, get: function () { return code_2.str; } });\nObject.defineProperty(exports, \"strConcat\", { enumerable: true, get: function () { return code_2.strConcat; } });\nObject.defineProperty(exports, \"nil\", { enumerable: true, get: function () { return code_2.nil; } });\nObject.defineProperty(exports, \"getProperty\", { enumerable: true, get: function () { return code_2.getProperty; } });\nObject.defineProperty(exports, \"stringify\", { enumerable: true, get: function () { return code_2.stringify; } });\nObject.defineProperty(exports, \"regexpCode\", { enumerable: true, get: function () { return code_2.regexpCode; } });\nObject.defineProperty(exports, \"Name\", { enumerable: true, get: function () { return code_2.Name; } });\nvar scope_2 = require(\"./scope\");\nObject.defineProperty(exports, \"Scope\", { enumerable: true, get: function () { return scope_2.Scope; } });\nObject.defineProperty(exports, \"ValueScope\", { enumerable: true, get: function () { return scope_2.ValueScope; } });\nObject.defineProperty(exports, \"ValueScopeName\", { enumerable: true, get: function () { return scope_2.ValueScopeName; } });\nObject.defineProperty(exports, \"varKinds\", { enumerable: true, get: function () { return scope_2.varKinds; } });\nexports.operators = {\n GT: new code_1._Code(\">\"),\n GTE: new code_1._Code(\">=\"),\n LT: new code_1._Code(\"<\"),\n LTE: new code_1._Code(\"<=\"),\n EQ: new code_1._Code(\"===\"),\n NEQ: new code_1._Code(\"!==\"),\n NOT: new code_1._Code(\"!\"),\n OR: new code_1._Code(\"||\"),\n AND: new code_1._Code(\"&&\"),\n ADD: new code_1._Code(\"+\"),\n};\nclass Node {\n optimizeNodes() {\n return this;\n }\n optimizeNames(_names, _constants) {\n return this;\n }\n}\nclass Def extends Node {\n constructor(varKind, name, rhs) {\n super();\n this.varKind = varKind;\n this.name = name;\n this.rhs = rhs;\n }\n render({ es5, _n }) {\n const varKind = es5 ? scope_1.varKinds.var : this.varKind;\n const rhs = this.rhs === undefined ? \"\" : ` = ${this.rhs}`;\n return `${varKind} ${this.name}${rhs};` + _n;\n }\n optimizeNames(names, constants) {\n if (!names[this.name.str])\n return;\n if (this.rhs)\n this.rhs = optimizeExpr(this.rhs, names, constants);\n return this;\n }\n get names() {\n return this.rhs instanceof code_1._CodeOrName ? 
this.rhs.names : {};\n }\n}\nclass Assign extends Node {\n constructor(lhs, rhs, sideEffects) {\n super();\n this.lhs = lhs;\n this.rhs = rhs;\n this.sideEffects = sideEffects;\n }\n render({ _n }) {\n return `${this.lhs} = ${this.rhs};` + _n;\n }\n optimizeNames(names, constants) {\n if (this.lhs instanceof code_1.Name && !names[this.lhs.str] && !this.sideEffects)\n return;\n this.rhs = optimizeExpr(this.rhs, names, constants);\n return this;\n }\n get names() {\n const names = this.lhs instanceof code_1.Name ? {} : { ...this.lhs.names };\n return addExprNames(names, this.rhs);\n }\n}\nclass AssignOp extends Assign {\n constructor(lhs, op, rhs, sideEffects) {\n super(lhs, rhs, sideEffects);\n this.op = op;\n }\n render({ _n }) {\n return `${this.lhs} ${this.op}= ${this.rhs};` + _n;\n }\n}\nclass Label extends Node {\n constructor(label) {\n super();\n this.label = label;\n this.names = {};\n }\n render({ _n }) {\n return `${this.label}:` + _n;\n }\n}\nclass Break extends Node {\n constructor(label) {\n super();\n this.label = label;\n this.names = {};\n }\n render({ _n }) {\n const label = this.label ? ` ${this.label}` : \"\";\n return `break${label};` + _n;\n }\n}\nclass Throw extends Node {\n constructor(error) {\n super();\n this.error = error;\n }\n render({ _n }) {\n return `throw ${this.error};` + _n;\n }\n get names() {\n return this.error.names;\n }\n}\nclass AnyCode extends Node {\n constructor(code) {\n super();\n this.code = code;\n }\n render({ _n }) {\n return `${this.code};` + _n;\n }\n optimizeNodes() {\n return `${this.code}` ? this : undefined;\n }\n optimizeNames(names, constants) {\n this.code = optimizeExpr(this.code, names, constants);\n return this;\n }\n get names() {\n return this.code instanceof code_1._CodeOrName ? this.code.names : {};\n }\n}\nclass ParentNode extends Node {\n constructor(nodes = []) {\n super();\n this.nodes = nodes;\n }\n render(opts) {\n return this.nodes.reduce((code, n) => code + n.render(opts), \"\");\n }\n optimizeNodes() {\n const { nodes } = this;\n let i = nodes.length;\n while (i--) {\n const n = nodes[i].optimizeNodes();\n if (Array.isArray(n))\n nodes.splice(i, 1, ...n);\n else if (n)\n nodes[i] = n;\n else\n nodes.splice(i, 1);\n }\n return nodes.length > 0 ? this : undefined;\n }\n optimizeNames(names, constants) {\n const { nodes } = this;\n let i = nodes.length;\n while (i--) {\n // iterating backwards improves 1-pass optimization\n const n = nodes[i];\n if (n.optimizeNames(names, constants))\n continue;\n subtractNames(names, n.names);\n nodes.splice(i, 1);\n }\n return nodes.length > 0 ? this : undefined;\n }\n get names() {\n return this.nodes.reduce((names, n) => addNames(names, n.names), {});\n }\n}\nclass BlockNode extends ParentNode {\n render(opts) {\n return \"{\" + opts._n + super.render(opts) + \"}\" + opts._n;\n }\n}\nclass Root extends ParentNode {\n}\nclass Else extends BlockNode {\n}\nElse.kind = \"else\";\nclass If extends BlockNode {\n constructor(condition, nodes) {\n super(nodes);\n this.condition = condition;\n }\n render(opts) {\n let code = `if(${this.condition})` + super.render(opts);\n if (this.else)\n code += \"else \" + this.else.render(opts);\n return code;\n }\n optimizeNodes() {\n super.optimizeNodes();\n const cond = this.condition;\n if (cond === true)\n return this.nodes; // else is ignored here\n let e = this.else;\n if (e) {\n const ns = e.optimizeNodes();\n e = this.else = Array.isArray(ns) ? new Else(ns) : ns;\n }\n if (e) {\n if (cond === false)\n return e instanceof If ? 
e : e.nodes;\n if (this.nodes.length)\n return this;\n return new If(not(cond), e instanceof If ? [e] : e.nodes);\n }\n if (cond === false || !this.nodes.length)\n return undefined;\n return this;\n }\n optimizeNames(names, constants) {\n var _a;\n this.else = (_a = this.else) === null || _a === void 0 ? void 0 : _a.optimizeNames(names, constants);\n if (!(super.optimizeNames(names, constants) || this.else))\n return;\n this.condition = optimizeExpr(this.condition, names, constants);\n return this;\n }\n get names() {\n const names = super.names;\n addExprNames(names, this.condition);\n if (this.else)\n addNames(names, this.else.names);\n return names;\n }\n}\nIf.kind = \"if\";\nclass For extends BlockNode {\n}\nFor.kind = \"for\";\nclass ForLoop extends For {\n constructor(iteration) {\n super();\n this.iteration = iteration;\n }\n render(opts) {\n return `for(${this.iteration})` + super.render(opts);\n }\n optimizeNames(names, constants) {\n if (!super.optimizeNames(names, constants))\n return;\n this.iteration = optimizeExpr(this.iteration, names, constants);\n return this;\n }\n get names() {\n return addNames(super.names, this.iteration.names);\n }\n}\nclass ForRange extends For {\n constructor(varKind, name, from, to) {\n super();\n this.varKind = varKind;\n this.name = name;\n this.from = from;\n this.to = to;\n }\n render(opts) {\n const varKind = opts.es5 ? scope_1.varKinds.var : this.varKind;\n const { name, from, to } = this;\n return `for(${varKind} ${name}=${from}; ${name}<${to}; ${name}++)` + super.render(opts);\n }\n get names() {\n const names = addExprNames(super.names, this.from);\n return addExprNames(names, this.to);\n }\n}\nclass ForIter extends For {\n constructor(loop, varKind, name, iterable) {\n super();\n this.loop = loop;\n this.varKind = varKind;\n this.name = name;\n this.iterable = iterable;\n }\n render(opts) {\n return `for(${this.varKind} ${this.name} ${this.loop} ${this.iterable})` + super.render(opts);\n }\n optimizeNames(names, constants) {\n if (!super.optimizeNames(names, constants))\n return;\n this.iterable = optimizeExpr(this.iterable, names, constants);\n return this;\n }\n get names() {\n return addNames(super.names, this.iterable.names);\n }\n}\nclass Func extends BlockNode {\n constructor(name, args, async) {\n super();\n this.name = name;\n this.args = args;\n this.async = async;\n }\n render(opts) {\n const _async = this.async ? \"async \" : \"\";\n return `${_async}function ${this.name}(${this.args})` + super.render(opts);\n }\n}\nFunc.kind = \"func\";\nclass Return extends ParentNode {\n render(opts) {\n return \"return \" + super.render(opts);\n }\n}\nReturn.kind = \"return\";\nclass Try extends BlockNode {\n render(opts) {\n let code = \"try\" + super.render(opts);\n if (this.catch)\n code += this.catch.render(opts);\n if (this.finally)\n code += this.finally.render(opts);\n return code;\n }\n optimizeNodes() {\n var _a, _b;\n super.optimizeNodes();\n (_a = this.catch) === null || _a === void 0 ? void 0 : _a.optimizeNodes();\n (_b = this.finally) === null || _b === void 0 ? void 0 : _b.optimizeNodes();\n return this;\n }\n optimizeNames(names, constants) {\n var _a, _b;\n super.optimizeNames(names, constants);\n (_a = this.catch) === null || _a === void 0 ? void 0 : _a.optimizeNames(names, constants);\n (_b = this.finally) === null || _b === void 0 ? 
void 0 : _b.optimizeNames(names, constants);\n return this;\n }\n get names() {\n const names = super.names;\n if (this.catch)\n addNames(names, this.catch.names);\n if (this.finally)\n addNames(names, this.finally.names);\n return names;\n }\n}\nclass Catch extends BlockNode {\n constructor(error) {\n super();\n this.error = error;\n }\n render(opts) {\n return `catch(${this.error})` + super.render(opts);\n }\n}\nCatch.kind = \"catch\";\nclass Finally extends BlockNode {\n render(opts) {\n return \"finally\" + super.render(opts);\n }\n}\nFinally.kind = \"finally\";\nclass CodeGen {\n constructor(extScope, opts = {}) {\n this._values = {};\n this._blockStarts = [];\n this._constants = {};\n this.opts = { ...opts, _n: opts.lines ? \"\\n\" : \"\" };\n this._extScope = extScope;\n this._scope = new scope_1.Scope({ parent: extScope });\n this._nodes = [new Root()];\n }\n toString() {\n return this._root.render(this.opts);\n }\n // returns unique name in the internal scope\n name(prefix) {\n return this._scope.name(prefix);\n }\n // reserves unique name in the external scope\n scopeName(prefix) {\n return this._extScope.name(prefix);\n }\n // reserves unique name in the external scope and assigns value to it\n scopeValue(prefixOrName, value) {\n const name = this._extScope.value(prefixOrName, value);\n const vs = this._values[name.prefix] || (this._values[name.prefix] = new Set());\n vs.add(name);\n return name;\n }\n getScopeValue(prefix, keyOrRef) {\n return this._extScope.getValue(prefix, keyOrRef);\n }\n // return code that assigns values in the external scope to the names that are used internally\n // (same names that were returned by gen.scopeName or gen.scopeValue)\n scopeRefs(scopeName) {\n return this._extScope.scopeRefs(scopeName, this._values);\n }\n scopeCode() {\n return this._extScope.scopeCode(this._values);\n }\n _def(varKind, nameOrPrefix, rhs, constant) {\n const name = this._scope.toName(nameOrPrefix);\n if (rhs !== undefined && constant)\n this._constants[name.str] = rhs;\n this._leafNode(new Def(varKind, name, rhs));\n return name;\n }\n // `const` declaration (`var` in es5 mode)\n const(nameOrPrefix, rhs, _constant) {\n return this._def(scope_1.varKinds.const, nameOrPrefix, rhs, _constant);\n }\n // `let` declaration with optional assignment (`var` in es5 mode)\n let(nameOrPrefix, rhs, _constant) {\n return this._def(scope_1.varKinds.let, nameOrPrefix, rhs, _constant);\n }\n // `var` declaration with optional assignment\n var(nameOrPrefix, rhs, _constant) {\n return this._def(scope_1.varKinds.var, nameOrPrefix, rhs, _constant);\n }\n // assignment code\n assign(lhs, rhs, sideEffects) {\n return this._leafNode(new Assign(lhs, rhs, sideEffects));\n }\n // `+=` code\n add(lhs, rhs) {\n return this._leafNode(new AssignOp(lhs, exports.operators.ADD, rhs));\n }\n // appends passed SafeExpr to code or executes Block\n code(c) {\n if (typeof c == \"function\")\n c();\n else if (c !== code_1.nil)\n this._leafNode(new AnyCode(c));\n return this;\n }\n // returns code for object literal for the passed argument list of key-value pairs\n object(...keyValues) {\n const code = [\"{\"];\n for (const [key, value] of keyValues) {\n if (code.length > 1)\n code.push(\",\");\n code.push(key);\n if (key !== value || this.opts.es5) {\n code.push(\":\");\n (0, code_1.addCodeArg)(code, value);\n }\n }\n code.push(\"}\");\n return new code_1._Code(code);\n }\n // `if` clause (or statement if `thenBody` and, optionally, `elseBody` are passed)\n if(condition, thenBody, elseBody) {\n 
this._blockNode(new If(condition));\n if (thenBody && elseBody) {\n this.code(thenBody).else().code(elseBody).endIf();\n }\n else if (thenBody) {\n this.code(thenBody).endIf();\n }\n else if (elseBody) {\n throw new Error('CodeGen: \"else\" body without \"then\" body');\n }\n return this;\n }\n // `else if` clause - invalid without `if` or after `else` clauses\n elseIf(condition) {\n return this._elseNode(new If(condition));\n }\n // `else` clause - only valid after `if` or `else if` clauses\n else() {\n return this._elseNode(new Else());\n }\n // end `if` statement (needed if gen.if was used only with condition)\n endIf() {\n return this._endBlockNode(If, Else);\n }\n _for(node, forBody) {\n this._blockNode(node);\n if (forBody)\n this.code(forBody).endFor();\n return this;\n }\n // a generic `for` clause (or statement if `forBody` is passed)\n for(iteration, forBody) {\n return this._for(new ForLoop(iteration), forBody);\n }\n // `for` statement for a range of values\n forRange(nameOrPrefix, from, to, forBody, varKind = this.opts.es5 ? scope_1.varKinds.var : scope_1.varKinds.let) {\n const name = this._scope.toName(nameOrPrefix);\n return this._for(new ForRange(varKind, name, from, to), () => forBody(name));\n }\n // `for-of` statement (in es5 mode replace with a normal for loop)\n forOf(nameOrPrefix, iterable, forBody, varKind = scope_1.varKinds.const) {\n const name = this._scope.toName(nameOrPrefix);\n if (this.opts.es5) {\n const arr = iterable instanceof code_1.Name ? iterable : this.var(\"_arr\", iterable);\n return this.forRange(\"_i\", 0, (0, code_1._) `${arr}.length`, (i) => {\n this.var(name, (0, code_1._) `${arr}[${i}]`);\n forBody(name);\n });\n }\n return this._for(new ForIter(\"of\", varKind, name, iterable), () => forBody(name));\n }\n // `for-in` statement.\n // With option `ownProperties` replaced with a `for-of` loop for object keys\n forIn(nameOrPrefix, obj, forBody, varKind = this.opts.es5 ? 
scope_1.varKinds.var : scope_1.varKinds.const) {\n if (this.opts.ownProperties) {\n return this.forOf(nameOrPrefix, (0, code_1._) `Object.keys(${obj})`, forBody);\n }\n const name = this._scope.toName(nameOrPrefix);\n return this._for(new ForIter(\"in\", varKind, name, obj), () => forBody(name));\n }\n // end `for` loop\n endFor() {\n return this._endBlockNode(For);\n }\n // `label` statement\n label(label) {\n return this._leafNode(new Label(label));\n }\n // `break` statement\n break(label) {\n return this._leafNode(new Break(label));\n }\n // `return` statement\n return(value) {\n const node = new Return();\n this._blockNode(node);\n this.code(value);\n if (node.nodes.length !== 1)\n throw new Error('CodeGen: \"return\" should have one node');\n return this._endBlockNode(Return);\n }\n // `try` statement\n try(tryBody, catchCode, finallyCode) {\n if (!catchCode && !finallyCode)\n throw new Error('CodeGen: \"try\" without \"catch\" and \"finally\"');\n const node = new Try();\n this._blockNode(node);\n this.code(tryBody);\n if (catchCode) {\n const error = this.name(\"e\");\n this._currNode = node.catch = new Catch(error);\n catchCode(error);\n }\n if (finallyCode) {\n this._currNode = node.finally = new Finally();\n this.code(finallyCode);\n }\n return this._endBlockNode(Catch, Finally);\n }\n // `throw` statement\n throw(error) {\n return this._leafNode(new Throw(error));\n }\n // start self-balancing block\n block(body, nodeCount) {\n this._blockStarts.push(this._nodes.length);\n if (body)\n this.code(body).endBlock(nodeCount);\n return this;\n }\n // end the current self-balancing block\n endBlock(nodeCount) {\n const len = this._blockStarts.pop();\n if (len === undefined)\n throw new Error(\"CodeGen: not in self-balancing block\");\n const toClose = this._nodes.length - len;\n if (toClose < 0 || (nodeCount !== undefined && toClose !== nodeCount)) {\n throw new Error(`CodeGen: wrong number of nodes: ${toClose} vs ${nodeCount} expected`);\n }\n this._nodes.length = len;\n return this;\n }\n // `function` heading (or definition if funcBody is passed)\n func(name, args = code_1.nil, async, funcBody) {\n this._blockNode(new Func(name, args, async));\n if (funcBody)\n this.code(funcBody).endFunc();\n return this;\n }\n // end function definition\n endFunc() {\n return this._endBlockNode(Func);\n }\n optimize(n = 1) {\n while (n-- > 0) {\n this._root.optimizeNodes();\n this._root.optimizeNames(this._root.names, this._constants);\n }\n }\n _leafNode(node) {\n this._currNode.nodes.push(node);\n return this;\n }\n _blockNode(node) {\n this._currNode.nodes.push(node);\n this._nodes.push(node);\n }\n _endBlockNode(N1, N2) {\n const n = this._currNode;\n if (n instanceof N1 || (N2 && n instanceof N2)) {\n this._nodes.pop();\n return this;\n }\n throw new Error(`CodeGen: not in block \"${N2 ? `${N1.kind}/${N2.kind}` : N1.kind}\"`);\n }\n _elseNode(node) {\n const n = this._currNode;\n if (!(n instanceof If)) {\n throw new Error('CodeGen: \"else\" without \"if\"');\n }\n this._currNode = n.else = node;\n return this;\n }\n get _root() {\n return this._nodes[0];\n }\n get _currNode() {\n const ns = this._nodes;\n return ns[ns.length - 1];\n }\n set _currNode(node) {\n const ns = this._nodes;\n ns[ns.length - 1] = node;\n }\n}\nexports.CodeGen = CodeGen;\nfunction addNames(names, from) {\n for (const n in from)\n names[n] = (names[n] || 0) + (from[n] || 0);\n return names;\n}\nfunction addExprNames(names, from) {\n return from instanceof code_1._CodeOrName ? 
addNames(names, from.names) : names;\n}\nfunction optimizeExpr(expr, names, constants) {\n if (expr instanceof code_1.Name)\n return replaceName(expr);\n if (!canOptimize(expr))\n return expr;\n return new code_1._Code(expr._items.reduce((items, c) => {\n if (c instanceof code_1.Name)\n c = replaceName(c);\n if (c instanceof code_1._Code)\n items.push(...c._items);\n else\n items.push(c);\n return items;\n }, []));\n function replaceName(n) {\n const c = constants[n.str];\n if (c === undefined || names[n.str] !== 1)\n return n;\n delete names[n.str];\n return c;\n }\n function canOptimize(e) {\n return (e instanceof code_1._Code &&\n e._items.some((c) => c instanceof code_1.Name && names[c.str] === 1 && constants[c.str] !== undefined));\n }\n}\nfunction subtractNames(names, from) {\n for (const n in from)\n names[n] = (names[n] || 0) - (from[n] || 0);\n}\nfunction not(x) {\n return typeof x == \"boolean\" || typeof x == \"number\" || x === null ? !x : (0, code_1._) `!${par(x)}`;\n}\nexports.not = not;\nconst andCode = mappend(exports.operators.AND);\n// boolean AND (&&) expression with the passed arguments\nfunction and(...args) {\n return args.reduce(andCode);\n}\nexports.and = and;\nconst orCode = mappend(exports.operators.OR);\n// boolean OR (||) expression with the passed arguments\nfunction or(...args) {\n return args.reduce(orCode);\n}\nexports.or = or;\nfunction mappend(op) {\n return (x, y) => (x === code_1.nil ? y : y === code_1.nil ? x : (0, code_1._) `${par(x)} ${op} ${par(y)}`);\n}\nfunction par(x) {\n return x instanceof code_1.Name ? x : (0, code_1._) `(${x})`;\n}\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ValueScope = exports.ValueScopeName = exports.Scope = exports.varKinds = exports.UsedValueState = void 0;\nconst code_1 = require(\"./code\");\nclass ValueError extends Error {\n constructor(name) {\n super(`CodeGen: \"code\" for ${name} not defined`);\n this.value = name.value;\n }\n}\nvar UsedValueState;\n(function (UsedValueState) {\n UsedValueState[UsedValueState[\"Started\"] = 0] = \"Started\";\n UsedValueState[UsedValueState[\"Completed\"] = 1] = \"Completed\";\n})(UsedValueState = exports.UsedValueState || (exports.UsedValueState = {}));\nexports.varKinds = {\n const: new code_1.Name(\"const\"),\n let: new code_1.Name(\"let\"),\n var: new code_1.Name(\"var\"),\n};\nclass Scope {\n constructor({ prefixes, parent } = {}) {\n this._names = {};\n this._prefixes = prefixes;\n this._parent = parent;\n }\n toName(nameOrPrefix) {\n return nameOrPrefix instanceof code_1.Name ? nameOrPrefix : this.name(nameOrPrefix);\n }\n name(prefix) {\n return new code_1.Name(this._newName(prefix));\n }\n _newName(prefix) {\n const ng = this._names[prefix] || this._nameGroup(prefix);\n return `${prefix}${ng.index++}`;\n }\n _nameGroup(prefix) {\n var _a, _b;\n if (((_b = (_a = this._parent) === null || _a === void 0 ? void 0 : _a._prefixes) === null || _b === void 0 ? 
void 0 : _b.has(prefix)) || (this._prefixes && !this._prefixes.has(prefix))) {\n throw new Error(`CodeGen: prefix \"${prefix}\" is not allowed in this scope`);\n }\n return (this._names[prefix] = { prefix, index: 0 });\n }\n}\nexports.Scope = Scope;\nclass ValueScopeName extends code_1.Name {\n constructor(prefix, nameStr) {\n super(nameStr);\n this.prefix = prefix;\n }\n setValue(value, { property, itemIndex }) {\n this.value = value;\n this.scopePath = (0, code_1._) `.${new code_1.Name(property)}[${itemIndex}]`;\n }\n}\nexports.ValueScopeName = ValueScopeName;\nconst line = (0, code_1._) `\\n`;\nclass ValueScope extends Scope {\n constructor(opts) {\n super(opts);\n this._values = {};\n this._scope = opts.scope;\n this.opts = { ...opts, _n: opts.lines ? line : code_1.nil };\n }\n get() {\n return this._scope;\n }\n name(prefix) {\n return new ValueScopeName(prefix, this._newName(prefix));\n }\n value(nameOrPrefix, value) {\n var _a;\n if (value.ref === undefined)\n throw new Error(\"CodeGen: ref must be passed in value\");\n const name = this.toName(nameOrPrefix);\n const { prefix } = name;\n const valueKey = (_a = value.key) !== null && _a !== void 0 ? _a : value.ref;\n let vs = this._values[prefix];\n if (vs) {\n const _name = vs.get(valueKey);\n if (_name)\n return _name;\n }\n else {\n vs = this._values[prefix] = new Map();\n }\n vs.set(valueKey, name);\n const s = this._scope[prefix] || (this._scope[prefix] = []);\n const itemIndex = s.length;\n s[itemIndex] = value.ref;\n name.setValue(value, { property: prefix, itemIndex });\n return name;\n }\n getValue(prefix, keyOrRef) {\n const vs = this._values[prefix];\n if (!vs)\n return;\n return vs.get(keyOrRef);\n }\n scopeRefs(scopeName, values = this._values) {\n return this._reduceValues(values, (name) => {\n if (name.scopePath === undefined)\n throw new Error(`CodeGen: name \"${name}\" has no value`);\n return (0, code_1._) `${scopeName}${name.scopePath}`;\n });\n }\n scopeCode(values = this._values, usedValues, getCode) {\n return this._reduceValues(values, (name) => {\n if (name.value === undefined)\n throw new Error(`CodeGen: name \"${name}\" has no value`);\n return name.value.code;\n }, usedValues, getCode);\n }\n _reduceValues(values, valueCode, usedValues = {}, getCode) {\n let code = code_1.nil;\n for (const prefix in values) {\n const vs = values[prefix];\n if (!vs)\n continue;\n const nameSet = (usedValues[prefix] = usedValues[prefix] || new Map());\n vs.forEach((name) => {\n if (nameSet.has(name))\n return;\n nameSet.set(name, UsedValueState.Started);\n let c = valueCode(name);\n if (c) {\n const def = this.opts.es5 ? exports.varKinds.var : exports.varKinds.const;\n code = (0, code_1._) `${code}${def} ${name} = ${c};${this.opts._n}`;\n }\n else if ((c = getCode === null || getCode === void 0 ? 
void 0 : getCode(name))) {\n code = (0, code_1._) `${code}${c}${this.opts._n}`;\n }\n else {\n throw new ValueError(name);\n }\n nameSet.set(name, UsedValueState.Completed);\n });\n }\n return code;\n }\n}\nexports.ValueScope = ValueScope;\n//# sourceMappingURL=scope.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.extendErrors = exports.resetErrorsCount = exports.reportExtraError = exports.reportError = exports.keyword$DataError = exports.keywordError = void 0;\nconst codegen_1 = require(\"./codegen\");\nconst util_1 = require(\"./util\");\nconst names_1 = require(\"./names\");\nexports.keywordError = {\n message: ({ keyword }) => (0, codegen_1.str) `must pass \"${keyword}\" keyword validation`,\n};\nexports.keyword$DataError = {\n message: ({ keyword, schemaType }) => schemaType\n ? (0, codegen_1.str) `\"${keyword}\" keyword must be ${schemaType} ($data)`\n : (0, codegen_1.str) `\"${keyword}\" keyword is invalid ($data)`,\n};\nfunction reportError(cxt, error = exports.keywordError, errorPaths, overrideAllErrors) {\n const { it } = cxt;\n const { gen, compositeRule, allErrors } = it;\n const errObj = errorObjectCode(cxt, error, errorPaths);\n if (overrideAllErrors !== null && overrideAllErrors !== void 0 ? overrideAllErrors : (compositeRule || allErrors)) {\n addError(gen, errObj);\n }\n else {\n returnErrors(it, (0, codegen_1._) `[${errObj}]`);\n }\n}\nexports.reportError = reportError;\nfunction reportExtraError(cxt, error = exports.keywordError, errorPaths) {\n const { it } = cxt;\n const { gen, compositeRule, allErrors } = it;\n const errObj = errorObjectCode(cxt, error, errorPaths);\n addError(gen, errObj);\n if (!(compositeRule || allErrors)) {\n returnErrors(it, names_1.default.vErrors);\n }\n}\nexports.reportExtraError = reportExtraError;\nfunction resetErrorsCount(gen, errsCount) {\n gen.assign(names_1.default.errors, errsCount);\n gen.if((0, codegen_1._) `${names_1.default.vErrors} !== null`, () => gen.if(errsCount, () => gen.assign((0, codegen_1._) `${names_1.default.vErrors}.length`, errsCount), () => gen.assign(names_1.default.vErrors, null)));\n}\nexports.resetErrorsCount = resetErrorsCount;\nfunction extendErrors({ gen, keyword, schemaValue, data, errsCount, it, }) {\n /* istanbul ignore if */\n if (errsCount === undefined)\n throw new Error(\"ajv implementation error\");\n const err = gen.name(\"err\");\n gen.forRange(\"i\", errsCount, names_1.default.errors, (i) => {\n gen.const(err, (0, codegen_1._) `${names_1.default.vErrors}[${i}]`);\n gen.if((0, codegen_1._) `${err}.instancePath === undefined`, () => gen.assign((0, codegen_1._) `${err}.instancePath`, (0, codegen_1.strConcat)(names_1.default.instancePath, it.errorPath)));\n gen.assign((0, codegen_1._) `${err}.schemaPath`, (0, codegen_1.str) `${it.errSchemaPath}/${keyword}`);\n if (it.opts.verbose) {\n gen.assign((0, codegen_1._) `${err}.schema`, schemaValue);\n gen.assign((0, codegen_1._) `${err}.data`, data);\n }\n });\n}\nexports.extendErrors = extendErrors;\nfunction addError(gen, errObj) {\n const err = gen.const(\"err\", errObj);\n gen.if((0, codegen_1._) `${names_1.default.vErrors} === null`, () => gen.assign(names_1.default.vErrors, (0, codegen_1._) `[${err}]`), (0, codegen_1._) `${names_1.default.vErrors}.push(${err})`);\n gen.code((0, codegen_1._) `${names_1.default.errors}++`);\n}\nfunction returnErrors(it, errs) {\n const { gen, validateName, schemaEnv } = it;\n if (schemaEnv.$async) {\n gen.throw((0, codegen_1._) `new ${it.ValidationError}(${errs})`);\n }\n 
else {\n gen.assign((0, codegen_1._) `${validateName}.errors`, errs);\n gen.return(false);\n }\n}\nconst E = {\n keyword: new codegen_1.Name(\"keyword\"),\n schemaPath: new codegen_1.Name(\"schemaPath\"),\n params: new codegen_1.Name(\"params\"),\n propertyName: new codegen_1.Name(\"propertyName\"),\n message: new codegen_1.Name(\"message\"),\n schema: new codegen_1.Name(\"schema\"),\n parentSchema: new codegen_1.Name(\"parentSchema\"),\n};\nfunction errorObjectCode(cxt, error, errorPaths) {\n const { createErrors } = cxt.it;\n if (createErrors === false)\n return (0, codegen_1._) `{}`;\n return errorObject(cxt, error, errorPaths);\n}\nfunction errorObject(cxt, error, errorPaths = {}) {\n const { gen, it } = cxt;\n const keyValues = [\n errorInstancePath(it, errorPaths),\n errorSchemaPath(cxt, errorPaths),\n ];\n extraErrorProps(cxt, error, keyValues);\n return gen.object(...keyValues);\n}\nfunction errorInstancePath({ errorPath }, { instancePath }) {\n const instPath = instancePath\n ? (0, codegen_1.str) `${errorPath}${(0, util_1.getErrorPath)(instancePath, util_1.Type.Str)}`\n : errorPath;\n return [names_1.default.instancePath, (0, codegen_1.strConcat)(names_1.default.instancePath, instPath)];\n}\nfunction errorSchemaPath({ keyword, it: { errSchemaPath } }, { schemaPath, parentSchema }) {\n let schPath = parentSchema ? errSchemaPath : (0, codegen_1.str) `${errSchemaPath}/${keyword}`;\n if (schemaPath) {\n schPath = (0, codegen_1.str) `${schPath}${(0, util_1.getErrorPath)(schemaPath, util_1.Type.Str)}`;\n }\n return [E.schemaPath, schPath];\n}\nfunction extraErrorProps(cxt, { params, message }, keyValues) {\n const { keyword, data, schemaValue, it } = cxt;\n const { opts, propertyName, topSchemaRef, schemaPath } = it;\n keyValues.push([E.keyword, keyword], [E.params, typeof params == \"function\" ? params(cxt) : params || (0, codegen_1._) `{}`]);\n if (opts.messages) {\n keyValues.push([E.message, typeof message == \"function\" ? message(cxt) : message]);\n }\n if (opts.verbose) {\n keyValues.push([E.schema, schemaValue], [E.parentSchema, (0, codegen_1._) `${topSchemaRef}${schemaPath}`], [names_1.default.data, data]);\n }\n if (propertyName)\n keyValues.push([E.propertyName, propertyName]);\n}\n//# sourceMappingURL=errors.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.resolveSchema = exports.getCompilingSchema = exports.resolveRef = exports.compileSchema = exports.SchemaEnv = void 0;\nconst codegen_1 = require(\"./codegen\");\nconst validation_error_1 = require(\"../runtime/validation_error\");\nconst names_1 = require(\"./names\");\nconst resolve_1 = require(\"./resolve\");\nconst util_1 = require(\"./util\");\nconst validate_1 = require(\"./validate\");\nclass SchemaEnv {\n constructor(env) {\n var _a;\n this.refs = {};\n this.dynamicAnchors = {};\n let schema;\n if (typeof env.schema == \"object\")\n schema = env.schema;\n this.schema = env.schema;\n this.schemaId = env.schemaId;\n this.root = env.root || this;\n this.baseId = (_a = env.baseId) !== null && _a !== void 0 ? _a : (0, resolve_1.normalizeId)(schema === null || schema === void 0 ? void 0 : schema[env.schemaId || \"$id\"]);\n this.schemaPath = env.schemaPath;\n this.localRefs = env.localRefs;\n this.meta = env.meta;\n this.$async = schema === null || schema === void 0 ? 
void 0 : schema.$async;\n this.refs = {};\n }\n}\nexports.SchemaEnv = SchemaEnv;\n// let codeSize = 0\n// let nodeCount = 0\n// Compiles schema in SchemaEnv\nfunction compileSchema(sch) {\n // TODO refactor - remove compilations\n const _sch = getCompilingSchema.call(this, sch);\n if (_sch)\n return _sch;\n const rootId = (0, resolve_1.getFullPath)(this.opts.uriResolver, sch.root.baseId); // TODO if getFullPath removed 1 tests fails\n const { es5, lines } = this.opts.code;\n const { ownProperties } = this.opts;\n const gen = new codegen_1.CodeGen(this.scope, { es5, lines, ownProperties });\n let _ValidationError;\n if (sch.$async) {\n _ValidationError = gen.scopeValue(\"Error\", {\n ref: validation_error_1.default,\n code: (0, codegen_1._) `require(\"ajv/dist/runtime/validation_error\").default`,\n });\n }\n const validateName = gen.scopeName(\"validate\");\n sch.validateName = validateName;\n const schemaCxt = {\n gen,\n allErrors: this.opts.allErrors,\n data: names_1.default.data,\n parentData: names_1.default.parentData,\n parentDataProperty: names_1.default.parentDataProperty,\n dataNames: [names_1.default.data],\n dataPathArr: [codegen_1.nil],\n dataLevel: 0,\n dataTypes: [],\n definedProperties: new Set(),\n topSchemaRef: gen.scopeValue(\"schema\", this.opts.code.source === true\n ? { ref: sch.schema, code: (0, codegen_1.stringify)(sch.schema) }\n : { ref: sch.schema }),\n validateName,\n ValidationError: _ValidationError,\n schema: sch.schema,\n schemaEnv: sch,\n rootId,\n baseId: sch.baseId || rootId,\n schemaPath: codegen_1.nil,\n errSchemaPath: sch.schemaPath || (this.opts.jtd ? \"\" : \"#\"),\n errorPath: (0, codegen_1._) `\"\"`,\n opts: this.opts,\n self: this,\n };\n let sourceCode;\n try {\n this._compilations.add(sch);\n (0, validate_1.validateFunctionCode)(schemaCxt);\n gen.optimize(this.opts.code.optimize);\n // gen.optimize(1)\n const validateCode = gen.toString();\n sourceCode = `${gen.scopeRefs(names_1.default.scope)}return ${validateCode}`;\n // console.log((codeSize += sourceCode.length), (nodeCount += gen.nodeCount))\n if (this.opts.code.process)\n sourceCode = this.opts.code.process(sourceCode, sch);\n // console.log(\"\\n\\n\\n *** \\n\", sourceCode)\n const makeValidate = new Function(`${names_1.default.self}`, `${names_1.default.scope}`, sourceCode);\n const validate = makeValidate(this, this.scope.get());\n this.scope.value(validateName, { ref: validate });\n validate.errors = null;\n validate.schema = sch.schema;\n validate.schemaEnv = sch;\n if (sch.$async)\n validate.$async = true;\n if (this.opts.code.source === true) {\n validate.source = { validateName, validateCode, scopeValues: gen._values };\n }\n if (this.opts.unevaluated) {\n const { props, items } = schemaCxt;\n validate.evaluated = {\n props: props instanceof codegen_1.Name ? undefined : props,\n items: items instanceof codegen_1.Name ? 
undefined : items,\n dynamicProps: props instanceof codegen_1.Name,\n dynamicItems: items instanceof codegen_1.Name,\n };\n if (validate.source)\n validate.source.evaluated = (0, codegen_1.stringify)(validate.evaluated);\n }\n sch.validate = validate;\n return sch;\n }\n catch (e) {\n delete sch.validate;\n delete sch.validateName;\n if (sourceCode)\n this.logger.error(\"Error compiling schema, function code:\", sourceCode);\n // console.log(\"\\n\\n\\n *** \\n\", sourceCode, this.opts)\n throw e;\n }\n finally {\n this._compilations.delete(sch);\n }\n}\nexports.compileSchema = compileSchema;\nfunction resolveRef(root, baseId, ref) {\n var _a;\n ref = (0, resolve_1.resolveUrl)(this.opts.uriResolver, baseId, ref);\n const schOrFunc = root.refs[ref];\n if (schOrFunc)\n return schOrFunc;\n let _sch = resolve.call(this, root, ref);\n if (_sch === undefined) {\n const schema = (_a = root.localRefs) === null || _a === void 0 ? void 0 : _a[ref]; // TODO maybe localRefs should hold SchemaEnv\n const { schemaId } = this.opts;\n if (schema)\n _sch = new SchemaEnv({ schema, schemaId, root, baseId });\n }\n if (_sch === undefined)\n return;\n return (root.refs[ref] = inlineOrCompile.call(this, _sch));\n}\nexports.resolveRef = resolveRef;\nfunction inlineOrCompile(sch) {\n if ((0, resolve_1.inlineRef)(sch.schema, this.opts.inlineRefs))\n return sch.schema;\n return sch.validate ? sch : compileSchema.call(this, sch);\n}\n// Index of schema compilation in the currently compiled list\nfunction getCompilingSchema(schEnv) {\n for (const sch of this._compilations) {\n if (sameSchemaEnv(sch, schEnv))\n return sch;\n }\n}\nexports.getCompilingSchema = getCompilingSchema;\nfunction sameSchemaEnv(s1, s2) {\n return s1.schema === s2.schema && s1.root === s2.root && s1.baseId === s2.baseId;\n}\n// resolve and compile the references ($ref)\n// TODO returns AnySchemaObject (if the schema can be inlined) or validation function\nfunction resolve(root, // information about the root schema for the current schema\nref // reference to resolve\n) {\n let sch;\n while (typeof (sch = this.refs[ref]) == \"string\")\n ref = sch;\n return sch || this.schemas[ref] || resolveSchema.call(this, root, ref);\n}\n// Resolve schema, its root and baseId\nfunction resolveSchema(root, // root object with properties schema, refs TODO below SchemaEnv is assigned to it\nref // reference to resolve\n) {\n const p = this.opts.uriResolver.parse(ref);\n const refPath = (0, resolve_1._getFullPath)(this.opts.uriResolver, p);\n let baseId = (0, resolve_1.getFullPath)(this.opts.uriResolver, root.baseId, undefined);\n // TODO `Object.keys(root.schema).length > 0` should not be needed - but removing breaks 2 tests\n if (Object.keys(root.schema).length > 0 && refPath === baseId) {\n return getJsonPointer.call(this, p, root);\n }\n const id = (0, resolve_1.normalizeId)(refPath);\n const schOrRef = this.refs[id] || this.schemas[id];\n if (typeof schOrRef == \"string\") {\n const sch = resolveSchema.call(this, root, schOrRef);\n if (typeof (sch === null || sch === void 0 ? void 0 : sch.schema) !== \"object\")\n return;\n return getJsonPointer.call(this, p, sch);\n }\n if (typeof (schOrRef === null || schOrRef === void 0 ? 
void 0 : schOrRef.schema) !== \"object\")\n return;\n if (!schOrRef.validate)\n compileSchema.call(this, schOrRef);\n if (id === (0, resolve_1.normalizeId)(ref)) {\n const { schema } = schOrRef;\n const { schemaId } = this.opts;\n const schId = schema[schemaId];\n if (schId)\n baseId = (0, resolve_1.resolveUrl)(this.opts.uriResolver, baseId, schId);\n return new SchemaEnv({ schema, schemaId, root, baseId });\n }\n return getJsonPointer.call(this, p, schOrRef);\n}\nexports.resolveSchema = resolveSchema;\nconst PREVENT_SCOPE_CHANGE = new Set([\n \"properties\",\n \"patternProperties\",\n \"enum\",\n \"dependencies\",\n \"definitions\",\n]);\nfunction getJsonPointer(parsedRef, { baseId, schema, root }) {\n var _a;\n if (((_a = parsedRef.fragment) === null || _a === void 0 ? void 0 : _a[0]) !== \"/\")\n return;\n for (const part of parsedRef.fragment.slice(1).split(\"/\")) {\n if (typeof schema === \"boolean\")\n return;\n const partSchema = schema[(0, util_1.unescapeFragment)(part)];\n if (partSchema === undefined)\n return;\n schema = partSchema;\n // TODO PREVENT_SCOPE_CHANGE could be defined in keyword def?\n const schId = typeof schema === \"object\" && schema[this.opts.schemaId];\n if (!PREVENT_SCOPE_CHANGE.has(part) && schId) {\n baseId = (0, resolve_1.resolveUrl)(this.opts.uriResolver, baseId, schId);\n }\n }\n let env;\n if (typeof schema != \"boolean\" && schema.$ref && !(0, util_1.schemaHasRulesButRef)(schema, this.RULES)) {\n const $ref = (0, resolve_1.resolveUrl)(this.opts.uriResolver, baseId, schema.$ref);\n env = resolveSchema.call(this, root, $ref);\n }\n // even though resolution failed we need to return SchemaEnv to throw exception\n // so that compileAsync loads missing schema.\n const { schemaId } = this.opts;\n env = env || new SchemaEnv({ schema, schemaId, root, baseId });\n if (env.schema !== env.root.schema)\n return env;\n return undefined;\n}\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = require(\"./codegen\");\nconst names = {\n // validation function arguments\n data: new codegen_1.Name(\"data\"),\n // args passed from referencing schema\n valCxt: new codegen_1.Name(\"valCxt\"),\n instancePath: new codegen_1.Name(\"instancePath\"),\n parentData: new codegen_1.Name(\"parentData\"),\n parentDataProperty: new codegen_1.Name(\"parentDataProperty\"),\n rootData: new codegen_1.Name(\"rootData\"),\n dynamicAnchors: new codegen_1.Name(\"dynamicAnchors\"),\n // function scoped variables\n vErrors: new codegen_1.Name(\"vErrors\"),\n errors: new codegen_1.Name(\"errors\"),\n this: new codegen_1.Name(\"this\"),\n // \"globals\"\n self: new codegen_1.Name(\"self\"),\n scope: new codegen_1.Name(\"scope\"),\n // JTD serialize/parse name for JSON string and position\n json: new codegen_1.Name(\"json\"),\n jsonPos: new codegen_1.Name(\"jsonPos\"),\n jsonLen: new codegen_1.Name(\"jsonLen\"),\n jsonPart: new codegen_1.Name(\"jsonPart\"),\n};\nexports.default = names;\n//# sourceMappingURL=names.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst resolve_1 = require(\"./resolve\");\nclass MissingRefError extends Error {\n constructor(resolver, baseId, ref, msg) {\n super(msg || `can't resolve reference ${ref} from id ${baseId}`);\n this.missingRef = (0, resolve_1.resolveUrl)(resolver, baseId, ref);\n this.missingSchema = (0, resolve_1.normalizeId)((0, resolve_1.getFullPath)(resolver, this.missingRef));\n }\n}\nexports.default = MissingRefError;\n//# 
sourceMappingURL=ref_error.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getSchemaRefs = exports.resolveUrl = exports.normalizeId = exports._getFullPath = exports.getFullPath = exports.inlineRef = void 0;\nconst util_1 = require(\"./util\");\nconst equal = require(\"fast-deep-equal\");\nconst traverse = require(\"json-schema-traverse\");\n// TODO refactor to use keyword definitions\nconst SIMPLE_INLINED = new Set([\n \"type\",\n \"format\",\n \"pattern\",\n \"maxLength\",\n \"minLength\",\n \"maxProperties\",\n \"minProperties\",\n \"maxItems\",\n \"minItems\",\n \"maximum\",\n \"minimum\",\n \"uniqueItems\",\n \"multipleOf\",\n \"required\",\n \"enum\",\n \"const\",\n]);\nfunction inlineRef(schema, limit = true) {\n if (typeof schema == \"boolean\")\n return true;\n if (limit === true)\n return !hasRef(schema);\n if (!limit)\n return false;\n return countKeys(schema) <= limit;\n}\nexports.inlineRef = inlineRef;\nconst REF_KEYWORDS = new Set([\n \"$ref\",\n \"$recursiveRef\",\n \"$recursiveAnchor\",\n \"$dynamicRef\",\n \"$dynamicAnchor\",\n]);\nfunction hasRef(schema) {\n for (const key in schema) {\n if (REF_KEYWORDS.has(key))\n return true;\n const sch = schema[key];\n if (Array.isArray(sch) && sch.some(hasRef))\n return true;\n if (typeof sch == \"object\" && hasRef(sch))\n return true;\n }\n return false;\n}\nfunction countKeys(schema) {\n let count = 0;\n for (const key in schema) {\n if (key === \"$ref\")\n return Infinity;\n count++;\n if (SIMPLE_INLINED.has(key))\n continue;\n if (typeof schema[key] == \"object\") {\n (0, util_1.eachItem)(schema[key], (sch) => (count += countKeys(sch)));\n }\n if (count === Infinity)\n return Infinity;\n }\n return count;\n}\nfunction getFullPath(resolver, id = \"\", normalize) {\n if (normalize !== false)\n id = normalizeId(id);\n const p = resolver.parse(id);\n return _getFullPath(resolver, p);\n}\nexports.getFullPath = getFullPath;\nfunction _getFullPath(resolver, p) {\n const serialized = resolver.serialize(p);\n return serialized.split(\"#\")[0] + \"#\";\n}\nexports._getFullPath = _getFullPath;\nconst TRAILING_SLASH_HASH = /#\\/?$/;\nfunction normalizeId(id) {\n return id ? id.replace(TRAILING_SLASH_HASH, \"\") : \"\";\n}\nexports.normalizeId = normalizeId;\nfunction resolveUrl(resolver, baseId, id) {\n id = normalizeId(id);\n return resolver.resolve(baseId, id);\n}\nexports.resolveUrl = resolveUrl;\nconst ANCHOR = /^[a-z_][-a-z0-9._]*$/i;\nfunction getSchemaRefs(schema, baseId) {\n if (typeof schema == \"boolean\")\n return {};\n const { schemaId, uriResolver } = this.opts;\n const schId = normalizeId(schema[schemaId] || baseId);\n const baseIds = { \"\": schId };\n const pathPrefix = getFullPath(uriResolver, schId, false);\n const localRefs = {};\n const schemaRefs = new Set();\n traverse(schema, { allKeys: true }, (sch, jsonPtr, _, parentJsonPtr) => {\n if (parentJsonPtr === undefined)\n return;\n const fullPath = pathPrefix + jsonPtr;\n let baseId = baseIds[parentJsonPtr];\n if (typeof sch[schemaId] == \"string\")\n baseId = addRef.call(this, sch[schemaId]);\n addAnchor.call(this, sch.$anchor);\n addAnchor.call(this, sch.$dynamicAnchor);\n baseIds[jsonPtr] = baseId;\n function addRef(ref) {\n // eslint-disable-next-line @typescript-eslint/unbound-method\n const _resolve = this.opts.uriResolver.resolve;\n ref = normalizeId(baseId ? 
_resolve(baseId, ref) : ref);\n if (schemaRefs.has(ref))\n throw ambiguos(ref);\n schemaRefs.add(ref);\n let schOrRef = this.refs[ref];\n if (typeof schOrRef == \"string\")\n schOrRef = this.refs[schOrRef];\n if (typeof schOrRef == \"object\") {\n checkAmbiguosRef(sch, schOrRef.schema, ref);\n }\n else if (ref !== normalizeId(fullPath)) {\n if (ref[0] === \"#\") {\n checkAmbiguosRef(sch, localRefs[ref], ref);\n localRefs[ref] = sch;\n }\n else {\n this.refs[ref] = fullPath;\n }\n }\n return ref;\n }\n function addAnchor(anchor) {\n if (typeof anchor == \"string\") {\n if (!ANCHOR.test(anchor))\n throw new Error(`invalid anchor \"${anchor}\"`);\n addRef.call(this, `#${anchor}`);\n }\n }\n });\n return localRefs;\n function checkAmbiguosRef(sch1, sch2, ref) {\n if (sch2 !== undefined && !equal(sch1, sch2))\n throw ambiguos(ref);\n }\n function ambiguos(ref) {\n return new Error(`reference \"${ref}\" resolves to more than one schema`);\n }\n}\nexports.getSchemaRefs = getSchemaRefs;\n//# sourceMappingURL=resolve.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getRules = exports.isJSONType = void 0;\nconst _jsonTypes = [\"string\", \"number\", \"integer\", \"boolean\", \"null\", \"object\", \"array\"];\nconst jsonTypes = new Set(_jsonTypes);\nfunction isJSONType(x) {\n return typeof x == \"string\" && jsonTypes.has(x);\n}\nexports.isJSONType = isJSONType;\nfunction getRules() {\n const groups = {\n number: { type: \"number\", rules: [] },\n string: { type: \"string\", rules: [] },\n array: { type: \"array\", rules: [] },\n object: { type: \"object\", rules: [] },\n };\n return {\n types: { ...groups, integer: true, boolean: true, null: true },\n rules: [{ rules: [] }, groups.number, groups.string, groups.array, groups.object],\n post: { rules: [] },\n all: {},\n keywords: {},\n };\n}\nexports.getRules = getRules;\n//# sourceMappingURL=rules.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.checkStrictMode = exports.getErrorPath = exports.Type = exports.useFunc = exports.setEvaluated = exports.evaluatedPropsToName = exports.mergeEvaluated = exports.eachItem = exports.unescapeJsonPointer = exports.escapeJsonPointer = exports.escapeFragment = exports.unescapeFragment = exports.schemaRefOrVal = exports.schemaHasRulesButRef = exports.schemaHasRules = exports.checkUnknownRules = exports.alwaysValidSchema = exports.toHash = void 0;\nconst codegen_1 = require(\"./codegen\");\nconst code_1 = require(\"./codegen/code\");\n// TODO refactor to use Set\nfunction toHash(arr) {\n const hash = {};\n for (const item of arr)\n hash[item] = true;\n return hash;\n}\nexports.toHash = toHash;\nfunction alwaysValidSchema(it, schema) {\n if (typeof schema == \"boolean\")\n return schema;\n if (Object.keys(schema).length === 0)\n return true;\n checkUnknownRules(it, schema);\n return !schemaHasRules(schema, it.self.RULES.all);\n}\nexports.alwaysValidSchema = alwaysValidSchema;\nfunction checkUnknownRules(it, schema = it.schema) {\n const { opts, self } = it;\n if (!opts.strictSchema)\n return;\n if (typeof schema === \"boolean\")\n return;\n const rules = self.RULES.keywords;\n for (const key in schema) {\n if (!rules[key])\n checkStrictMode(it, `unknown keyword: \"${key}\"`);\n }\n}\nexports.checkUnknownRules = checkUnknownRules;\nfunction schemaHasRules(schema, rules) {\n if (typeof schema == \"boolean\")\n return !schema;\n for (const key in schema)\n if (rules[key])\n return true;\n return 
false;\n}\nexports.schemaHasRules = schemaHasRules;\nfunction schemaHasRulesButRef(schema, RULES) {\n if (typeof schema == \"boolean\")\n return !schema;\n for (const key in schema)\n if (key !== \"$ref\" && RULES.all[key])\n return true;\n return false;\n}\nexports.schemaHasRulesButRef = schemaHasRulesButRef;\nfunction schemaRefOrVal({ topSchemaRef, schemaPath }, schema, keyword, $data) {\n if (!$data) {\n if (typeof schema == \"number\" || typeof schema == \"boolean\")\n return schema;\n if (typeof schema == \"string\")\n return (0, codegen_1._) `${schema}`;\n }\n return (0, codegen_1._) `${topSchemaRef}${schemaPath}${(0, codegen_1.getProperty)(keyword)}`;\n}\nexports.schemaRefOrVal = schemaRefOrVal;\nfunction unescapeFragment(str) {\n return unescapeJsonPointer(decodeURIComponent(str));\n}\nexports.unescapeFragment = unescapeFragment;\nfunction escapeFragment(str) {\n return encodeURIComponent(escapeJsonPointer(str));\n}\nexports.escapeFragment = escapeFragment;\nfunction escapeJsonPointer(str) {\n if (typeof str == \"number\")\n return `${str}`;\n return str.replace(/~/g, \"~0\").replace(/\\//g, \"~1\");\n}\nexports.escapeJsonPointer = escapeJsonPointer;\nfunction unescapeJsonPointer(str) {\n return str.replace(/~1/g, \"/\").replace(/~0/g, \"~\");\n}\nexports.unescapeJsonPointer = unescapeJsonPointer;\nfunction eachItem(xs, f) {\n if (Array.isArray(xs)) {\n for (const x of xs)\n f(x);\n }\n else {\n f(xs);\n }\n}\nexports.eachItem = eachItem;\nfunction makeMergeEvaluated({ mergeNames, mergeToName, mergeValues, resultToName, }) {\n return (gen, from, to, toName) => {\n const res = to === undefined\n ? from\n : to instanceof codegen_1.Name\n ? (from instanceof codegen_1.Name ? mergeNames(gen, from, to) : mergeToName(gen, from, to), to)\n : from instanceof codegen_1.Name\n ? (mergeToName(gen, to, from), from)\n : mergeValues(from, to);\n return toName === codegen_1.Name && !(res instanceof codegen_1.Name) ? resultToName(gen, res) : res;\n };\n}\nexports.mergeEvaluated = {\n props: makeMergeEvaluated({\n mergeNames: (gen, from, to) => gen.if((0, codegen_1._) `${to} !== true && ${from} !== undefined`, () => {\n gen.if((0, codegen_1._) `${from} === true`, () => gen.assign(to, true), () => gen.assign(to, (0, codegen_1._) `${to} || {}`).code((0, codegen_1._) `Object.assign(${to}, ${from})`));\n }),\n mergeToName: (gen, from, to) => gen.if((0, codegen_1._) `${to} !== true`, () => {\n if (from === true) {\n gen.assign(to, true);\n }\n else {\n gen.assign(to, (0, codegen_1._) `${to} || {}`);\n setEvaluated(gen, to, from);\n }\n }),\n mergeValues: (from, to) => (from === true ? true : { ...from, ...to }),\n resultToName: evaluatedPropsToName,\n }),\n items: makeMergeEvaluated({\n mergeNames: (gen, from, to) => gen.if((0, codegen_1._) `${to} !== true && ${from} !== undefined`, () => gen.assign(to, (0, codegen_1._) `${from} === true ? true : ${to} > ${from} ? ${to} : ${from}`)),\n mergeToName: (gen, from, to) => gen.if((0, codegen_1._) `${to} !== true`, () => gen.assign(to, from === true ? true : (0, codegen_1._) `${to} > ${from} ? ${to} : ${from}`)),\n mergeValues: (from, to) => (from === true ? 
true : Math.max(from, to)),\n resultToName: (gen, items) => gen.var(\"items\", items),\n }),\n};\nfunction evaluatedPropsToName(gen, ps) {\n if (ps === true)\n return gen.var(\"props\", true);\n const props = gen.var(\"props\", (0, codegen_1._) `{}`);\n if (ps !== undefined)\n setEvaluated(gen, props, ps);\n return props;\n}\nexports.evaluatedPropsToName = evaluatedPropsToName;\nfunction setEvaluated(gen, props, ps) {\n Object.keys(ps).forEach((p) => gen.assign((0, codegen_1._) `${props}${(0, codegen_1.getProperty)(p)}`, true));\n}\nexports.setEvaluated = setEvaluated;\nconst snippets = {};\nfunction useFunc(gen, f) {\n return gen.scopeValue(\"func\", {\n ref: f,\n code: snippets[f.code] || (snippets[f.code] = new code_1._Code(f.code)),\n });\n}\nexports.useFunc = useFunc;\nvar Type;\n(function (Type) {\n Type[Type[\"Num\"] = 0] = \"Num\";\n Type[Type[\"Str\"] = 1] = \"Str\";\n})(Type = exports.Type || (exports.Type = {}));\nfunction getErrorPath(dataProp, dataPropType, jsPropertySyntax) {\n // let path\n if (dataProp instanceof codegen_1.Name) {\n const isNumber = dataPropType === Type.Num;\n return jsPropertySyntax\n ? isNumber\n ? (0, codegen_1._) `\"[\" + ${dataProp} + \"]\"`\n : (0, codegen_1._) `\"['\" + ${dataProp} + \"']\"`\n : isNumber\n ? (0, codegen_1._) `\"/\" + ${dataProp}`\n : (0, codegen_1._) `\"/\" + ${dataProp}.replace(/~/g, \"~0\").replace(/\\\\//g, \"~1\")`; // TODO maybe use global escapePointer\n }\n return jsPropertySyntax ? (0, codegen_1.getProperty)(dataProp).toString() : \"/\" + escapeJsonPointer(dataProp);\n}\nexports.getErrorPath = getErrorPath;\nfunction checkStrictMode(it, msg, mode = it.opts.strictSchema) {\n if (!mode)\n return;\n msg = `strict mode: ${msg}`;\n if (mode === true)\n throw new Error(msg);\n it.self.logger.warn(msg);\n}\nexports.checkStrictMode = checkStrictMode;\n//# sourceMappingURL=util.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.shouldUseRule = exports.shouldUseGroup = exports.schemaHasRulesForType = void 0;\nfunction schemaHasRulesForType({ schema, self }, type) {\n const group = self.RULES.types[type];\n return group && group !== true && shouldUseGroup(schema, group);\n}\nexports.schemaHasRulesForType = schemaHasRulesForType;\nfunction shouldUseGroup(schema, group) {\n return group.rules.some((rule) => shouldUseRule(schema, rule));\n}\nexports.shouldUseGroup = shouldUseGroup;\nfunction shouldUseRule(schema, rule) {\n var _a;\n return (schema[rule.keyword] !== undefined ||\n ((_a = rule.definition.implements) === null || _a === void 0 ? 
void 0 : _a.some((kwd) => schema[kwd] !== undefined)));\n}\nexports.shouldUseRule = shouldUseRule;\n//# sourceMappingURL=applicability.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.boolOrEmptySchema = exports.topBoolOrEmptySchema = void 0;\nconst errors_1 = require(\"../errors\");\nconst codegen_1 = require(\"../codegen\");\nconst names_1 = require(\"../names\");\nconst boolError = {\n message: \"boolean schema is false\",\n};\nfunction topBoolOrEmptySchema(it) {\n const { gen, schema, validateName } = it;\n if (schema === false) {\n falseSchemaError(it, false);\n }\n else if (typeof schema == \"object\" && schema.$async === true) {\n gen.return(names_1.default.data);\n }\n else {\n gen.assign((0, codegen_1._) `${validateName}.errors`, null);\n gen.return(true);\n }\n}\nexports.topBoolOrEmptySchema = topBoolOrEmptySchema;\nfunction boolOrEmptySchema(it, valid) {\n const { gen, schema } = it;\n if (schema === false) {\n gen.var(valid, false); // TODO var\n falseSchemaError(it);\n }\n else {\n gen.var(valid, true); // TODO var\n }\n}\nexports.boolOrEmptySchema = boolOrEmptySchema;\nfunction falseSchemaError(it, overrideAllErrors) {\n const { gen, data } = it;\n // TODO maybe some other interface should be used for non-keyword validation errors...\n const cxt = {\n gen,\n keyword: \"false schema\",\n data,\n schema: false,\n schemaCode: false,\n schemaValue: false,\n params: {},\n it,\n };\n (0, errors_1.reportError)(cxt, boolError, undefined, overrideAllErrors);\n}\n//# sourceMappingURL=boolSchema.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.reportTypeError = exports.checkDataTypes = exports.checkDataType = exports.coerceAndCheckDataType = exports.getJSONTypes = exports.getSchemaTypes = exports.DataType = void 0;\nconst rules_1 = require(\"../rules\");\nconst applicability_1 = require(\"./applicability\");\nconst errors_1 = require(\"../errors\");\nconst codegen_1 = require(\"../codegen\");\nconst util_1 = require(\"../util\");\nvar DataType;\n(function (DataType) {\n DataType[DataType[\"Correct\"] = 0] = \"Correct\";\n DataType[DataType[\"Wrong\"] = 1] = \"Wrong\";\n})(DataType = exports.DataType || (exports.DataType = {}));\nfunction getSchemaTypes(schema) {\n const types = getJSONTypes(schema.type);\n const hasNull = types.includes(\"null\");\n if (hasNull) {\n if (schema.nullable === false)\n throw new Error(\"type: null contradicts nullable: false\");\n }\n else {\n if (!types.length && schema.nullable !== undefined) {\n throw new Error('\"nullable\" cannot be used without \"type\"');\n }\n if (schema.nullable === true)\n types.push(\"null\");\n }\n return types;\n}\nexports.getSchemaTypes = getSchemaTypes;\nfunction getJSONTypes(ts) {\n const types = Array.isArray(ts) ? ts : ts ? 
[ts] : [];\n if (types.every(rules_1.isJSONType))\n return types;\n throw new Error(\"type must be JSONType or JSONType[]: \" + types.join(\",\"));\n}\nexports.getJSONTypes = getJSONTypes;\nfunction coerceAndCheckDataType(it, types) {\n const { gen, data, opts } = it;\n const coerceTo = coerceToTypes(types, opts.coerceTypes);\n const checkTypes = types.length > 0 &&\n !(coerceTo.length === 0 && types.length === 1 && (0, applicability_1.schemaHasRulesForType)(it, types[0]));\n if (checkTypes) {\n const wrongType = checkDataTypes(types, data, opts.strictNumbers, DataType.Wrong);\n gen.if(wrongType, () => {\n if (coerceTo.length)\n coerceData(it, types, coerceTo);\n else\n reportTypeError(it);\n });\n }\n return checkTypes;\n}\nexports.coerceAndCheckDataType = coerceAndCheckDataType;\nconst COERCIBLE = new Set([\"string\", \"number\", \"integer\", \"boolean\", \"null\"]);\nfunction coerceToTypes(types, coerceTypes) {\n return coerceTypes\n ? types.filter((t) => COERCIBLE.has(t) || (coerceTypes === \"array\" && t === \"array\"))\n : [];\n}\nfunction coerceData(it, types, coerceTo) {\n const { gen, data, opts } = it;\n const dataType = gen.let(\"dataType\", (0, codegen_1._) `typeof ${data}`);\n const coerced = gen.let(\"coerced\", (0, codegen_1._) `undefined`);\n if (opts.coerceTypes === \"array\") {\n gen.if((0, codegen_1._) `${dataType} == 'object' && Array.isArray(${data}) && ${data}.length == 1`, () => gen\n .assign(data, (0, codegen_1._) `${data}[0]`)\n .assign(dataType, (0, codegen_1._) `typeof ${data}`)\n .if(checkDataTypes(types, data, opts.strictNumbers), () => gen.assign(coerced, data)));\n }\n gen.if((0, codegen_1._) `${coerced} !== undefined`);\n for (const t of coerceTo) {\n if (COERCIBLE.has(t) || (t === \"array\" && opts.coerceTypes === \"array\")) {\n coerceSpecificType(t);\n }\n }\n gen.else();\n reportTypeError(it);\n gen.endIf();\n gen.if((0, codegen_1._) `${coerced} !== undefined`, () => {\n gen.assign(data, coerced);\n assignParentData(it, coerced);\n });\n function coerceSpecificType(t) {\n switch (t) {\n case \"string\":\n gen\n .elseIf((0, codegen_1._) `${dataType} == \"number\" || ${dataType} == \"boolean\"`)\n .assign(coerced, (0, codegen_1._) `\"\" + ${data}`)\n .elseIf((0, codegen_1._) `${data} === null`)\n .assign(coerced, (0, codegen_1._) `\"\"`);\n return;\n case \"number\":\n gen\n .elseIf((0, codegen_1._) `${dataType} == \"boolean\" || ${data} === null\n || (${dataType} == \"string\" && ${data} && ${data} == +${data})`)\n .assign(coerced, (0, codegen_1._) `+${data}`);\n return;\n case \"integer\":\n gen\n .elseIf((0, codegen_1._) `${dataType} === \"boolean\" || ${data} === null\n || (${dataType} === \"string\" && ${data} && ${data} == +${data} && !(${data} % 1))`)\n .assign(coerced, (0, codegen_1._) `+${data}`);\n return;\n case \"boolean\":\n gen\n .elseIf((0, codegen_1._) `${data} === \"false\" || ${data} === 0 || ${data} === null`)\n .assign(coerced, false)\n .elseIf((0, codegen_1._) `${data} === \"true\" || ${data} === 1`)\n .assign(coerced, true);\n return;\n case \"null\":\n gen.elseIf((0, codegen_1._) `${data} === \"\" || ${data} === 0 || ${data} === false`);\n gen.assign(coerced, null);\n return;\n case \"array\":\n gen\n .elseIf((0, codegen_1._) `${dataType} === \"string\" || ${dataType} === \"number\"\n || ${dataType} === \"boolean\" || ${data} === null`)\n .assign(coerced, (0, codegen_1._) `[${data}]`);\n }\n }\n}\nfunction assignParentData({ gen, parentData, parentDataProperty }, expr) {\n // TODO use gen.property\n gen.if((0, codegen_1._) 
`${parentData} !== undefined`, () => gen.assign((0, codegen_1._) `${parentData}[${parentDataProperty}]`, expr));\n}\nfunction checkDataType(dataType, data, strictNums, correct = DataType.Correct) {\n const EQ = correct === DataType.Correct ? codegen_1.operators.EQ : codegen_1.operators.NEQ;\n let cond;\n switch (dataType) {\n case \"null\":\n return (0, codegen_1._) `${data} ${EQ} null`;\n case \"array\":\n cond = (0, codegen_1._) `Array.isArray(${data})`;\n break;\n case \"object\":\n cond = (0, codegen_1._) `${data} && typeof ${data} == \"object\" && !Array.isArray(${data})`;\n break;\n case \"integer\":\n cond = numCond((0, codegen_1._) `!(${data} % 1) && !isNaN(${data})`);\n break;\n case \"number\":\n cond = numCond();\n break;\n default:\n return (0, codegen_1._) `typeof ${data} ${EQ} ${dataType}`;\n }\n return correct === DataType.Correct ? cond : (0, codegen_1.not)(cond);\n function numCond(_cond = codegen_1.nil) {\n return (0, codegen_1.and)((0, codegen_1._) `typeof ${data} == \"number\"`, _cond, strictNums ? (0, codegen_1._) `isFinite(${data})` : codegen_1.nil);\n }\n}\nexports.checkDataType = checkDataType;\nfunction checkDataTypes(dataTypes, data, strictNums, correct) {\n if (dataTypes.length === 1) {\n return checkDataType(dataTypes[0], data, strictNums, correct);\n }\n let cond;\n const types = (0, util_1.toHash)(dataTypes);\n if (types.array && types.object) {\n const notObj = (0, codegen_1._) `typeof ${data} != \"object\"`;\n cond = types.null ? notObj : (0, codegen_1._) `!${data} || ${notObj}`;\n delete types.null;\n delete types.array;\n delete types.object;\n }\n else {\n cond = codegen_1.nil;\n }\n if (types.number)\n delete types.integer;\n for (const t in types)\n cond = (0, codegen_1.and)(cond, checkDataType(t, data, strictNums, correct));\n return cond;\n}\nexports.checkDataTypes = checkDataTypes;\nconst typeError = {\n message: ({ schema }) => `must be ${schema}`,\n params: ({ schema, schemaValue }) => typeof schema == \"string\" ? 
(0, codegen_1._) `{type: ${schema}}` : (0, codegen_1._) `{type: ${schemaValue}}`,\n};\nfunction reportTypeError(it) {\n const cxt = getTypeErrorContext(it);\n (0, errors_1.reportError)(cxt, typeError);\n}\nexports.reportTypeError = reportTypeError;\nfunction getTypeErrorContext(it) {\n const { gen, data, schema } = it;\n const schemaCode = (0, util_1.schemaRefOrVal)(it, schema, \"type\");\n return {\n gen,\n keyword: \"type\",\n data,\n schema: schema.type,\n schemaCode,\n schemaValue: schemaCode,\n parentSchema: schema,\n params: {},\n it,\n };\n}\n//# sourceMappingURL=dataType.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.assignDefaults = void 0;\nconst codegen_1 = require(\"../codegen\");\nconst util_1 = require(\"../util\");\nfunction assignDefaults(it, ty) {\n const { properties, items } = it.schema;\n if (ty === \"object\" && properties) {\n for (const key in properties) {\n assignDefault(it, key, properties[key].default);\n }\n }\n else if (ty === \"array\" && Array.isArray(items)) {\n items.forEach((sch, i) => assignDefault(it, i, sch.default));\n }\n}\nexports.assignDefaults = assignDefaults;\nfunction assignDefault(it, prop, defaultValue) {\n const { gen, compositeRule, data, opts } = it;\n if (defaultValue === undefined)\n return;\n const childData = (0, codegen_1._) `${data}${(0, codegen_1.getProperty)(prop)}`;\n if (compositeRule) {\n (0, util_1.checkStrictMode)(it, `default is ignored for: ${childData}`);\n return;\n }\n let condition = (0, codegen_1._) `${childData} === undefined`;\n if (opts.useDefaults === \"empty\") {\n condition = (0, codegen_1._) `${condition} || ${childData} === null || ${childData} === \"\"`;\n }\n // `${childData} === undefined` +\n // (opts.useDefaults === \"empty\" ? 
` || ${childData} === null || ${childData} === \"\"` : \"\")\n gen.if(condition, (0, codegen_1._) `${childData} = ${(0, codegen_1.stringify)(defaultValue)}`);\n}\n//# sourceMappingURL=defaults.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getData = exports.KeywordCxt = exports.validateFunctionCode = void 0;\nconst boolSchema_1 = require(\"./boolSchema\");\nconst dataType_1 = require(\"./dataType\");\nconst applicability_1 = require(\"./applicability\");\nconst dataType_2 = require(\"./dataType\");\nconst defaults_1 = require(\"./defaults\");\nconst keyword_1 = require(\"./keyword\");\nconst subschema_1 = require(\"./subschema\");\nconst codegen_1 = require(\"../codegen\");\nconst names_1 = require(\"../names\");\nconst resolve_1 = require(\"../resolve\");\nconst util_1 = require(\"../util\");\nconst errors_1 = require(\"../errors\");\n// schema compilation - generates validation function, subschemaCode (below) is used for subschemas\nfunction validateFunctionCode(it) {\n if (isSchemaObj(it)) {\n checkKeywords(it);\n if (schemaCxtHasRules(it)) {\n topSchemaObjCode(it);\n return;\n }\n }\n validateFunction(it, () => (0, boolSchema_1.topBoolOrEmptySchema)(it));\n}\nexports.validateFunctionCode = validateFunctionCode;\nfunction validateFunction({ gen, validateName, schema, schemaEnv, opts }, body) {\n if (opts.code.es5) {\n gen.func(validateName, (0, codegen_1._) `${names_1.default.data}, ${names_1.default.valCxt}`, schemaEnv.$async, () => {\n gen.code((0, codegen_1._) `\"use strict\"; ${funcSourceUrl(schema, opts)}`);\n destructureValCxtES5(gen, opts);\n gen.code(body);\n });\n }\n else {\n gen.func(validateName, (0, codegen_1._) `${names_1.default.data}, ${destructureValCxt(opts)}`, schemaEnv.$async, () => gen.code(funcSourceUrl(schema, opts)).code(body));\n }\n}\nfunction destructureValCxt(opts) {\n return (0, codegen_1._) `{${names_1.default.instancePath}=\"\", ${names_1.default.parentData}, ${names_1.default.parentDataProperty}, ${names_1.default.rootData}=${names_1.default.data}${opts.dynamicRef ? 
(0, codegen_1._) `, ${names_1.default.dynamicAnchors}={}` : codegen_1.nil}}={}`;\n}\nfunction destructureValCxtES5(gen, opts) {\n gen.if(names_1.default.valCxt, () => {\n gen.var(names_1.default.instancePath, (0, codegen_1._) `${names_1.default.valCxt}.${names_1.default.instancePath}`);\n gen.var(names_1.default.parentData, (0, codegen_1._) `${names_1.default.valCxt}.${names_1.default.parentData}`);\n gen.var(names_1.default.parentDataProperty, (0, codegen_1._) `${names_1.default.valCxt}.${names_1.default.parentDataProperty}`);\n gen.var(names_1.default.rootData, (0, codegen_1._) `${names_1.default.valCxt}.${names_1.default.rootData}`);\n if (opts.dynamicRef)\n gen.var(names_1.default.dynamicAnchors, (0, codegen_1._) `${names_1.default.valCxt}.${names_1.default.dynamicAnchors}`);\n }, () => {\n gen.var(names_1.default.instancePath, (0, codegen_1._) `\"\"`);\n gen.var(names_1.default.parentData, (0, codegen_1._) `undefined`);\n gen.var(names_1.default.parentDataProperty, (0, codegen_1._) `undefined`);\n gen.var(names_1.default.rootData, names_1.default.data);\n if (opts.dynamicRef)\n gen.var(names_1.default.dynamicAnchors, (0, codegen_1._) `{}`);\n });\n}\nfunction topSchemaObjCode(it) {\n const { schema, opts, gen } = it;\n validateFunction(it, () => {\n if (opts.$comment && schema.$comment)\n commentKeyword(it);\n checkNoDefault(it);\n gen.let(names_1.default.vErrors, null);\n gen.let(names_1.default.errors, 0);\n if (opts.unevaluated)\n resetEvaluated(it);\n typeAndKeywords(it);\n returnResults(it);\n });\n return;\n}\nfunction resetEvaluated(it) {\n // TODO maybe some hook to execute it in the end to check whether props/items are Name, as in assignEvaluated\n const { gen, validateName } = it;\n it.evaluated = gen.const(\"evaluated\", (0, codegen_1._) `${validateName}.evaluated`);\n gen.if((0, codegen_1._) `${it.evaluated}.dynamicProps`, () => gen.assign((0, codegen_1._) `${it.evaluated}.props`, (0, codegen_1._) `undefined`));\n gen.if((0, codegen_1._) `${it.evaluated}.dynamicItems`, () => gen.assign((0, codegen_1._) `${it.evaluated}.items`, (0, codegen_1._) `undefined`));\n}\nfunction funcSourceUrl(schema, opts) {\n const schId = typeof schema == \"object\" && schema[opts.schemaId];\n return schId && (opts.code.source || opts.code.process) ? 
(0, codegen_1._) `/*# sourceURL=${schId} */` : codegen_1.nil;\n}\n// schema compilation - this function is used recursively to generate code for sub-schemas\nfunction subschemaCode(it, valid) {\n if (isSchemaObj(it)) {\n checkKeywords(it);\n if (schemaCxtHasRules(it)) {\n subSchemaObjCode(it, valid);\n return;\n }\n }\n (0, boolSchema_1.boolOrEmptySchema)(it, valid);\n}\nfunction schemaCxtHasRules({ schema, self }) {\n if (typeof schema == \"boolean\")\n return !schema;\n for (const key in schema)\n if (self.RULES.all[key])\n return true;\n return false;\n}\nfunction isSchemaObj(it) {\n return typeof it.schema != \"boolean\";\n}\nfunction subSchemaObjCode(it, valid) {\n const { schema, gen, opts } = it;\n if (opts.$comment && schema.$comment)\n commentKeyword(it);\n updateContext(it);\n checkAsyncSchema(it);\n const errsCount = gen.const(\"_errs\", names_1.default.errors);\n typeAndKeywords(it, errsCount);\n // TODO var\n gen.var(valid, (0, codegen_1._) `${errsCount} === ${names_1.default.errors}`);\n}\nfunction checkKeywords(it) {\n (0, util_1.checkUnknownRules)(it);\n checkRefsAndKeywords(it);\n}\nfunction typeAndKeywords(it, errsCount) {\n if (it.opts.jtd)\n return schemaKeywords(it, [], false, errsCount);\n const types = (0, dataType_1.getSchemaTypes)(it.schema);\n const checkedTypes = (0, dataType_1.coerceAndCheckDataType)(it, types);\n schemaKeywords(it, types, !checkedTypes, errsCount);\n}\nfunction checkRefsAndKeywords(it) {\n const { schema, errSchemaPath, opts, self } = it;\n if (schema.$ref && opts.ignoreKeywordsWithRef && (0, util_1.schemaHasRulesButRef)(schema, self.RULES)) {\n self.logger.warn(`$ref: keywords ignored in schema at path \"${errSchemaPath}\"`);\n }\n}\nfunction checkNoDefault(it) {\n const { schema, opts } = it;\n if (schema.default !== undefined && opts.useDefaults && opts.strictSchema) {\n (0, util_1.checkStrictMode)(it, \"default is ignored in the schema root\");\n }\n}\nfunction updateContext(it) {\n const schId = it.schema[it.opts.schemaId];\n if (schId)\n it.baseId = (0, resolve_1.resolveUrl)(it.opts.uriResolver, it.baseId, schId);\n}\nfunction checkAsyncSchema(it) {\n if (it.schema.$async && !it.schemaEnv.$async)\n throw new Error(\"async schema in sync schema\");\n}\nfunction commentKeyword({ gen, schemaEnv, schema, errSchemaPath, opts }) {\n const msg = schema.$comment;\n if (opts.$comment === true) {\n gen.code((0, codegen_1._) `${names_1.default.self}.logger.log(${msg})`);\n }\n else if (typeof opts.$comment == \"function\") {\n const schemaPath = (0, codegen_1.str) `${errSchemaPath}/$comment`;\n const rootName = gen.scopeValue(\"root\", { ref: schemaEnv.root });\n gen.code((0, codegen_1._) `${names_1.default.self}.opts.$comment(${msg}, ${schemaPath}, ${rootName}.schema)`);\n }\n}\nfunction returnResults(it) {\n const { gen, schemaEnv, validateName, ValidationError, opts } = it;\n if (schemaEnv.$async) {\n // TODO assign unevaluated\n gen.if((0, codegen_1._) `${names_1.default.errors} === 0`, () => gen.return(names_1.default.data), () => gen.throw((0, codegen_1._) `new ${ValidationError}(${names_1.default.vErrors})`));\n }\n else {\n gen.assign((0, codegen_1._) `${validateName}.errors`, names_1.default.vErrors);\n if (opts.unevaluated)\n assignEvaluated(it);\n gen.return((0, codegen_1._) `${names_1.default.errors} === 0`);\n }\n}\nfunction assignEvaluated({ gen, evaluated, props, items }) {\n if (props instanceof codegen_1.Name)\n gen.assign((0, codegen_1._) `${evaluated}.props`, props);\n if (items instanceof codegen_1.Name)\n gen.assign((0, 
codegen_1._) `${evaluated}.items`, items);\n}\nfunction schemaKeywords(it, types, typeErrors, errsCount) {\n const { gen, schema, data, allErrors, opts, self } = it;\n const { RULES } = self;\n if (schema.$ref && (opts.ignoreKeywordsWithRef || !(0, util_1.schemaHasRulesButRef)(schema, RULES))) {\n gen.block(() => keywordCode(it, \"$ref\", RULES.all.$ref.definition)); // TODO typecast\n return;\n }\n if (!opts.jtd)\n checkStrictTypes(it, types);\n gen.block(() => {\n for (const group of RULES.rules)\n groupKeywords(group);\n groupKeywords(RULES.post);\n });\n function groupKeywords(group) {\n if (!(0, applicability_1.shouldUseGroup)(schema, group))\n return;\n if (group.type) {\n gen.if((0, dataType_2.checkDataType)(group.type, data, opts.strictNumbers));\n iterateKeywords(it, group);\n if (types.length === 1 && types[0] === group.type && typeErrors) {\n gen.else();\n (0, dataType_2.reportTypeError)(it);\n }\n gen.endIf();\n }\n else {\n iterateKeywords(it, group);\n }\n // TODO make it \"ok\" call?\n if (!allErrors)\n gen.if((0, codegen_1._) `${names_1.default.errors} === ${errsCount || 0}`);\n }\n}\nfunction iterateKeywords(it, group) {\n const { gen, schema, opts: { useDefaults }, } = it;\n if (useDefaults)\n (0, defaults_1.assignDefaults)(it, group.type);\n gen.block(() => {\n for (const rule of group.rules) {\n if ((0, applicability_1.shouldUseRule)(schema, rule)) {\n keywordCode(it, rule.keyword, rule.definition, group.type);\n }\n }\n });\n}\nfunction checkStrictTypes(it, types) {\n if (it.schemaEnv.meta || !it.opts.strictTypes)\n return;\n checkContextTypes(it, types);\n if (!it.opts.allowUnionTypes)\n checkMultipleTypes(it, types);\n checkKeywordTypes(it, it.dataTypes);\n}\nfunction checkContextTypes(it, types) {\n if (!types.length)\n return;\n if (!it.dataTypes.length) {\n it.dataTypes = types;\n return;\n }\n types.forEach((t) => {\n if (!includesType(it.dataTypes, t)) {\n strictTypesError(it, `type \"${t}\" not allowed by context \"${it.dataTypes.join(\",\")}\"`);\n }\n });\n it.dataTypes = it.dataTypes.filter((t) => includesType(types, t));\n}\nfunction checkMultipleTypes(it, ts) {\n if (ts.length > 1 && !(ts.length === 2 && ts.includes(\"null\"))) {\n strictTypesError(it, \"use allowUnionTypes to allow union type keyword\");\n }\n}\nfunction checkKeywordTypes(it, ts) {\n const rules = it.self.RULES.all;\n for (const keyword in rules) {\n const rule = rules[keyword];\n if (typeof rule == \"object\" && (0, applicability_1.shouldUseRule)(it.schema, rule)) {\n const { type } = rule.definition;\n if (type.length && !type.some((t) => hasApplicableType(ts, t))) {\n strictTypesError(it, `missing type \"${type.join(\",\")}\" for keyword \"${keyword}\"`);\n }\n }\n }\n}\nfunction hasApplicableType(schTs, kwdT) {\n return schTs.includes(kwdT) || (kwdT === \"number\" && schTs.includes(\"integer\"));\n}\nfunction includesType(ts, t) {\n return ts.includes(t) || (t === \"integer\" && ts.includes(\"number\"));\n}\nfunction strictTypesError(it, msg) {\n const schemaPath = it.schemaEnv.baseId + it.errSchemaPath;\n msg += ` at \"${schemaPath}\" (strictTypes)`;\n (0, util_1.checkStrictMode)(it, msg, it.opts.strictTypes);\n}\nclass KeywordCxt {\n constructor(it, def, keyword) {\n (0, keyword_1.validateKeywordUsage)(it, def, keyword);\n this.gen = it.gen;\n this.allErrors = it.allErrors;\n this.keyword = keyword;\n this.data = it.data;\n this.schema = it.schema[keyword];\n this.$data = def.$data && it.opts.$data && this.schema && this.schema.$data;\n this.schemaValue = (0, 
util_1.schemaRefOrVal)(it, this.schema, keyword, this.$data);\n this.schemaType = def.schemaType;\n this.parentSchema = it.schema;\n this.params = {};\n this.it = it;\n this.def = def;\n if (this.$data) {\n this.schemaCode = it.gen.const(\"vSchema\", getData(this.$data, it));\n }\n else {\n this.schemaCode = this.schemaValue;\n if (!(0, keyword_1.validSchemaType)(this.schema, def.schemaType, def.allowUndefined)) {\n throw new Error(`${keyword} value must be ${JSON.stringify(def.schemaType)}`);\n }\n }\n if (\"code\" in def ? def.trackErrors : def.errors !== false) {\n this.errsCount = it.gen.const(\"_errs\", names_1.default.errors);\n }\n }\n result(condition, successAction, failAction) {\n this.failResult((0, codegen_1.not)(condition), successAction, failAction);\n }\n failResult(condition, successAction, failAction) {\n this.gen.if(condition);\n if (failAction)\n failAction();\n else\n this.error();\n if (successAction) {\n this.gen.else();\n successAction();\n if (this.allErrors)\n this.gen.endIf();\n }\n else {\n if (this.allErrors)\n this.gen.endIf();\n else\n this.gen.else();\n }\n }\n pass(condition, failAction) {\n this.failResult((0, codegen_1.not)(condition), undefined, failAction);\n }\n fail(condition) {\n if (condition === undefined) {\n this.error();\n if (!this.allErrors)\n this.gen.if(false); // this branch will be removed by gen.optimize\n return;\n }\n this.gen.if(condition);\n this.error();\n if (this.allErrors)\n this.gen.endIf();\n else\n this.gen.else();\n }\n fail$data(condition) {\n if (!this.$data)\n return this.fail(condition);\n const { schemaCode } = this;\n this.fail((0, codegen_1._) `${schemaCode} !== undefined && (${(0, codegen_1.or)(this.invalid$data(), condition)})`);\n }\n error(append, errorParams, errorPaths) {\n if (errorParams) {\n this.setParams(errorParams);\n this._error(append, errorPaths);\n this.setParams({});\n return;\n }\n this._error(append, errorPaths);\n }\n _error(append, errorPaths) {\n ;\n (append ? errors_1.reportExtraError : errors_1.reportError)(this, this.def.error, errorPaths);\n }\n $dataError() {\n (0, errors_1.reportError)(this, this.def.$dataError || errors_1.keyword$DataError);\n }\n reset() {\n if (this.errsCount === undefined)\n throw new Error('add \"trackErrors\" to keyword definition');\n (0, errors_1.resetErrorsCount)(this.gen, this.errsCount);\n }\n ok(cond) {\n if (!this.allErrors)\n this.gen.if(cond);\n }\n setParams(obj, assign) {\n if (assign)\n Object.assign(this.params, obj);\n else\n this.params = obj;\n }\n block$data(valid, codeBlock, $dataValid = codegen_1.nil) {\n this.gen.block(() => {\n this.check$data(valid, $dataValid);\n codeBlock();\n });\n }\n check$data(valid = codegen_1.nil, $dataValid = codegen_1.nil) {\n if (!this.$data)\n return;\n const { gen, schemaCode, schemaType, def } = this;\n gen.if((0, codegen_1.or)((0, codegen_1._) `${schemaCode} === undefined`, $dataValid));\n if (valid !== codegen_1.nil)\n gen.assign(valid, true);\n if (schemaType.length || def.validateSchema) {\n gen.elseIf(this.invalid$data());\n this.$dataError();\n if (valid !== codegen_1.nil)\n gen.assign(valid, false);\n }\n gen.else();\n }\n invalid$data() {\n const { gen, schemaCode, schemaType, def, it } = this;\n return (0, codegen_1.or)(wrong$DataType(), invalid$DataSchema());\n function wrong$DataType() {\n if (schemaType.length) {\n /* istanbul ignore if */\n if (!(schemaCode instanceof codegen_1.Name))\n throw new Error(\"ajv implementation error\");\n const st = Array.isArray(schemaType) ? 
schemaType : [schemaType];\n return (0, codegen_1._) `${(0, dataType_2.checkDataTypes)(st, schemaCode, it.opts.strictNumbers, dataType_2.DataType.Wrong)}`;\n }\n return codegen_1.nil;\n }\n function invalid$DataSchema() {\n if (def.validateSchema) {\n const validateSchemaRef = gen.scopeValue(\"validate$data\", { ref: def.validateSchema }); // TODO value.code for standalone\n return (0, codegen_1._) `!${validateSchemaRef}(${schemaCode})`;\n }\n return codegen_1.nil;\n }\n }\n subschema(appl, valid) {\n const subschema = (0, subschema_1.getSubschema)(this.it, appl);\n (0, subschema_1.extendSubschemaData)(subschema, this.it, appl);\n (0, subschema_1.extendSubschemaMode)(subschema, appl);\n const nextContext = { ...this.it, ...subschema, items: undefined, props: undefined };\n subschemaCode(nextContext, valid);\n return nextContext;\n }\n mergeEvaluated(schemaCxt, toName) {\n const { it, gen } = this;\n if (!it.opts.unevaluated)\n return;\n if (it.props !== true && schemaCxt.props !== undefined) {\n it.props = util_1.mergeEvaluated.props(gen, schemaCxt.props, it.props, toName);\n }\n if (it.items !== true && schemaCxt.items !== undefined) {\n it.items = util_1.mergeEvaluated.items(gen, schemaCxt.items, it.items, toName);\n }\n }\n mergeValidEvaluated(schemaCxt, valid) {\n const { it, gen } = this;\n if (it.opts.unevaluated && (it.props !== true || it.items !== true)) {\n gen.if(valid, () => this.mergeEvaluated(schemaCxt, codegen_1.Name));\n return true;\n }\n }\n}\nexports.KeywordCxt = KeywordCxt;\nfunction keywordCode(it, keyword, def, ruleType) {\n const cxt = new KeywordCxt(it, def, keyword);\n if (\"code\" in def) {\n def.code(cxt, ruleType);\n }\n else if (cxt.$data && def.validate) {\n (0, keyword_1.funcKeywordCode)(cxt, def);\n }\n else if (\"macro\" in def) {\n (0, keyword_1.macroKeywordCode)(cxt, def);\n }\n else if (def.compile || def.validate) {\n (0, keyword_1.funcKeywordCode)(cxt, def);\n }\n}\nconst JSON_POINTER = /^\\/(?:[^~]|~0|~1)*$/;\nconst RELATIVE_JSON_POINTER = /^([0-9]+)(#|\\/(?:[^~]|~0|~1)*)?$/;\nfunction getData($data, { dataLevel, dataNames, dataPathArr }) {\n let jsonPointer;\n let data;\n if ($data === \"\")\n return names_1.default.rootData;\n if ($data[0] === \"/\") {\n if (!JSON_POINTER.test($data))\n throw new Error(`Invalid JSON-pointer: ${$data}`);\n jsonPointer = $data;\n data = names_1.default.rootData;\n }\n else {\n const matches = RELATIVE_JSON_POINTER.exec($data);\n if (!matches)\n throw new Error(`Invalid JSON-pointer: ${$data}`);\n const up = +matches[1];\n jsonPointer = matches[2];\n if (jsonPointer === \"#\") {\n if (up >= dataLevel)\n throw new Error(errorMsg(\"property/index\", up));\n return dataPathArr[dataLevel - up];\n }\n if (up > dataLevel)\n throw new Error(errorMsg(\"data\", up));\n data = dataNames[dataLevel - up];\n if (!jsonPointer)\n return data;\n }\n let expr = data;\n const segments = jsonPointer.split(\"/\");\n for (const segment of segments) {\n if (segment) {\n data = (0, codegen_1._) `${data}${(0, codegen_1.getProperty)((0, util_1.unescapeJsonPointer)(segment))}`;\n expr = (0, codegen_1._) `${expr} && ${data}`;\n }\n }\n return expr;\n function errorMsg(pointerType, up) {\n return `Cannot access ${pointerType} ${up} levels up, current level is ${dataLevel}`;\n }\n}\nexports.getData = getData;\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.validateKeywordUsage = exports.validSchemaType = exports.funcKeywordCode = exports.macroKeywordCode = void 
0;\nconst codegen_1 = require(\"../codegen\");\nconst names_1 = require(\"../names\");\nconst code_1 = require(\"../../vocabularies/code\");\nconst errors_1 = require(\"../errors\");\nfunction macroKeywordCode(cxt, def) {\n const { gen, keyword, schema, parentSchema, it } = cxt;\n const macroSchema = def.macro.call(it.self, schema, parentSchema, it);\n const schemaRef = useKeyword(gen, keyword, macroSchema);\n if (it.opts.validateSchema !== false)\n it.self.validateSchema(macroSchema, true);\n const valid = gen.name(\"valid\");\n cxt.subschema({\n schema: macroSchema,\n schemaPath: codegen_1.nil,\n errSchemaPath: `${it.errSchemaPath}/${keyword}`,\n topSchemaRef: schemaRef,\n compositeRule: true,\n }, valid);\n cxt.pass(valid, () => cxt.error(true));\n}\nexports.macroKeywordCode = macroKeywordCode;\nfunction funcKeywordCode(cxt, def) {\n var _a;\n const { gen, keyword, schema, parentSchema, $data, it } = cxt;\n checkAsyncKeyword(it, def);\n const validate = !$data && def.compile ? def.compile.call(it.self, schema, parentSchema, it) : def.validate;\n const validateRef = useKeyword(gen, keyword, validate);\n const valid = gen.let(\"valid\");\n cxt.block$data(valid, validateKeyword);\n cxt.ok((_a = def.valid) !== null && _a !== void 0 ? _a : valid);\n function validateKeyword() {\n if (def.errors === false) {\n assignValid();\n if (def.modifying)\n modifyData(cxt);\n reportErrs(() => cxt.error());\n }\n else {\n const ruleErrs = def.async ? validateAsync() : validateSync();\n if (def.modifying)\n modifyData(cxt);\n reportErrs(() => addErrs(cxt, ruleErrs));\n }\n }\n function validateAsync() {\n const ruleErrs = gen.let(\"ruleErrs\", null);\n gen.try(() => assignValid((0, codegen_1._) `await `), (e) => gen.assign(valid, false).if((0, codegen_1._) `${e} instanceof ${it.ValidationError}`, () => gen.assign(ruleErrs, (0, codegen_1._) `${e}.errors`), () => gen.throw(e)));\n return ruleErrs;\n }\n function validateSync() {\n const validateErrs = (0, codegen_1._) `${validateRef}.errors`;\n gen.assign(validateErrs, null);\n assignValid(codegen_1.nil);\n return validateErrs;\n }\n function assignValid(_await = def.async ? (0, codegen_1._) `await ` : codegen_1.nil) {\n const passCxt = it.opts.passContext ? names_1.default.this : names_1.default.self;\n const passSchema = !((\"compile\" in def && !$data) || def.schema === false);\n gen.assign(valid, (0, codegen_1._) `${_await}${(0, code_1.callValidateCode)(cxt, validateRef, passCxt, passSchema)}`, def.modifying);\n }\n function reportErrs(errors) {\n var _a;\n gen.if((0, codegen_1.not)((_a = def.valid) !== null && _a !== void 0 ? _a : valid), errors);\n }\n}\nexports.funcKeywordCode = funcKeywordCode;\nfunction modifyData(cxt) {\n const { gen, data, it } = cxt;\n gen.if(it.parentData, () => gen.assign(data, (0, codegen_1._) `${it.parentData}[${it.parentDataProperty}]`));\n}\nfunction addErrs(cxt, errs) {\n const { gen } = cxt;\n gen.if((0, codegen_1._) `Array.isArray(${errs})`, () => {\n gen\n .assign(names_1.default.vErrors, (0, codegen_1._) `${names_1.default.vErrors} === null ? 
${errs} : ${names_1.default.vErrors}.concat(${errs})`)\n .assign(names_1.default.errors, (0, codegen_1._) `${names_1.default.vErrors}.length`);\n (0, errors_1.extendErrors)(cxt);\n }, () => cxt.error());\n}\nfunction checkAsyncKeyword({ schemaEnv }, def) {\n if (def.async && !schemaEnv.$async)\n throw new Error(\"async keyword in sync schema\");\n}\nfunction useKeyword(gen, keyword, result) {\n if (result === undefined)\n throw new Error(`keyword \"${keyword}\" failed to compile`);\n return gen.scopeValue(\"keyword\", typeof result == \"function\" ? { ref: result } : { ref: result, code: (0, codegen_1.stringify)(result) });\n}\nfunction validSchemaType(schema, schemaType, allowUndefined = false) {\n // TODO add tests\n return (!schemaType.length ||\n schemaType.some((st) => st === \"array\"\n ? Array.isArray(schema)\n : st === \"object\"\n ? schema && typeof schema == \"object\" && !Array.isArray(schema)\n : typeof schema == st || (allowUndefined && typeof schema == \"undefined\")));\n}\nexports.validSchemaType = validSchemaType;\nfunction validateKeywordUsage({ schema, opts, self, errSchemaPath }, def, keyword) {\n /* istanbul ignore if */\n if (Array.isArray(def.keyword) ? !def.keyword.includes(keyword) : def.keyword !== keyword) {\n throw new Error(\"ajv implementation error\");\n }\n const deps = def.dependencies;\n if (deps === null || deps === void 0 ? void 0 : deps.some((kwd) => !Object.prototype.hasOwnProperty.call(schema, kwd))) {\n throw new Error(`parent schema must have dependencies of ${keyword}: ${deps.join(\",\")}`);\n }\n if (def.validateSchema) {\n const valid = def.validateSchema(schema[keyword]);\n if (!valid) {\n const msg = `keyword \"${keyword}\" value is invalid at path \"${errSchemaPath}\": ` +\n self.errorsText(def.validateSchema.errors);\n if (opts.validateSchema === \"log\")\n self.logger.error(msg);\n else\n throw new Error(msg);\n }\n }\n}\nexports.validateKeywordUsage = validateKeywordUsage;\n//# sourceMappingURL=keyword.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.extendSubschemaMode = exports.extendSubschemaData = exports.getSubschema = void 0;\nconst codegen_1 = require(\"../codegen\");\nconst util_1 = require(\"../util\");\nfunction getSubschema(it, { keyword, schemaProp, schema, schemaPath, errSchemaPath, topSchemaRef }) {\n if (keyword !== undefined && schema !== undefined) {\n throw new Error('both \"keyword\" and \"schema\" passed, only one allowed');\n }\n if (keyword !== undefined) {\n const sch = it.schema[keyword];\n return schemaProp === undefined\n ? 
{\n schema: sch,\n schemaPath: (0, codegen_1._) `${it.schemaPath}${(0, codegen_1.getProperty)(keyword)}`,\n errSchemaPath: `${it.errSchemaPath}/${keyword}`,\n }\n : {\n schema: sch[schemaProp],\n schemaPath: (0, codegen_1._) `${it.schemaPath}${(0, codegen_1.getProperty)(keyword)}${(0, codegen_1.getProperty)(schemaProp)}`,\n errSchemaPath: `${it.errSchemaPath}/${keyword}/${(0, util_1.escapeFragment)(schemaProp)}`,\n };\n }\n if (schema !== undefined) {\n if (schemaPath === undefined || errSchemaPath === undefined || topSchemaRef === undefined) {\n throw new Error('\"schemaPath\", \"errSchemaPath\" and \"topSchemaRef\" are required with \"schema\"');\n }\n return {\n schema,\n schemaPath,\n topSchemaRef,\n errSchemaPath,\n };\n }\n throw new Error('either \"keyword\" or \"schema\" must be passed');\n}\nexports.getSubschema = getSubschema;\nfunction extendSubschemaData(subschema, it, { dataProp, dataPropType: dpType, data, dataTypes, propertyName }) {\n if (data !== undefined && dataProp !== undefined) {\n throw new Error('both \"data\" and \"dataProp\" passed, only one allowed');\n }\n const { gen } = it;\n if (dataProp !== undefined) {\n const { errorPath, dataPathArr, opts } = it;\n const nextData = gen.let(\"data\", (0, codegen_1._) `${it.data}${(0, codegen_1.getProperty)(dataProp)}`, true);\n dataContextProps(nextData);\n subschema.errorPath = (0, codegen_1.str) `${errorPath}${(0, util_1.getErrorPath)(dataProp, dpType, opts.jsPropertySyntax)}`;\n subschema.parentDataProperty = (0, codegen_1._) `${dataProp}`;\n subschema.dataPathArr = [...dataPathArr, subschema.parentDataProperty];\n }\n if (data !== undefined) {\n const nextData = data instanceof codegen_1.Name ? data : gen.let(\"data\", data, true); // replaceable if used once?\n dataContextProps(nextData);\n if (propertyName !== undefined)\n subschema.propertyName = propertyName;\n // TODO something is possibly wrong here with not changing parentDataProperty and not appending dataPathArr\n }\n if (dataTypes)\n subschema.dataTypes = dataTypes;\n function dataContextProps(_nextData) {\n subschema.data = _nextData;\n subschema.dataLevel = it.dataLevel + 1;\n subschema.dataTypes = [];\n it.definedProperties = new Set();\n subschema.parentData = it.data;\n subschema.dataNames = [...it.dataNames, _nextData];\n }\n}\nexports.extendSubschemaData = extendSubschemaData;\nfunction extendSubschemaMode(subschema, { jtdDiscriminator, jtdMetadata, compositeRule, createErrors, allErrors }) {\n if (compositeRule !== undefined)\n subschema.compositeRule = compositeRule;\n if (createErrors !== undefined)\n subschema.createErrors = createErrors;\n if (allErrors !== undefined)\n subschema.allErrors = allErrors;\n subschema.jtdDiscriminator = jtdDiscriminator; // not inherited\n subschema.jtdMetadata = jtdMetadata; // not inherited\n}\nexports.extendSubschemaMode = extendSubschemaMode;\n//# sourceMappingURL=subschema.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.CodeGen = exports.Name = exports.nil = exports.stringify = exports.str = exports._ = exports.KeywordCxt = void 0;\nvar validate_1 = require(\"./compile/validate\");\nObject.defineProperty(exports, \"KeywordCxt\", { enumerable: true, get: function () { return validate_1.KeywordCxt; } });\nvar codegen_1 = require(\"./compile/codegen\");\nObject.defineProperty(exports, \"_\", { enumerable: true, get: function () { return codegen_1._; } });\nObject.defineProperty(exports, \"str\", { enumerable: true, get: function () { return codegen_1.str; } 
});\nObject.defineProperty(exports, \"stringify\", { enumerable: true, get: function () { return codegen_1.stringify; } });\nObject.defineProperty(exports, \"nil\", { enumerable: true, get: function () { return codegen_1.nil; } });\nObject.defineProperty(exports, \"Name\", { enumerable: true, get: function () { return codegen_1.Name; } });\nObject.defineProperty(exports, \"CodeGen\", { enumerable: true, get: function () { return codegen_1.CodeGen; } });\nconst validation_error_1 = require(\"./runtime/validation_error\");\nconst ref_error_1 = require(\"./compile/ref_error\");\nconst rules_1 = require(\"./compile/rules\");\nconst compile_1 = require(\"./compile\");\nconst codegen_2 = require(\"./compile/codegen\");\nconst resolve_1 = require(\"./compile/resolve\");\nconst dataType_1 = require(\"./compile/validate/dataType\");\nconst util_1 = require(\"./compile/util\");\nconst $dataRefSchema = require(\"./refs/data.json\");\nconst uri_1 = require(\"./runtime/uri\");\nconst defaultRegExp = (str, flags) => new RegExp(str, flags);\ndefaultRegExp.code = \"new RegExp\";\nconst META_IGNORE_OPTIONS = [\"removeAdditional\", \"useDefaults\", \"coerceTypes\"];\nconst EXT_SCOPE_NAMES = new Set([\n \"validate\",\n \"serialize\",\n \"parse\",\n \"wrapper\",\n \"root\",\n \"schema\",\n \"keyword\",\n \"pattern\",\n \"formats\",\n \"validate$data\",\n \"func\",\n \"obj\",\n \"Error\",\n]);\nconst removedOptions = {\n errorDataPath: \"\",\n format: \"`validateFormats: false` can be used instead.\",\n nullable: '\"nullable\" keyword is supported by default.',\n jsonPointers: \"Deprecated jsPropertySyntax can be used instead.\",\n extendRefs: \"Deprecated ignoreKeywordsWithRef can be used instead.\",\n missingRefs: \"Pass empty schema with $id that should be ignored to ajv.addSchema.\",\n processCode: \"Use option `code: {process: (code, schemaEnv: object) => string}`\",\n sourceCode: \"Use option `code: {source: true}`\",\n strictDefaults: \"It is default now, see option `strict`.\",\n strictKeywords: \"It is default now, see option `strict`.\",\n uniqueItems: '\"uniqueItems\" keyword is always validated.',\n unknownFormats: \"Disable strict mode or pass `true` to `ajv.addFormat` (or `formats` option).\",\n cache: \"Map is used as cache, schema object as key.\",\n serialize: \"Map is used as cache, schema object as key.\",\n ajvErrors: \"It is default now.\",\n};\nconst deprecatedOptions = {\n ignoreKeywordsWithRef: \"\",\n jsPropertySyntax: \"\",\n unicode: '\"minLength\"/\"maxLength\" account for unicode characters by default.',\n};\nconst MAX_EXPRESSION = 200;\n// eslint-disable-next-line complexity\nfunction requiredOptions(o) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _0;\n const s = o.strict;\n const _optz = (_a = o.code) === null || _a === void 0 ? void 0 : _a.optimize;\n const optimize = _optz === true || _optz === undefined ? 1 : _optz || 0;\n const regExp = (_c = (_b = o.code) === null || _b === void 0 ? void 0 : _b.regExp) !== null && _c !== void 0 ? _c : defaultRegExp;\n const uriResolver = (_d = o.uriResolver) !== null && _d !== void 0 ? _d : uri_1.default;\n return {\n strictSchema: (_f = (_e = o.strictSchema) !== null && _e !== void 0 ? _e : s) !== null && _f !== void 0 ? _f : true,\n strictNumbers: (_h = (_g = o.strictNumbers) !== null && _g !== void 0 ? _g : s) !== null && _h !== void 0 ? _h : true,\n strictTypes: (_k = (_j = o.strictTypes) !== null && _j !== void 0 ? _j : s) !== null && _k !== void 0 ? 
_k : \"log\",\n strictTuples: (_m = (_l = o.strictTuples) !== null && _l !== void 0 ? _l : s) !== null && _m !== void 0 ? _m : \"log\",\n strictRequired: (_p = (_o = o.strictRequired) !== null && _o !== void 0 ? _o : s) !== null && _p !== void 0 ? _p : false,\n code: o.code ? { ...o.code, optimize, regExp } : { optimize, regExp },\n loopRequired: (_q = o.loopRequired) !== null && _q !== void 0 ? _q : MAX_EXPRESSION,\n loopEnum: (_r = o.loopEnum) !== null && _r !== void 0 ? _r : MAX_EXPRESSION,\n meta: (_s = o.meta) !== null && _s !== void 0 ? _s : true,\n messages: (_t = o.messages) !== null && _t !== void 0 ? _t : true,\n inlineRefs: (_u = o.inlineRefs) !== null && _u !== void 0 ? _u : true,\n schemaId: (_v = o.schemaId) !== null && _v !== void 0 ? _v : \"$id\",\n addUsedSchema: (_w = o.addUsedSchema) !== null && _w !== void 0 ? _w : true,\n validateSchema: (_x = o.validateSchema) !== null && _x !== void 0 ? _x : true,\n validateFormats: (_y = o.validateFormats) !== null && _y !== void 0 ? _y : true,\n unicodeRegExp: (_z = o.unicodeRegExp) !== null && _z !== void 0 ? _z : true,\n int32range: (_0 = o.int32range) !== null && _0 !== void 0 ? _0 : true,\n uriResolver: uriResolver,\n };\n}\nclass Ajv {\n constructor(opts = {}) {\n this.schemas = {};\n this.refs = {};\n this.formats = {};\n this._compilations = new Set();\n this._loading = {};\n this._cache = new Map();\n opts = this.opts = { ...opts, ...requiredOptions(opts) };\n const { es5, lines } = this.opts.code;\n this.scope = new codegen_2.ValueScope({ scope: {}, prefixes: EXT_SCOPE_NAMES, es5, lines });\n this.logger = getLogger(opts.logger);\n const formatOpt = opts.validateFormats;\n opts.validateFormats = false;\n this.RULES = (0, rules_1.getRules)();\n checkOptions.call(this, removedOptions, opts, \"NOT SUPPORTED\");\n checkOptions.call(this, deprecatedOptions, opts, \"DEPRECATED\", \"warn\");\n this._metaOpts = getMetaSchemaOptions.call(this);\n if (opts.formats)\n addInitialFormats.call(this);\n this._addVocabularies();\n this._addDefaultMetaSchema();\n if (opts.keywords)\n addInitialKeywords.call(this, opts.keywords);\n if (typeof opts.meta == \"object\")\n this.addMetaSchema(opts.meta);\n addInitialSchemas.call(this);\n opts.validateFormats = formatOpt;\n }\n _addVocabularies() {\n this.addKeyword(\"$async\");\n }\n _addDefaultMetaSchema() {\n const { $data, meta, schemaId } = this.opts;\n let _dataRefSchema = $dataRefSchema;\n if (schemaId === \"id\") {\n _dataRefSchema = { ...$dataRefSchema };\n _dataRefSchema.id = _dataRefSchema.$id;\n delete _dataRefSchema.$id;\n }\n if (meta && $data)\n this.addMetaSchema(_dataRefSchema, _dataRefSchema[schemaId], false);\n }\n defaultMeta() {\n const { meta, schemaId } = this.opts;\n return (this.opts.defaultMeta = typeof meta == \"object\" ? 
meta[schemaId] || meta : undefined);\n }\n validate(schemaKeyRef, // key, ref or schema object\n data // to be validated\n ) {\n let v;\n if (typeof schemaKeyRef == \"string\") {\n v = this.getSchema(schemaKeyRef);\n if (!v)\n throw new Error(`no schema with key or ref \"${schemaKeyRef}\"`);\n }\n else {\n v = this.compile(schemaKeyRef);\n }\n const valid = v(data);\n if (!(\"$async\" in v))\n this.errors = v.errors;\n return valid;\n }\n compile(schema, _meta) {\n const sch = this._addSchema(schema, _meta);\n return (sch.validate || this._compileSchemaEnv(sch));\n }\n compileAsync(schema, meta) {\n if (typeof this.opts.loadSchema != \"function\") {\n throw new Error(\"options.loadSchema should be a function\");\n }\n const { loadSchema } = this.opts;\n return runCompileAsync.call(this, schema, meta);\n async function runCompileAsync(_schema, _meta) {\n await loadMetaSchema.call(this, _schema.$schema);\n const sch = this._addSchema(_schema, _meta);\n return sch.validate || _compileAsync.call(this, sch);\n }\n async function loadMetaSchema($ref) {\n if ($ref && !this.getSchema($ref)) {\n await runCompileAsync.call(this, { $ref }, true);\n }\n }\n async function _compileAsync(sch) {\n try {\n return this._compileSchemaEnv(sch);\n }\n catch (e) {\n if (!(e instanceof ref_error_1.default))\n throw e;\n checkLoaded.call(this, e);\n await loadMissingSchema.call(this, e.missingSchema);\n return _compileAsync.call(this, sch);\n }\n }\n function checkLoaded({ missingSchema: ref, missingRef }) {\n if (this.refs[ref]) {\n throw new Error(`AnySchema ${ref} is loaded but ${missingRef} cannot be resolved`);\n }\n }\n async function loadMissingSchema(ref) {\n const _schema = await _loadSchema.call(this, ref);\n if (!this.refs[ref])\n await loadMetaSchema.call(this, _schema.$schema);\n if (!this.refs[ref])\n this.addSchema(_schema, ref, meta);\n }\n async function _loadSchema(ref) {\n const p = this._loading[ref];\n if (p)\n return p;\n try {\n return await (this._loading[ref] = loadSchema(ref));\n }\n finally {\n delete this._loading[ref];\n }\n }\n }\n // Adds schema to the instance\n addSchema(schema, // If array is passed, `key` will be ignored\n key, // Optional schema key. Can be passed to `validate` method instead of schema object or id/ref. One schema per instance can have empty `id` and `key`.\n _meta, // true if schema is a meta-schema. Used internally, addMetaSchema should be used instead.\n _validateSchema = this.opts.validateSchema // false to skip schema validation. 
Used internally, option validateSchema should be used instead.\n ) {\n if (Array.isArray(schema)) {\n for (const sch of schema)\n this.addSchema(sch, undefined, _meta, _validateSchema);\n return this;\n }\n let id;\n if (typeof schema === \"object\") {\n const { schemaId } = this.opts;\n id = schema[schemaId];\n if (id !== undefined && typeof id != \"string\") {\n throw new Error(`schema ${schemaId} must be string`);\n }\n }\n key = (0, resolve_1.normalizeId)(key || id);\n this._checkUnique(key);\n this.schemas[key] = this._addSchema(schema, _meta, key, _validateSchema, true);\n return this;\n }\n // Add schema that will be used to validate other schemas\n // options in META_IGNORE_OPTIONS are alway set to false\n addMetaSchema(schema, key, // schema key\n _validateSchema = this.opts.validateSchema // false to skip schema validation, can be used to override validateSchema option for meta-schema\n ) {\n this.addSchema(schema, key, true, _validateSchema);\n return this;\n }\n // Validate schema against its meta-schema\n validateSchema(schema, throwOrLogError) {\n if (typeof schema == \"boolean\")\n return true;\n let $schema;\n $schema = schema.$schema;\n if ($schema !== undefined && typeof $schema != \"string\") {\n throw new Error(\"$schema must be a string\");\n }\n $schema = $schema || this.opts.defaultMeta || this.defaultMeta();\n if (!$schema) {\n this.logger.warn(\"meta-schema not available\");\n this.errors = null;\n return true;\n }\n const valid = this.validate($schema, schema);\n if (!valid && throwOrLogError) {\n const message = \"schema is invalid: \" + this.errorsText();\n if (this.opts.validateSchema === \"log\")\n this.logger.error(message);\n else\n throw new Error(message);\n }\n return valid;\n }\n // Get compiled schema by `key` or `ref`.\n // (`key` that was passed to `addSchema` or full schema reference - `schema.$id` or resolved id)\n getSchema(keyRef) {\n let sch;\n while (typeof (sch = getSchEnv.call(this, keyRef)) == \"string\")\n keyRef = sch;\n if (sch === undefined) {\n const { schemaId } = this.opts;\n const root = new compile_1.SchemaEnv({ schema: {}, schemaId });\n sch = compile_1.resolveSchema.call(this, root, keyRef);\n if (!sch)\n return;\n this.refs[keyRef] = sch;\n }\n return (sch.validate || this._compileSchemaEnv(sch));\n }\n // Remove cached schema(s).\n // If no parameter is passed all schemas but meta-schemas are removed.\n // If RegExp is passed all schemas with key/id matching pattern but meta-schemas are removed.\n // Even if schema is referenced by other schemas it still can be removed as other schemas have local references.\n removeSchema(schemaKeyRef) {\n if (schemaKeyRef instanceof RegExp) {\n this._removeAllSchemas(this.schemas, schemaKeyRef);\n this._removeAllSchemas(this.refs, schemaKeyRef);\n return this;\n }\n switch (typeof schemaKeyRef) {\n case \"undefined\":\n this._removeAllSchemas(this.schemas);\n this._removeAllSchemas(this.refs);\n this._cache.clear();\n return this;\n case \"string\": {\n const sch = getSchEnv.call(this, schemaKeyRef);\n if (typeof sch == \"object\")\n this._cache.delete(sch.schema);\n delete this.schemas[schemaKeyRef];\n delete this.refs[schemaKeyRef];\n return this;\n }\n case \"object\": {\n const cacheKey = schemaKeyRef;\n this._cache.delete(cacheKey);\n let id = schemaKeyRef[this.opts.schemaId];\n if (id) {\n id = (0, resolve_1.normalizeId)(id);\n delete this.schemas[id];\n delete this.refs[id];\n }\n return this;\n }\n default:\n throw new Error(\"ajv.removeSchema: invalid parameter\");\n }\n }\n // add 
\"vocabulary\" - a collection of keywords\n addVocabulary(definitions) {\n for (const def of definitions)\n this.addKeyword(def);\n return this;\n }\n addKeyword(kwdOrDef, def // deprecated\n ) {\n let keyword;\n if (typeof kwdOrDef == \"string\") {\n keyword = kwdOrDef;\n if (typeof def == \"object\") {\n this.logger.warn(\"these parameters are deprecated, see docs for addKeyword\");\n def.keyword = keyword;\n }\n }\n else if (typeof kwdOrDef == \"object\" && def === undefined) {\n def = kwdOrDef;\n keyword = def.keyword;\n if (Array.isArray(keyword) && !keyword.length) {\n throw new Error(\"addKeywords: keyword must be string or non-empty array\");\n }\n }\n else {\n throw new Error(\"invalid addKeywords parameters\");\n }\n checkKeyword.call(this, keyword, def);\n if (!def) {\n (0, util_1.eachItem)(keyword, (kwd) => addRule.call(this, kwd));\n return this;\n }\n keywordMetaschema.call(this, def);\n const definition = {\n ...def,\n type: (0, dataType_1.getJSONTypes)(def.type),\n schemaType: (0, dataType_1.getJSONTypes)(def.schemaType),\n };\n (0, util_1.eachItem)(keyword, definition.type.length === 0\n ? (k) => addRule.call(this, k, definition)\n : (k) => definition.type.forEach((t) => addRule.call(this, k, definition, t)));\n return this;\n }\n getKeyword(keyword) {\n const rule = this.RULES.all[keyword];\n return typeof rule == \"object\" ? rule.definition : !!rule;\n }\n // Remove keyword\n removeKeyword(keyword) {\n // TODO return type should be Ajv\n const { RULES } = this;\n delete RULES.keywords[keyword];\n delete RULES.all[keyword];\n for (const group of RULES.rules) {\n const i = group.rules.findIndex((rule) => rule.keyword === keyword);\n if (i >= 0)\n group.rules.splice(i, 1);\n }\n return this;\n }\n // Add format\n addFormat(name, format) {\n if (typeof format == \"string\")\n format = new RegExp(format);\n this.formats[name] = format;\n return this;\n }\n errorsText(errors = this.errors, // optional array of validation errors\n { separator = \", \", dataVar = \"data\" } = {} // optional options with properties `separator` and `dataVar`\n ) {\n if (!errors || errors.length === 0)\n return \"No errors\";\n return errors\n .map((e) => `${dataVar}${e.instancePath} ${e.message}`)\n .reduce((text, msg) => text + separator + msg);\n }\n $dataMetaSchema(metaSchema, keywordsJsonPointers) {\n const rules = this.RULES.all;\n metaSchema = JSON.parse(JSON.stringify(metaSchema));\n for (const jsonPointer of keywordsJsonPointers) {\n const segments = jsonPointer.split(\"/\").slice(1); // first segment is an empty string\n let keywords = metaSchema;\n for (const seg of segments)\n keywords = keywords[seg];\n for (const key in rules) {\n const rule = rules[key];\n if (typeof rule != \"object\")\n continue;\n const { $data } = rule.definition;\n const schema = keywords[key];\n if ($data && schema)\n keywords[key] = schemaOrData(schema);\n }\n }\n return metaSchema;\n }\n _removeAllSchemas(schemas, regex) {\n for (const keyRef in schemas) {\n const sch = schemas[keyRef];\n if (!regex || regex.test(keyRef)) {\n if (typeof sch == \"string\") {\n delete schemas[keyRef];\n }\n else if (sch && !sch.meta) {\n this._cache.delete(sch.schema);\n delete schemas[keyRef];\n }\n }\n }\n }\n _addSchema(schema, meta, baseId, validateSchema = this.opts.validateSchema, addSchema = this.opts.addUsedSchema) {\n let id;\n const { schemaId } = this.opts;\n if (typeof schema == \"object\") {\n id = schema[schemaId];\n }\n else {\n if (this.opts.jtd)\n throw new Error(\"schema must be object\");\n else if (typeof 
schema != \"boolean\")\n throw new Error(\"schema must be object or boolean\");\n }\n let sch = this._cache.get(schema);\n if (sch !== undefined)\n return sch;\n baseId = (0, resolve_1.normalizeId)(id || baseId);\n const localRefs = resolve_1.getSchemaRefs.call(this, schema, baseId);\n sch = new compile_1.SchemaEnv({ schema, schemaId, meta, baseId, localRefs });\n this._cache.set(sch.schema, sch);\n if (addSchema && !baseId.startsWith(\"#\")) {\n // TODO atm it is allowed to overwrite schemas without id (instead of not adding them)\n if (baseId)\n this._checkUnique(baseId);\n this.refs[baseId] = sch;\n }\n if (validateSchema)\n this.validateSchema(schema, true);\n return sch;\n }\n _checkUnique(id) {\n if (this.schemas[id] || this.refs[id]) {\n throw new Error(`schema with key or id \"${id}\" already exists`);\n }\n }\n _compileSchemaEnv(sch) {\n if (sch.meta)\n this._compileMetaSchema(sch);\n else\n compile_1.compileSchema.call(this, sch);\n /* istanbul ignore if */\n if (!sch.validate)\n throw new Error(\"ajv implementation error\");\n return sch.validate;\n }\n _compileMetaSchema(sch) {\n const currentOpts = this.opts;\n this.opts = this._metaOpts;\n try {\n compile_1.compileSchema.call(this, sch);\n }\n finally {\n this.opts = currentOpts;\n }\n }\n}\nexports.default = Ajv;\nAjv.ValidationError = validation_error_1.default;\nAjv.MissingRefError = ref_error_1.default;\nfunction checkOptions(checkOpts, options, msg, log = \"error\") {\n for (const key in checkOpts) {\n const opt = key;\n if (opt in options)\n this.logger[log](`${msg}: option ${key}. ${checkOpts[opt]}`);\n }\n}\nfunction getSchEnv(keyRef) {\n keyRef = (0, resolve_1.normalizeId)(keyRef); // TODO tests fail without this line\n return this.schemas[keyRef] || this.refs[keyRef];\n}\nfunction addInitialSchemas() {\n const optsSchemas = this.opts.schemas;\n if (!optsSchemas)\n return;\n if (Array.isArray(optsSchemas))\n this.addSchema(optsSchemas);\n else\n for (const key in optsSchemas)\n this.addSchema(optsSchemas[key], key);\n}\nfunction addInitialFormats() {\n for (const name in this.opts.formats) {\n const format = this.opts.formats[name];\n if (format)\n this.addFormat(name, format);\n }\n}\nfunction addInitialKeywords(defs) {\n if (Array.isArray(defs)) {\n this.addVocabulary(defs);\n return;\n }\n this.logger.warn(\"keywords option as map is deprecated, pass array\");\n for (const keyword in defs) {\n const def = defs[keyword];\n if (!def.keyword)\n def.keyword = keyword;\n this.addKeyword(def);\n }\n}\nfunction getMetaSchemaOptions() {\n const metaOpts = { ...this.opts };\n for (const opt of META_IGNORE_OPTIONS)\n delete metaOpts[opt];\n return metaOpts;\n}\nconst noLogs = { log() { }, warn() { }, error() { } };\nfunction getLogger(logger) {\n if (logger === false)\n return noLogs;\n if (logger === undefined)\n return console;\n if (logger.log && logger.warn && logger.error)\n return logger;\n throw new Error(\"logger must implement log, warn and error methods\");\n}\nconst KEYWORD_NAME = /^[a-z_$][a-z0-9_$:-]*$/i;\nfunction checkKeyword(keyword, def) {\n const { RULES } = this;\n (0, util_1.eachItem)(keyword, (kwd) => {\n if (RULES.keywords[kwd])\n throw new Error(`Keyword ${kwd} is already defined`);\n if (!KEYWORD_NAME.test(kwd))\n throw new Error(`Keyword ${kwd} has invalid name`);\n });\n if (!def)\n return;\n if (def.$data && !(\"code\" in def || \"validate\" in def)) {\n throw new Error('$data keyword must have \"code\" or \"validate\" function');\n }\n}\nfunction addRule(keyword, definition, dataType) {\n var 
_a;\n const post = definition === null || definition === void 0 ? void 0 : definition.post;\n if (dataType && post)\n throw new Error('keyword with \"post\" flag cannot have \"type\"');\n const { RULES } = this;\n let ruleGroup = post ? RULES.post : RULES.rules.find(({ type: t }) => t === dataType);\n if (!ruleGroup) {\n ruleGroup = { type: dataType, rules: [] };\n RULES.rules.push(ruleGroup);\n }\n RULES.keywords[keyword] = true;\n if (!definition)\n return;\n const rule = {\n keyword,\n definition: {\n ...definition,\n type: (0, dataType_1.getJSONTypes)(definition.type),\n schemaType: (0, dataType_1.getJSONTypes)(definition.schemaType),\n },\n };\n if (definition.before)\n addBeforeRule.call(this, ruleGroup, rule, definition.before);\n else\n ruleGroup.rules.push(rule);\n RULES.all[keyword] = rule;\n (_a = definition.implements) === null || _a === void 0 ? void 0 : _a.forEach((kwd) => this.addKeyword(kwd));\n}\nfunction addBeforeRule(ruleGroup, rule, before) {\n const i = ruleGroup.rules.findIndex((_rule) => _rule.keyword === before);\n if (i >= 0) {\n ruleGroup.rules.splice(i, 0, rule);\n }\n else {\n ruleGroup.rules.push(rule);\n this.logger.warn(`rule ${before} is not defined`);\n }\n}\nfunction keywordMetaschema(def) {\n let { metaSchema } = def;\n if (metaSchema === undefined)\n return;\n if (def.$data && this.opts.$data)\n metaSchema = schemaOrData(metaSchema);\n def.validateSchema = this.compile(metaSchema, true);\n}\nconst $dataRef = {\n $ref: \"https://raw.githubusercontent.com/ajv-validator/ajv/master/lib/refs/data.json#\",\n};\nfunction schemaOrData(schema) {\n return { anyOf: [schema, $dataRef] };\n}\n//# sourceMappingURL=core.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\n// https://github.com/ajv-validator/ajv/issues/889\nconst equal = require(\"fast-deep-equal\");\nequal.code = 'require(\"ajv/dist/runtime/equal\").default';\nexports.default = equal;\n//# sourceMappingURL=equal.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\n// https://mathiasbynens.be/notes/javascript-encoding\n// https://github.com/bestiejs/punycode.js - punycode.ucs2.decode\nfunction ucs2length(str) {\n const len = str.length;\n let length = 0;\n let pos = 0;\n let value;\n while (pos < len) {\n length++;\n value = str.charCodeAt(pos++);\n if (value >= 0xd800 && value <= 0xdbff && pos < len) {\n // high surrogate, and there is a next character\n value = str.charCodeAt(pos);\n if ((value & 0xfc00) === 0xdc00)\n pos++; // low surrogate\n }\n }\n return length;\n}\nexports.default = ucs2length;\nucs2length.code = 'require(\"ajv/dist/runtime/ucs2length\").default';\n//# sourceMappingURL=ucs2length.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst uri = require(\"uri-js\");\nuri.code = 'require(\"ajv/dist/runtime/uri\").default';\nexports.default = uri;\n//# sourceMappingURL=uri.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nclass ValidationError extends Error {\n constructor(errors) {\n super(\"validation failed\");\n this.errors = errors;\n this.ajv = this.validation = true;\n }\n}\nexports.default = ValidationError;\n//# sourceMappingURL=validation_error.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.validateAdditionalItems = void 0;\nconst codegen_1 = require(\"../../compile/codegen\");\nconst util_1 = require(\"../../compile/util\");\nconst error = {\n message: ({ params: { 
len } }) => (0, codegen_1.str) `must NOT have more than ${len} items`,\n params: ({ params: { len } }) => (0, codegen_1._) `{limit: ${len}}`,\n};\nconst def = {\n keyword: \"additionalItems\",\n type: \"array\",\n schemaType: [\"boolean\", \"object\"],\n before: \"uniqueItems\",\n error,\n code(cxt) {\n const { parentSchema, it } = cxt;\n const { items } = parentSchema;\n if (!Array.isArray(items)) {\n (0, util_1.checkStrictMode)(it, '\"additionalItems\" is ignored when \"items\" is not an array of schemas');\n return;\n }\n validateAdditionalItems(cxt, items);\n },\n};\nfunction validateAdditionalItems(cxt, items) {\n const { gen, schema, data, keyword, it } = cxt;\n it.items = true;\n const len = gen.const(\"len\", (0, codegen_1._) `${data}.length`);\n if (schema === false) {\n cxt.setParams({ len: items.length });\n cxt.pass((0, codegen_1._) `${len} <= ${items.length}`);\n }\n else if (typeof schema == \"object\" && !(0, util_1.alwaysValidSchema)(it, schema)) {\n const valid = gen.var(\"valid\", (0, codegen_1._) `${len} <= ${items.length}`); // TODO var\n gen.if((0, codegen_1.not)(valid), () => validateItems(valid));\n cxt.ok(valid);\n }\n function validateItems(valid) {\n gen.forRange(\"i\", items.length, len, (i) => {\n cxt.subschema({ keyword, dataProp: i, dataPropType: util_1.Type.Num }, valid);\n if (!it.allErrors)\n gen.if((0, codegen_1.not)(valid), () => gen.break());\n });\n }\n}\nexports.validateAdditionalItems = validateAdditionalItems;\nexports.default = def;\n//# sourceMappingURL=additionalItems.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst code_1 = require(\"../code\");\nconst codegen_1 = require(\"../../compile/codegen\");\nconst names_1 = require(\"../../compile/names\");\nconst util_1 = require(\"../../compile/util\");\nconst error = {\n message: \"must NOT have additional properties\",\n params: ({ params }) => (0, codegen_1._) `{additionalProperty: ${params.additionalProperty}}`,\n};\nconst def = {\n keyword: \"additionalProperties\",\n type: [\"object\"],\n schemaType: [\"boolean\", \"object\"],\n allowUndefined: true,\n trackErrors: true,\n error,\n code(cxt) {\n const { gen, schema, parentSchema, data, errsCount, it } = cxt;\n /* istanbul ignore if */\n if (!errsCount)\n throw new Error(\"ajv implementation error\");\n const { allErrors, opts } = it;\n it.props = true;\n if (opts.removeAdditional !== \"all\" && (0, util_1.alwaysValidSchema)(it, schema))\n return;\n const props = (0, code_1.allSchemaProperties)(parentSchema.properties);\n const patProps = (0, code_1.allSchemaProperties)(parentSchema.patternProperties);\n checkAdditionalProperties();\n cxt.ok((0, codegen_1._) `${errsCount} === ${names_1.default.errors}`);\n function checkAdditionalProperties() {\n gen.forIn(\"key\", data, (key) => {\n if (!props.length && !patProps.length)\n additionalPropertyCode(key);\n else\n gen.if(isAdditional(key), () => additionalPropertyCode(key));\n });\n }\n function isAdditional(key) {\n let definedProp;\n if (props.length > 8) {\n // TODO maybe an option instead of hard-coded 8?\n const propsSchema = (0, util_1.schemaRefOrVal)(it, parentSchema.properties, \"properties\");\n definedProp = (0, code_1.isOwnProperty)(gen, propsSchema, key);\n }\n else if (props.length) {\n definedProp = (0, codegen_1.or)(...props.map((p) => (0, codegen_1._) `${key} === ${p}`));\n }\n else {\n definedProp = codegen_1.nil;\n }\n if (patProps.length) {\n definedProp = (0, codegen_1.or)(definedProp, ...patProps.map((p) => (0, codegen_1._) `${(0, 
code_1.usePattern)(cxt, p)}.test(${key})`));\n }\n return (0, codegen_1.not)(definedProp);\n }\n function deleteAdditional(key) {\n gen.code((0, codegen_1._) `delete ${data}[${key}]`);\n }\n function additionalPropertyCode(key) {\n if (opts.removeAdditional === \"all\" || (opts.removeAdditional && schema === false)) {\n deleteAdditional(key);\n return;\n }\n if (schema === false) {\n cxt.setParams({ additionalProperty: key });\n cxt.error();\n if (!allErrors)\n gen.break();\n return;\n }\n if (typeof schema == \"object\" && !(0, util_1.alwaysValidSchema)(it, schema)) {\n const valid = gen.name(\"valid\");\n if (opts.removeAdditional === \"failing\") {\n applyAdditionalSchema(key, valid, false);\n gen.if((0, codegen_1.not)(valid), () => {\n cxt.reset();\n deleteAdditional(key);\n });\n }\n else {\n applyAdditionalSchema(key, valid);\n if (!allErrors)\n gen.if((0, codegen_1.not)(valid), () => gen.break());\n }\n }\n }\n function applyAdditionalSchema(key, valid, errors) {\n const subschema = {\n keyword: \"additionalProperties\",\n dataProp: key,\n dataPropType: util_1.Type.Str,\n };\n if (errors === false) {\n Object.assign(subschema, {\n compositeRule: true,\n createErrors: false,\n allErrors: false,\n });\n }\n cxt.subschema(subschema, valid);\n }\n },\n};\nexports.default = def;\n//# sourceMappingURL=additionalProperties.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst util_1 = require(\"../../compile/util\");\nconst def = {\n keyword: \"allOf\",\n schemaType: \"array\",\n code(cxt) {\n const { gen, schema, it } = cxt;\n /* istanbul ignore if */\n if (!Array.isArray(schema))\n throw new Error(\"ajv implementation error\");\n const valid = gen.name(\"valid\");\n schema.forEach((sch, i) => {\n if ((0, util_1.alwaysValidSchema)(it, sch))\n return;\n const schCxt = cxt.subschema({ keyword: \"allOf\", schemaProp: i }, valid);\n cxt.ok(valid);\n cxt.mergeEvaluated(schCxt);\n });\n },\n};\nexports.default = def;\n//# sourceMappingURL=allOf.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst code_1 = require(\"../code\");\nconst def = {\n keyword: \"anyOf\",\n schemaType: \"array\",\n trackErrors: true,\n code: code_1.validateUnion,\n error: { message: \"must match a schema in anyOf\" },\n};\nexports.default = def;\n//# sourceMappingURL=anyOf.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = require(\"../../compile/codegen\");\nconst util_1 = require(\"../../compile/util\");\nconst error = {\n message: ({ params: { min, max } }) => max === undefined\n ? (0, codegen_1.str) `must contain at least ${min} valid item(s)`\n : (0, codegen_1.str) `must contain at least ${min} and no more than ${max} valid item(s)`,\n params: ({ params: { min, max } }) => max === undefined ? (0, codegen_1._) `{minContains: ${min}}` : (0, codegen_1._) `{minContains: ${min}, maxContains: ${max}}`,\n};\nconst def = {\n keyword: \"contains\",\n type: \"array\",\n schemaType: [\"object\", \"boolean\"],\n before: \"uniqueItems\",\n trackErrors: true,\n error,\n code(cxt) {\n const { gen, schema, parentSchema, data, it } = cxt;\n let min;\n let max;\n const { minContains, maxContains } = parentSchema;\n if (it.opts.next) {\n min = minContains === undefined ? 
1 : minContains;\n max = maxContains;\n }\n else {\n min = 1;\n }\n const len = gen.const(\"len\", (0, codegen_1._) `${data}.length`);\n cxt.setParams({ min, max });\n if (max === undefined && min === 0) {\n (0, util_1.checkStrictMode)(it, `\"minContains\" == 0 without \"maxContains\": \"contains\" keyword ignored`);\n return;\n }\n if (max !== undefined && min > max) {\n (0, util_1.checkStrictMode)(it, `\"minContains\" > \"maxContains\" is always invalid`);\n cxt.fail();\n return;\n }\n if ((0, util_1.alwaysValidSchema)(it, schema)) {\n let cond = (0, codegen_1._) `${len} >= ${min}`;\n if (max !== undefined)\n cond = (0, codegen_1._) `${cond} && ${len} <= ${max}`;\n cxt.pass(cond);\n return;\n }\n it.items = true;\n const valid = gen.name(\"valid\");\n if (max === undefined && min === 1) {\n validateItems(valid, () => gen.if(valid, () => gen.break()));\n }\n else if (min === 0) {\n gen.let(valid, true);\n if (max !== undefined)\n gen.if((0, codegen_1._) `${data}.length > 0`, validateItemsWithCount);\n }\n else {\n gen.let(valid, false);\n validateItemsWithCount();\n }\n cxt.result(valid, () => cxt.reset());\n function validateItemsWithCount() {\n const schValid = gen.name(\"_valid\");\n const count = gen.let(\"count\", 0);\n validateItems(schValid, () => gen.if(schValid, () => checkLimits(count)));\n }\n function validateItems(_valid, block) {\n gen.forRange(\"i\", 0, len, (i) => {\n cxt.subschema({\n keyword: \"contains\",\n dataProp: i,\n dataPropType: util_1.Type.Num,\n compositeRule: true,\n }, _valid);\n block();\n });\n }\n function checkLimits(count) {\n gen.code((0, codegen_1._) `${count}++`);\n if (max === undefined) {\n gen.if((0, codegen_1._) `${count} >= ${min}`, () => gen.assign(valid, true).break());\n }\n else {\n gen.if((0, codegen_1._) `${count} > ${max}`, () => gen.assign(valid, false).break());\n if (min === 1)\n gen.assign(valid, true);\n else\n gen.if((0, codegen_1._) `${count} >= ${min}`, () => gen.assign(valid, true));\n }\n }\n },\n};\nexports.default = def;\n//# sourceMappingURL=contains.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.validateSchemaDeps = exports.validatePropertyDeps = exports.error = void 0;\nconst codegen_1 = require(\"../../compile/codegen\");\nconst util_1 = require(\"../../compile/util\");\nconst code_1 = require(\"../code\");\nexports.error = {\n message: ({ params: { property, depsCount, deps } }) => {\n const property_ies = depsCount === 1 ? \"property\" : \"properties\";\n return (0, codegen_1.str) `must have ${property_ies} ${deps} when property ${property} is present`;\n },\n params: ({ params: { property, depsCount, deps, missingProperty } }) => (0, codegen_1._) `{property: ${property},\n missingProperty: ${missingProperty},\n depsCount: ${depsCount},\n deps: ${deps}}`, // TODO change to reference\n};\nconst def = {\n keyword: \"dependencies\",\n type: \"object\",\n schemaType: \"object\",\n error: exports.error,\n code(cxt) {\n const [propDeps, schDeps] = splitDependencies(cxt);\n validatePropertyDeps(cxt, propDeps);\n validateSchemaDeps(cxt, schDeps);\n },\n};\nfunction splitDependencies({ schema }) {\n const propertyDeps = {};\n const schemaDeps = {};\n for (const key in schema) {\n if (key === \"__proto__\")\n continue;\n const deps = Array.isArray(schema[key]) ? 
propertyDeps : schemaDeps;\n deps[key] = schema[key];\n }\n return [propertyDeps, schemaDeps];\n}\nfunction validatePropertyDeps(cxt, propertyDeps = cxt.schema) {\n const { gen, data, it } = cxt;\n if (Object.keys(propertyDeps).length === 0)\n return;\n const missing = gen.let(\"missing\");\n for (const prop in propertyDeps) {\n const deps = propertyDeps[prop];\n if (deps.length === 0)\n continue;\n const hasProperty = (0, code_1.propertyInData)(gen, data, prop, it.opts.ownProperties);\n cxt.setParams({\n property: prop,\n depsCount: deps.length,\n deps: deps.join(\", \"),\n });\n if (it.allErrors) {\n gen.if(hasProperty, () => {\n for (const depProp of deps) {\n (0, code_1.checkReportMissingProp)(cxt, depProp);\n }\n });\n }\n else {\n gen.if((0, codegen_1._) `${hasProperty} && (${(0, code_1.checkMissingProp)(cxt, deps, missing)})`);\n (0, code_1.reportMissingProp)(cxt, missing);\n gen.else();\n }\n }\n}\nexports.validatePropertyDeps = validatePropertyDeps;\nfunction validateSchemaDeps(cxt, schemaDeps = cxt.schema) {\n const { gen, data, keyword, it } = cxt;\n const valid = gen.name(\"valid\");\n for (const prop in schemaDeps) {\n if ((0, util_1.alwaysValidSchema)(it, schemaDeps[prop]))\n continue;\n gen.if((0, code_1.propertyInData)(gen, data, prop, it.opts.ownProperties), () => {\n const schCxt = cxt.subschema({ keyword, schemaProp: prop }, valid);\n cxt.mergeValidEvaluated(schCxt, valid);\n }, () => gen.var(valid, true) // TODO var\n );\n cxt.ok(valid);\n }\n}\nexports.validateSchemaDeps = validateSchemaDeps;\nexports.default = def;\n//# sourceMappingURL=dependencies.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = require(\"../../compile/codegen\");\nconst util_1 = require(\"../../compile/util\");\nconst error = {\n message: ({ params }) => (0, codegen_1.str) `must match \"${params.ifClause}\" schema`,\n params: ({ params }) => (0, codegen_1._) `{failingKeyword: ${params.ifClause}}`,\n};\nconst def = {\n keyword: \"if\",\n schemaType: [\"object\", \"boolean\"],\n trackErrors: true,\n error,\n code(cxt) {\n const { gen, parentSchema, it } = cxt;\n if (parentSchema.then === undefined && parentSchema.else === undefined) {\n (0, util_1.checkStrictMode)(it, '\"if\" without \"then\" and \"else\" is ignored');\n }\n const hasThen = hasSchema(it, \"then\");\n const hasElse = hasSchema(it, \"else\");\n if (!hasThen && !hasElse)\n return;\n const valid = gen.let(\"valid\", true);\n const schValid = gen.name(\"_valid\");\n validateIf();\n cxt.reset();\n if (hasThen && hasElse) {\n const ifClause = gen.let(\"ifClause\");\n cxt.setParams({ ifClause });\n gen.if(schValid, validateClause(\"then\", ifClause), validateClause(\"else\", ifClause));\n }\n else if (hasThen) {\n gen.if(schValid, validateClause(\"then\"));\n }\n else {\n gen.if((0, codegen_1.not)(schValid), validateClause(\"else\"));\n }\n cxt.pass(valid, () => cxt.error(true));\n function validateIf() {\n const schCxt = cxt.subschema({\n keyword: \"if\",\n compositeRule: true,\n createErrors: false,\n allErrors: false,\n }, schValid);\n cxt.mergeEvaluated(schCxt);\n }\n function validateClause(keyword, ifClause) {\n return () => {\n const schCxt = cxt.subschema({ keyword }, schValid);\n gen.assign(valid, schValid);\n cxt.mergeValidEvaluated(schCxt, valid);\n if (ifClause)\n gen.assign(ifClause, (0, codegen_1._) `${keyword}`);\n else\n cxt.setParams({ ifClause: keyword });\n };\n }\n },\n};\nfunction hasSchema(it, keyword) {\n const schema = it.schema[keyword];\n return schema !== 
undefined && !(0, util_1.alwaysValidSchema)(it, schema);\n}\nexports.default = def;\n//# sourceMappingURL=if.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst additionalItems_1 = require(\"./additionalItems\");\nconst prefixItems_1 = require(\"./prefixItems\");\nconst items_1 = require(\"./items\");\nconst items2020_1 = require(\"./items2020\");\nconst contains_1 = require(\"./contains\");\nconst dependencies_1 = require(\"./dependencies\");\nconst propertyNames_1 = require(\"./propertyNames\");\nconst additionalProperties_1 = require(\"./additionalProperties\");\nconst properties_1 = require(\"./properties\");\nconst patternProperties_1 = require(\"./patternProperties\");\nconst not_1 = require(\"./not\");\nconst anyOf_1 = require(\"./anyOf\");\nconst oneOf_1 = require(\"./oneOf\");\nconst allOf_1 = require(\"./allOf\");\nconst if_1 = require(\"./if\");\nconst thenElse_1 = require(\"./thenElse\");\nfunction getApplicator(draft2020 = false) {\n const applicator = [\n // any\n not_1.default,\n anyOf_1.default,\n oneOf_1.default,\n allOf_1.default,\n if_1.default,\n thenElse_1.default,\n // object\n propertyNames_1.default,\n additionalProperties_1.default,\n dependencies_1.default,\n properties_1.default,\n patternProperties_1.default,\n ];\n // array\n if (draft2020)\n applicator.push(prefixItems_1.default, items2020_1.default);\n else\n applicator.push(additionalItems_1.default, items_1.default);\n applicator.push(contains_1.default);\n return applicator;\n}\nexports.default = getApplicator;\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.validateTuple = void 0;\nconst codegen_1 = require(\"../../compile/codegen\");\nconst util_1 = require(\"../../compile/util\");\nconst code_1 = require(\"../code\");\nconst def = {\n keyword: \"items\",\n type: \"array\",\n schemaType: [\"object\", \"array\", \"boolean\"],\n before: \"uniqueItems\",\n code(cxt) {\n const { schema, it } = cxt;\n if (Array.isArray(schema))\n return validateTuple(cxt, \"additionalItems\", schema);\n it.items = true;\n if ((0, util_1.alwaysValidSchema)(it, schema))\n return;\n cxt.ok((0, code_1.validateArray)(cxt));\n },\n};\nfunction validateTuple(cxt, extraItems, schArr = cxt.schema) {\n const { gen, parentSchema, data, keyword, it } = cxt;\n checkStrictTuple(parentSchema);\n if (it.opts.unevaluated && schArr.length && it.items !== true) {\n it.items = util_1.mergeEvaluated.items(gen, schArr.length, it.items);\n }\n const valid = gen.name(\"valid\");\n const len = gen.const(\"len\", (0, codegen_1._) `${data}.length`);\n schArr.forEach((sch, i) => {\n if ((0, util_1.alwaysValidSchema)(it, sch))\n return;\n gen.if((0, codegen_1._) `${len} > ${i}`, () => cxt.subschema({\n keyword,\n schemaProp: i,\n dataProp: i,\n }, valid));\n cxt.ok(valid);\n });\n function checkStrictTuple(sch) {\n const { opts, errSchemaPath } = it;\n const l = schArr.length;\n const fullTuple = l === sch.minItems && (l === sch.maxItems || sch[extraItems] === false);\n if (opts.strictTuples && !fullTuple) {\n const msg = `\"${keyword}\" is ${l}-tuple, but minItems or maxItems/${extraItems} are not specified or different at path \"${errSchemaPath}\"`;\n (0, util_1.checkStrictMode)(it, msg, opts.strictTuples);\n }\n }\n}\nexports.validateTuple = validateTuple;\nexports.default = def;\n//# sourceMappingURL=items.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = 
require(\"../../compile/codegen\");\nconst util_1 = require(\"../../compile/util\");\nconst code_1 = require(\"../code\");\nconst additionalItems_1 = require(\"./additionalItems\");\nconst error = {\n message: ({ params: { len } }) => (0, codegen_1.str) `must NOT have more than ${len} items`,\n params: ({ params: { len } }) => (0, codegen_1._) `{limit: ${len}}`,\n};\nconst def = {\n keyword: \"items\",\n type: \"array\",\n schemaType: [\"object\", \"boolean\"],\n before: \"uniqueItems\",\n error,\n code(cxt) {\n const { schema, parentSchema, it } = cxt;\n const { prefixItems } = parentSchema;\n it.items = true;\n if ((0, util_1.alwaysValidSchema)(it, schema))\n return;\n if (prefixItems)\n (0, additionalItems_1.validateAdditionalItems)(cxt, prefixItems);\n else\n cxt.ok((0, code_1.validateArray)(cxt));\n },\n};\nexports.default = def;\n//# sourceMappingURL=items2020.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst util_1 = require(\"../../compile/util\");\nconst def = {\n keyword: \"not\",\n schemaType: [\"object\", \"boolean\"],\n trackErrors: true,\n code(cxt) {\n const { gen, schema, it } = cxt;\n if ((0, util_1.alwaysValidSchema)(it, schema)) {\n cxt.fail();\n return;\n }\n const valid = gen.name(\"valid\");\n cxt.subschema({\n keyword: \"not\",\n compositeRule: true,\n createErrors: false,\n allErrors: false,\n }, valid);\n cxt.failResult(valid, () => cxt.reset(), () => cxt.error());\n },\n error: { message: \"must NOT be valid\" },\n};\nexports.default = def;\n//# sourceMappingURL=not.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = require(\"../../compile/codegen\");\nconst util_1 = require(\"../../compile/util\");\nconst error = {\n message: \"must match exactly one schema in oneOf\",\n params: ({ params }) => (0, codegen_1._) `{passingSchemas: ${params.passing}}`,\n};\nconst def = {\n keyword: \"oneOf\",\n schemaType: \"array\",\n trackErrors: true,\n error,\n code(cxt) {\n const { gen, schema, parentSchema, it } = cxt;\n /* istanbul ignore if */\n if (!Array.isArray(schema))\n throw new Error(\"ajv implementation error\");\n if (it.opts.discriminator && parentSchema.discriminator)\n return;\n const schArr = schema;\n const valid = gen.let(\"valid\", false);\n const passing = gen.let(\"passing\", null);\n const schValid = gen.name(\"_valid\");\n cxt.setParams({ passing });\n // TODO possibly fail straight away (with warning or exception) if there are two empty always valid schemas\n gen.block(validateOneOf);\n cxt.result(valid, () => cxt.reset(), () => cxt.error(true));\n function validateOneOf() {\n schArr.forEach((sch, i) => {\n let schCxt;\n if ((0, util_1.alwaysValidSchema)(it, sch)) {\n gen.var(schValid, true);\n }\n else {\n schCxt = cxt.subschema({\n keyword: \"oneOf\",\n schemaProp: i,\n compositeRule: true,\n }, schValid);\n }\n if (i > 0) {\n gen\n .if((0, codegen_1._) `${schValid} && ${valid}`)\n .assign(valid, false)\n .assign(passing, (0, codegen_1._) `[${passing}, ${i}]`)\n .else();\n }\n gen.if(schValid, () => {\n gen.assign(valid, true);\n gen.assign(passing, i);\n if (schCxt)\n cxt.mergeEvaluated(schCxt, codegen_1.Name);\n });\n });\n }\n },\n};\nexports.default = def;\n//# sourceMappingURL=oneOf.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst code_1 = require(\"../code\");\nconst codegen_1 = require(\"../../compile/codegen\");\nconst util_1 = require(\"../../compile/util\");\nconst util_2 = 
require(\"../../compile/util\");\nconst def = {\n keyword: \"patternProperties\",\n type: \"object\",\n schemaType: \"object\",\n code(cxt) {\n const { gen, schema, data, parentSchema, it } = cxt;\n const { opts } = it;\n const patterns = (0, code_1.allSchemaProperties)(schema);\n const alwaysValidPatterns = patterns.filter((p) => (0, util_1.alwaysValidSchema)(it, schema[p]));\n if (patterns.length === 0 ||\n (alwaysValidPatterns.length === patterns.length &&\n (!it.opts.unevaluated || it.props === true))) {\n return;\n }\n const checkProperties = opts.strictSchema && !opts.allowMatchingProperties && parentSchema.properties;\n const valid = gen.name(\"valid\");\n if (it.props !== true && !(it.props instanceof codegen_1.Name)) {\n it.props = (0, util_2.evaluatedPropsToName)(gen, it.props);\n }\n const { props } = it;\n validatePatternProperties();\n function validatePatternProperties() {\n for (const pat of patterns) {\n if (checkProperties)\n checkMatchingProperties(pat);\n if (it.allErrors) {\n validateProperties(pat);\n }\n else {\n gen.var(valid, true); // TODO var\n validateProperties(pat);\n gen.if(valid);\n }\n }\n }\n function checkMatchingProperties(pat) {\n for (const prop in checkProperties) {\n if (new RegExp(pat).test(prop)) {\n (0, util_1.checkStrictMode)(it, `property ${prop} matches pattern ${pat} (use allowMatchingProperties)`);\n }\n }\n }\n function validateProperties(pat) {\n gen.forIn(\"key\", data, (key) => {\n gen.if((0, codegen_1._) `${(0, code_1.usePattern)(cxt, pat)}.test(${key})`, () => {\n const alwaysValid = alwaysValidPatterns.includes(pat);\n if (!alwaysValid) {\n cxt.subschema({\n keyword: \"patternProperties\",\n schemaProp: pat,\n dataProp: key,\n dataPropType: util_2.Type.Str,\n }, valid);\n }\n if (it.opts.unevaluated && props !== true) {\n gen.assign((0, codegen_1._) `${props}[${key}]`, true);\n }\n else if (!alwaysValid && !it.allErrors) {\n // can short-circuit if `unevaluatedProperties` is not supported (opts.next === false)\n // or if all properties were evaluated (props === true)\n gen.if((0, codegen_1.not)(valid), () => gen.break());\n }\n });\n });\n }\n },\n};\nexports.default = def;\n//# sourceMappingURL=patternProperties.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst items_1 = require(\"./items\");\nconst def = {\n keyword: \"prefixItems\",\n type: \"array\",\n schemaType: [\"array\"],\n before: \"uniqueItems\",\n code: (cxt) => (0, items_1.validateTuple)(cxt, \"items\"),\n};\nexports.default = def;\n//# sourceMappingURL=prefixItems.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst validate_1 = require(\"../../compile/validate\");\nconst code_1 = require(\"../code\");\nconst util_1 = require(\"../../compile/util\");\nconst additionalProperties_1 = require(\"./additionalProperties\");\nconst def = {\n keyword: \"properties\",\n type: \"object\",\n schemaType: \"object\",\n code(cxt) {\n const { gen, schema, parentSchema, data, it } = cxt;\n if (it.opts.removeAdditional === \"all\" && parentSchema.additionalProperties === undefined) {\n additionalProperties_1.default.code(new validate_1.KeywordCxt(it, additionalProperties_1.default, \"additionalProperties\"));\n }\n const allProps = (0, code_1.allSchemaProperties)(schema);\n for (const prop of allProps) {\n it.definedProperties.add(prop);\n }\n if (it.opts.unevaluated && allProps.length && it.props !== true) {\n it.props = util_1.mergeEvaluated.props(gen, (0, util_1.toHash)(allProps), it.props);\n 
}\n const properties = allProps.filter((p) => !(0, util_1.alwaysValidSchema)(it, schema[p]));\n if (properties.length === 0)\n return;\n const valid = gen.name(\"valid\");\n for (const prop of properties) {\n if (hasDefault(prop)) {\n applyPropertySchema(prop);\n }\n else {\n gen.if((0, code_1.propertyInData)(gen, data, prop, it.opts.ownProperties));\n applyPropertySchema(prop);\n if (!it.allErrors)\n gen.else().var(valid, true);\n gen.endIf();\n }\n cxt.it.definedProperties.add(prop);\n cxt.ok(valid);\n }\n function hasDefault(prop) {\n return it.opts.useDefaults && !it.compositeRule && schema[prop].default !== undefined;\n }\n function applyPropertySchema(prop) {\n cxt.subschema({\n keyword: \"properties\",\n schemaProp: prop,\n dataProp: prop,\n }, valid);\n }\n },\n};\nexports.default = def;\n//# sourceMappingURL=properties.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = require(\"../../compile/codegen\");\nconst util_1 = require(\"../../compile/util\");\nconst error = {\n message: \"property name must be valid\",\n params: ({ params }) => (0, codegen_1._) `{propertyName: ${params.propertyName}}`,\n};\nconst def = {\n keyword: \"propertyNames\",\n type: \"object\",\n schemaType: [\"object\", \"boolean\"],\n error,\n code(cxt) {\n const { gen, schema, data, it } = cxt;\n if ((0, util_1.alwaysValidSchema)(it, schema))\n return;\n const valid = gen.name(\"valid\");\n gen.forIn(\"key\", data, (key) => {\n cxt.setParams({ propertyName: key });\n cxt.subschema({\n keyword: \"propertyNames\",\n data: key,\n dataTypes: [\"string\"],\n propertyName: key,\n compositeRule: true,\n }, valid);\n gen.if((0, codegen_1.not)(valid), () => {\n cxt.error(true);\n if (!it.allErrors)\n gen.break();\n });\n });\n cxt.ok(valid);\n },\n};\nexports.default = def;\n//# sourceMappingURL=propertyNames.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst util_1 = require(\"../../compile/util\");\nconst def = {\n keyword: [\"then\", \"else\"],\n schemaType: [\"object\", \"boolean\"],\n code({ keyword, parentSchema, it }) {\n if (parentSchema.if === undefined)\n (0, util_1.checkStrictMode)(it, `\"${keyword}\" without \"if\" is ignored`);\n },\n};\nexports.default = def;\n//# sourceMappingURL=thenElse.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.validateUnion = exports.validateArray = exports.usePattern = exports.callValidateCode = exports.schemaProperties = exports.allSchemaProperties = exports.noPropertyInData = exports.propertyInData = exports.isOwnProperty = exports.hasPropFunc = exports.reportMissingProp = exports.checkMissingProp = exports.checkReportMissingProp = void 0;\nconst codegen_1 = require(\"../compile/codegen\");\nconst util_1 = require(\"../compile/util\");\nconst names_1 = require(\"../compile/names\");\nconst util_2 = require(\"../compile/util\");\nfunction checkReportMissingProp(cxt, prop) {\n const { gen, data, it } = cxt;\n gen.if(noPropertyInData(gen, data, prop, it.opts.ownProperties), () => {\n cxt.setParams({ missingProperty: (0, codegen_1._) `${prop}` }, true);\n cxt.error();\n });\n}\nexports.checkReportMissingProp = checkReportMissingProp;\nfunction checkMissingProp({ gen, data, it: { opts } }, properties, missing) {\n return (0, codegen_1.or)(...properties.map((prop) => (0, codegen_1.and)(noPropertyInData(gen, data, prop, opts.ownProperties), (0, codegen_1._) `${missing} = ${prop}`)));\n}\nexports.checkMissingProp = 
checkMissingProp;\nfunction reportMissingProp(cxt, missing) {\n cxt.setParams({ missingProperty: missing }, true);\n cxt.error();\n}\nexports.reportMissingProp = reportMissingProp;\nfunction hasPropFunc(gen) {\n return gen.scopeValue(\"func\", {\n // eslint-disable-next-line @typescript-eslint/unbound-method\n ref: Object.prototype.hasOwnProperty,\n code: (0, codegen_1._) `Object.prototype.hasOwnProperty`,\n });\n}\nexports.hasPropFunc = hasPropFunc;\nfunction isOwnProperty(gen, data, property) {\n return (0, codegen_1._) `${hasPropFunc(gen)}.call(${data}, ${property})`;\n}\nexports.isOwnProperty = isOwnProperty;\nfunction propertyInData(gen, data, property, ownProperties) {\n const cond = (0, codegen_1._) `${data}${(0, codegen_1.getProperty)(property)} !== undefined`;\n return ownProperties ? (0, codegen_1._) `${cond} && ${isOwnProperty(gen, data, property)}` : cond;\n}\nexports.propertyInData = propertyInData;\nfunction noPropertyInData(gen, data, property, ownProperties) {\n const cond = (0, codegen_1._) `${data}${(0, codegen_1.getProperty)(property)} === undefined`;\n return ownProperties ? (0, codegen_1.or)(cond, (0, codegen_1.not)(isOwnProperty(gen, data, property))) : cond;\n}\nexports.noPropertyInData = noPropertyInData;\nfunction allSchemaProperties(schemaMap) {\n return schemaMap ? Object.keys(schemaMap).filter((p) => p !== \"__proto__\") : [];\n}\nexports.allSchemaProperties = allSchemaProperties;\nfunction schemaProperties(it, schemaMap) {\n return allSchemaProperties(schemaMap).filter((p) => !(0, util_1.alwaysValidSchema)(it, schemaMap[p]));\n}\nexports.schemaProperties = schemaProperties;\nfunction callValidateCode({ schemaCode, data, it: { gen, topSchemaRef, schemaPath, errorPath }, it }, func, context, passSchema) {\n const dataAndSchema = passSchema ? (0, codegen_1._) `${schemaCode}, ${data}, ${topSchemaRef}${schemaPath}` : data;\n const valCxt = [\n [names_1.default.instancePath, (0, codegen_1.strConcat)(names_1.default.instancePath, errorPath)],\n [names_1.default.parentData, it.parentData],\n [names_1.default.parentDataProperty, it.parentDataProperty],\n [names_1.default.rootData, names_1.default.rootData],\n ];\n if (it.opts.dynamicRef)\n valCxt.push([names_1.default.dynamicAnchors, names_1.default.dynamicAnchors]);\n const args = (0, codegen_1._) `${dataAndSchema}, ${gen.object(...valCxt)}`;\n return context !== codegen_1.nil ? (0, codegen_1._) `${func}.call(${context}, ${args})` : (0, codegen_1._) `${func}(${args})`;\n}\nexports.callValidateCode = callValidateCode;\nconst newRegExp = (0, codegen_1._) `new RegExp`;\nfunction usePattern({ gen, it: { opts } }, pattern) {\n const u = opts.unicodeRegExp ? \"u\" : \"\";\n const { regExp } = opts.code;\n const rx = regExp(pattern, u);\n return gen.scopeValue(\"pattern\", {\n key: rx.toString(),\n ref: rx,\n code: (0, codegen_1._) `${regExp.code === \"new RegExp\" ? 
newRegExp : (0, util_2.useFunc)(gen, regExp)}(${pattern}, ${u})`,\n });\n}\nexports.usePattern = usePattern;\nfunction validateArray(cxt) {\n const { gen, data, keyword, it } = cxt;\n const valid = gen.name(\"valid\");\n if (it.allErrors) {\n const validArr = gen.let(\"valid\", true);\n validateItems(() => gen.assign(validArr, false));\n return validArr;\n }\n gen.var(valid, true);\n validateItems(() => gen.break());\n return valid;\n function validateItems(notValid) {\n const len = gen.const(\"len\", (0, codegen_1._) `${data}.length`);\n gen.forRange(\"i\", 0, len, (i) => {\n cxt.subschema({\n keyword,\n dataProp: i,\n dataPropType: util_1.Type.Num,\n }, valid);\n gen.if((0, codegen_1.not)(valid), notValid);\n });\n }\n}\nexports.validateArray = validateArray;\nfunction validateUnion(cxt) {\n const { gen, schema, keyword, it } = cxt;\n /* istanbul ignore if */\n if (!Array.isArray(schema))\n throw new Error(\"ajv implementation error\");\n const alwaysValid = schema.some((sch) => (0, util_1.alwaysValidSchema)(it, sch));\n if (alwaysValid && !it.opts.unevaluated)\n return;\n const valid = gen.let(\"valid\", false);\n const schValid = gen.name(\"_valid\");\n gen.block(() => schema.forEach((_sch, i) => {\n const schCxt = cxt.subschema({\n keyword,\n schemaProp: i,\n compositeRule: true,\n }, schValid);\n gen.assign(valid, (0, codegen_1._) `${valid} || ${schValid}`);\n const merged = cxt.mergeValidEvaluated(schCxt, schValid);\n // can short-circuit if `unevaluatedProperties/Items` not supported (opts.unevaluated !== true)\n // or if all properties and items were evaluated (it.props === true && it.items === true)\n if (!merged)\n gen.if((0, codegen_1.not)(valid));\n }));\n cxt.result(valid, () => cxt.reset(), () => cxt.error(true));\n}\nexports.validateUnion = validateUnion;\n//# sourceMappingURL=code.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst def = {\n keyword: \"id\",\n code() {\n throw new Error('NOT SUPPORTED: keyword \"id\", use \"$id\" for schema ID');\n },\n};\nexports.default = def;\n//# sourceMappingURL=id.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst id_1 = require(\"./id\");\nconst ref_1 = require(\"./ref\");\nconst core = [\n \"$schema\",\n \"$id\",\n \"$defs\",\n \"$vocabulary\",\n { keyword: \"$comment\" },\n \"definitions\",\n id_1.default,\n ref_1.default,\n];\nexports.default = core;\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.callRef = exports.getValidate = void 0;\nconst ref_error_1 = require(\"../../compile/ref_error\");\nconst code_1 = require(\"../code\");\nconst codegen_1 = require(\"../../compile/codegen\");\nconst names_1 = require(\"../../compile/names\");\nconst compile_1 = require(\"../../compile\");\nconst util_1 = require(\"../../compile/util\");\nconst def = {\n keyword: \"$ref\",\n schemaType: \"string\",\n code(cxt) {\n const { gen, schema: $ref, it } = cxt;\n const { baseId, schemaEnv: env, validateName, opts, self } = it;\n const { root } = env;\n if (($ref === \"#\" || $ref === \"#/\") && baseId === root.baseId)\n return callRootRef();\n const schOrEnv = compile_1.resolveRef.call(self, root, baseId, $ref);\n if (schOrEnv === undefined)\n throw new ref_error_1.default(it.opts.uriResolver, baseId, $ref);\n if (schOrEnv instanceof compile_1.SchemaEnv)\n return callValidate(schOrEnv);\n return inlineRefSchema(schOrEnv);\n function callRootRef() {\n if (env === root)\n return 
callRef(cxt, validateName, env, env.$async);\n const rootName = gen.scopeValue(\"root\", { ref: root });\n return callRef(cxt, (0, codegen_1._) `${rootName}.validate`, root, root.$async);\n }\n function callValidate(sch) {\n const v = getValidate(cxt, sch);\n callRef(cxt, v, sch, sch.$async);\n }\n function inlineRefSchema(sch) {\n const schName = gen.scopeValue(\"schema\", opts.code.source === true ? { ref: sch, code: (0, codegen_1.stringify)(sch) } : { ref: sch });\n const valid = gen.name(\"valid\");\n const schCxt = cxt.subschema({\n schema: sch,\n dataTypes: [],\n schemaPath: codegen_1.nil,\n topSchemaRef: schName,\n errSchemaPath: $ref,\n }, valid);\n cxt.mergeEvaluated(schCxt);\n cxt.ok(valid);\n }\n },\n};\nfunction getValidate(cxt, sch) {\n const { gen } = cxt;\n return sch.validate\n ? gen.scopeValue(\"validate\", { ref: sch.validate })\n : (0, codegen_1._) `${gen.scopeValue(\"wrapper\", { ref: sch })}.validate`;\n}\nexports.getValidate = getValidate;\nfunction callRef(cxt, v, sch, $async) {\n const { gen, it } = cxt;\n const { allErrors, schemaEnv: env, opts } = it;\n const passCxt = opts.passContext ? names_1.default.this : codegen_1.nil;\n if ($async)\n callAsyncRef();\n else\n callSyncRef();\n function callAsyncRef() {\n if (!env.$async)\n throw new Error(\"async schema referenced by sync schema\");\n const valid = gen.let(\"valid\");\n gen.try(() => {\n gen.code((0, codegen_1._) `await ${(0, code_1.callValidateCode)(cxt, v, passCxt)}`);\n addEvaluatedFrom(v); // TODO will not work with async, it has to be returned with the result\n if (!allErrors)\n gen.assign(valid, true);\n }, (e) => {\n gen.if((0, codegen_1._) `!(${e} instanceof ${it.ValidationError})`, () => gen.throw(e));\n addErrorsFrom(e);\n if (!allErrors)\n gen.assign(valid, false);\n });\n cxt.ok(valid);\n }\n function callSyncRef() {\n cxt.result((0, code_1.callValidateCode)(cxt, v, passCxt), () => addEvaluatedFrom(v), () => addErrorsFrom(v));\n }\n function addErrorsFrom(source) {\n const errs = (0, codegen_1._) `${source}.errors`;\n gen.assign(names_1.default.vErrors, (0, codegen_1._) `${names_1.default.vErrors} === null ? ${errs} : ${names_1.default.vErrors}.concat(${errs})`); // TODO tagged\n gen.assign(names_1.default.errors, (0, codegen_1._) `${names_1.default.vErrors}.length`);\n }\n function addEvaluatedFrom(source) {\n var _a;\n if (!it.opts.unevaluated)\n return;\n const schEvaluated = (_a = sch === null || sch === void 0 ? void 0 : sch.validate) === null || _a === void 0 ? 
void 0 : _a.evaluated;\n // TODO refactor\n if (it.props !== true) {\n if (schEvaluated && !schEvaluated.dynamicProps) {\n if (schEvaluated.props !== undefined) {\n it.props = util_1.mergeEvaluated.props(gen, schEvaluated.props, it.props);\n }\n }\n else {\n const props = gen.var(\"props\", (0, codegen_1._) `${source}.evaluated.props`);\n it.props = util_1.mergeEvaluated.props(gen, props, it.props, codegen_1.Name);\n }\n }\n if (it.items !== true) {\n if (schEvaluated && !schEvaluated.dynamicItems) {\n if (schEvaluated.items !== undefined) {\n it.items = util_1.mergeEvaluated.items(gen, schEvaluated.items, it.items);\n }\n }\n else {\n const items = gen.var(\"items\", (0, codegen_1._) `${source}.evaluated.items`);\n it.items = util_1.mergeEvaluated.items(gen, items, it.items, codegen_1.Name);\n }\n }\n }\n}\nexports.callRef = callRef;\nexports.default = def;\n//# sourceMappingURL=ref.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = require(\"../../compile/codegen\");\nconst types_1 = require(\"../discriminator/types\");\nconst compile_1 = require(\"../../compile\");\nconst util_1 = require(\"../../compile/util\");\nconst error = {\n message: ({ params: { discrError, tagName } }) => discrError === types_1.DiscrError.Tag\n ? `tag \"${tagName}\" must be string`\n : `value of tag \"${tagName}\" must be in oneOf`,\n params: ({ params: { discrError, tag, tagName } }) => (0, codegen_1._) `{error: ${discrError}, tag: ${tagName}, tagValue: ${tag}}`,\n};\nconst def = {\n keyword: \"discriminator\",\n type: \"object\",\n schemaType: \"object\",\n error,\n code(cxt) {\n const { gen, data, schema, parentSchema, it } = cxt;\n const { oneOf } = parentSchema;\n if (!it.opts.discriminator) {\n throw new Error(\"discriminator: requires discriminator option\");\n }\n const tagName = schema.propertyName;\n if (typeof tagName != \"string\")\n throw new Error(\"discriminator: requires propertyName\");\n if (schema.mapping)\n throw new Error(\"discriminator: mapping is not supported\");\n if (!oneOf)\n throw new Error(\"discriminator: requires oneOf keyword\");\n const valid = gen.let(\"valid\", false);\n const tag = gen.const(\"tag\", (0, codegen_1._) `${data}${(0, codegen_1.getProperty)(tagName)}`);\n gen.if((0, codegen_1._) `typeof ${tag} == \"string\"`, () => validateMapping(), () => cxt.error(false, { discrError: types_1.DiscrError.Tag, tag, tagName }));\n cxt.ok(valid);\n function validateMapping() {\n const mapping = getMapping();\n gen.if(false);\n for (const tagValue in mapping) {\n gen.elseIf((0, codegen_1._) `${tag} === ${tagValue}`);\n gen.assign(valid, applyTagSchema(mapping[tagValue]));\n }\n gen.else();\n cxt.error(false, { discrError: types_1.DiscrError.Mapping, tag, tagName });\n gen.endIf();\n }\n function applyTagSchema(schemaProp) {\n const _valid = gen.name(\"valid\");\n const schCxt = cxt.subschema({ keyword: \"oneOf\", schemaProp }, _valid);\n cxt.mergeEvaluated(schCxt, codegen_1.Name);\n return _valid;\n }\n function getMapping() {\n var _a;\n const oneOfMapping = {};\n const topRequired = hasRequired(parentSchema);\n let tagRequired = true;\n for (let i = 0; i < oneOf.length; i++) {\n let sch = oneOf[i];\n if ((sch === null || sch === void 0 ? void 0 : sch.$ref) && !(0, util_1.schemaHasRulesButRef)(sch, it.self.RULES)) {\n sch = compile_1.resolveRef.call(it.self, it.schemaEnv.root, it.baseId, sch === null || sch === void 0 ? 
void 0 : sch.$ref);\n if (sch instanceof compile_1.SchemaEnv)\n sch = sch.schema;\n }\n const propSch = (_a = sch === null || sch === void 0 ? void 0 : sch.properties) === null || _a === void 0 ? void 0 : _a[tagName];\n if (typeof propSch != \"object\") {\n throw new Error(`discriminator: oneOf subschemas (or referenced schemas) must have \"properties/${tagName}\"`);\n }\n tagRequired = tagRequired && (topRequired || hasRequired(sch));\n addMappings(propSch, i);\n }\n if (!tagRequired)\n throw new Error(`discriminator: \"${tagName}\" must be required`);\n return oneOfMapping;\n function hasRequired({ required }) {\n return Array.isArray(required) && required.includes(tagName);\n }\n function addMappings(sch, i) {\n if (sch.const) {\n addMapping(sch.const, i);\n }\n else if (sch.enum) {\n for (const tagValue of sch.enum) {\n addMapping(tagValue, i);\n }\n }\n else {\n throw new Error(`discriminator: \"properties/${tagName}\" must have \"const\" or \"enum\"`);\n }\n }\n function addMapping(tagValue, i) {\n if (typeof tagValue != \"string\" || tagValue in oneOfMapping) {\n throw new Error(`discriminator: \"${tagName}\" values must be unique strings`);\n }\n oneOfMapping[tagValue] = i;\n }\n }\n },\n};\nexports.default = def;\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.DiscrError = void 0;\nvar DiscrError;\n(function (DiscrError) {\n DiscrError[\"Tag\"] = \"tag\";\n DiscrError[\"Mapping\"] = \"mapping\";\n})(DiscrError = exports.DiscrError || (exports.DiscrError = {}));\n//# sourceMappingURL=types.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst core_1 = require(\"./core\");\nconst validation_1 = require(\"./validation\");\nconst applicator_1 = require(\"./applicator\");\nconst format_1 = require(\"./format\");\nconst metadata_1 = require(\"./metadata\");\nconst draft7Vocabularies = [\n core_1.default,\n validation_1.default,\n (0, applicator_1.default)(),\n format_1.default,\n metadata_1.metadataVocabulary,\n metadata_1.contentVocabulary,\n];\nexports.default = draft7Vocabularies;\n//# sourceMappingURL=draft7.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = require(\"../../compile/codegen\");\nconst error = {\n message: ({ schemaCode }) => (0, codegen_1.str) `must match format \"${schemaCode}\"`,\n params: ({ schemaCode }) => (0, codegen_1._) `{format: ${schemaCode}}`,\n};\nconst def = {\n keyword: \"format\",\n type: [\"number\", \"string\"],\n schemaType: \"string\",\n $data: true,\n error,\n code(cxt, ruleType) {\n const { gen, data, $data, schema, schemaCode, it } = cxt;\n const { opts, errSchemaPath, schemaEnv, self } = it;\n if (!opts.validateFormats)\n return;\n if ($data)\n validate$DataFormat();\n else\n validateFormat();\n function validate$DataFormat() {\n const fmts = gen.scopeValue(\"formats\", {\n ref: self.formats,\n code: opts.code.formats,\n });\n const fDef = gen.const(\"fDef\", (0, codegen_1._) `${fmts}[${schemaCode}]`);\n const fType = gen.let(\"fType\");\n const format = gen.let(\"format\");\n // TODO simplify\n gen.if((0, codegen_1._) `typeof ${fDef} == \"object\" && !(${fDef} instanceof RegExp)`, () => gen.assign(fType, (0, codegen_1._) `${fDef}.type || \"string\"`).assign(format, (0, codegen_1._) `${fDef}.validate`), () => gen.assign(fType, (0, codegen_1._) `\"string\"`).assign(format, fDef));\n cxt.fail$data((0, codegen_1.or)(unknownFmt(), invalidFmt()));\n function 
unknownFmt() {\n if (opts.strictSchema === false)\n return codegen_1.nil;\n return (0, codegen_1._) `${schemaCode} && !${format}`;\n }\n function invalidFmt() {\n const callFormat = schemaEnv.$async\n ? (0, codegen_1._) `(${fDef}.async ? await ${format}(${data}) : ${format}(${data}))`\n : (0, codegen_1._) `${format}(${data})`;\n const validData = (0, codegen_1._) `(typeof ${format} == \"function\" ? ${callFormat} : ${format}.test(${data}))`;\n return (0, codegen_1._) `${format} && ${format} !== true && ${fType} === ${ruleType} && !${validData}`;\n }\n }\n function validateFormat() {\n const formatDef = self.formats[schema];\n if (!formatDef) {\n unknownFormat();\n return;\n }\n if (formatDef === true)\n return;\n const [fmtType, format, fmtRef] = getFormat(formatDef);\n if (fmtType === ruleType)\n cxt.pass(validCondition());\n function unknownFormat() {\n if (opts.strictSchema === false) {\n self.logger.warn(unknownMsg());\n return;\n }\n throw new Error(unknownMsg());\n function unknownMsg() {\n return `unknown format \"${schema}\" ignored in schema at path \"${errSchemaPath}\"`;\n }\n }\n function getFormat(fmtDef) {\n const code = fmtDef instanceof RegExp\n ? (0, codegen_1.regexpCode)(fmtDef)\n : opts.code.formats\n ? (0, codegen_1._) `${opts.code.formats}${(0, codegen_1.getProperty)(schema)}`\n : undefined;\n const fmt = gen.scopeValue(\"formats\", { key: schema, ref: fmtDef, code });\n if (typeof fmtDef == \"object\" && !(fmtDef instanceof RegExp)) {\n return [fmtDef.type || \"string\", fmtDef.validate, (0, codegen_1._) `${fmt}.validate`];\n }\n return [\"string\", fmtDef, fmt];\n }\n function validCondition() {\n if (typeof formatDef == \"object\" && !(formatDef instanceof RegExp) && formatDef.async) {\n if (!schemaEnv.$async)\n throw new Error(\"async format in sync schema\");\n return (0, codegen_1._) `await ${fmtRef}(${data})`;\n }\n return typeof format == \"function\" ? 
(0, codegen_1._) `${fmtRef}(${data})` : (0, codegen_1._) `${fmtRef}.test(${data})`;\n }\n }\n },\n};\nexports.default = def;\n//# sourceMappingURL=format.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst format_1 = require(\"./format\");\nconst format = [format_1.default];\nexports.default = format;\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.contentVocabulary = exports.metadataVocabulary = void 0;\nexports.metadataVocabulary = [\n \"title\",\n \"description\",\n \"default\",\n \"deprecated\",\n \"readOnly\",\n \"writeOnly\",\n \"examples\",\n];\nexports.contentVocabulary = [\n \"contentMediaType\",\n \"contentEncoding\",\n \"contentSchema\",\n];\n//# sourceMappingURL=metadata.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = require(\"../../compile/codegen\");\nconst util_1 = require(\"../../compile/util\");\nconst equal_1 = require(\"../../runtime/equal\");\nconst error = {\n message: \"must be equal to constant\",\n params: ({ schemaCode }) => (0, codegen_1._) `{allowedValue: ${schemaCode}}`,\n};\nconst def = {\n keyword: \"const\",\n $data: true,\n error,\n code(cxt) {\n const { gen, data, $data, schemaCode, schema } = cxt;\n if ($data || (schema && typeof schema == \"object\")) {\n cxt.fail$data((0, codegen_1._) `!${(0, util_1.useFunc)(gen, equal_1.default)}(${data}, ${schemaCode})`);\n }\n else {\n cxt.fail((0, codegen_1._) `${schema} !== ${data}`);\n }\n },\n};\nexports.default = def;\n//# sourceMappingURL=const.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = require(\"../../compile/codegen\");\nconst util_1 = require(\"../../compile/util\");\nconst equal_1 = require(\"../../runtime/equal\");\nconst error = {\n message: \"must be equal to one of the allowed values\",\n params: ({ schemaCode }) => (0, codegen_1._) `{allowedValues: ${schemaCode}}`,\n};\nconst def = {\n keyword: \"enum\",\n schemaType: \"array\",\n $data: true,\n error,\n code(cxt) {\n const { gen, data, $data, schema, schemaCode, it } = cxt;\n if (!$data && schema.length === 0)\n throw new Error(\"enum must have non-empty array\");\n const useLoop = schema.length >= it.opts.loopEnum;\n let eql;\n const getEql = () => (eql !== null && eql !== void 0 ? eql : (eql = (0, util_1.useFunc)(gen, equal_1.default)));\n let valid;\n if (useLoop || $data) {\n valid = gen.let(\"valid\");\n cxt.block$data(valid, loopEnum);\n }\n else {\n /* istanbul ignore if */\n if (!Array.isArray(schema))\n throw new Error(\"ajv implementation error\");\n const vSchema = gen.const(\"vSchema\", schemaCode);\n valid = (0, codegen_1.or)(...schema.map((_x, i) => equalCode(vSchema, i)));\n }\n cxt.pass(valid);\n function loopEnum() {\n gen.assign(valid, false);\n gen.forOf(\"v\", schemaCode, (v) => gen.if((0, codegen_1._) `${getEql()}(${data}, ${v})`, () => gen.assign(valid, true).break()));\n }\n function equalCode(vSchema, i) {\n const sch = schema[i];\n return typeof sch === \"object\" && sch !== null\n ? 
(0, codegen_1._) `${getEql()}(${data}, ${vSchema}[${i}])`\n : (0, codegen_1._) `${data} === ${sch}`;\n }\n },\n};\nexports.default = def;\n//# sourceMappingURL=enum.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst limitNumber_1 = require(\"./limitNumber\");\nconst multipleOf_1 = require(\"./multipleOf\");\nconst limitLength_1 = require(\"./limitLength\");\nconst pattern_1 = require(\"./pattern\");\nconst limitProperties_1 = require(\"./limitProperties\");\nconst required_1 = require(\"./required\");\nconst limitItems_1 = require(\"./limitItems\");\nconst uniqueItems_1 = require(\"./uniqueItems\");\nconst const_1 = require(\"./const\");\nconst enum_1 = require(\"./enum\");\nconst validation = [\n // number\n limitNumber_1.default,\n multipleOf_1.default,\n // string\n limitLength_1.default,\n pattern_1.default,\n // object\n limitProperties_1.default,\n required_1.default,\n // array\n limitItems_1.default,\n uniqueItems_1.default,\n // any\n { keyword: \"type\", schemaType: [\"string\", \"array\"] },\n { keyword: \"nullable\", schemaType: \"boolean\" },\n const_1.default,\n enum_1.default,\n];\nexports.default = validation;\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = require(\"../../compile/codegen\");\nconst error = {\n message({ keyword, schemaCode }) {\n const comp = keyword === \"maxItems\" ? \"more\" : \"fewer\";\n return (0, codegen_1.str) `must NOT have ${comp} than ${schemaCode} items`;\n },\n params: ({ schemaCode }) => (0, codegen_1._) `{limit: ${schemaCode}}`,\n};\nconst def = {\n keyword: [\"maxItems\", \"minItems\"],\n type: \"array\",\n schemaType: \"number\",\n $data: true,\n error,\n code(cxt) {\n const { keyword, data, schemaCode } = cxt;\n const op = keyword === \"maxItems\" ? codegen_1.operators.GT : codegen_1.operators.LT;\n cxt.fail$data((0, codegen_1._) `${data}.length ${op} ${schemaCode}`);\n },\n};\nexports.default = def;\n//# sourceMappingURL=limitItems.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = require(\"../../compile/codegen\");\nconst util_1 = require(\"../../compile/util\");\nconst ucs2length_1 = require(\"../../runtime/ucs2length\");\nconst error = {\n message({ keyword, schemaCode }) {\n const comp = keyword === \"maxLength\" ? \"more\" : \"fewer\";\n return (0, codegen_1.str) `must NOT have ${comp} than ${schemaCode} characters`;\n },\n params: ({ schemaCode }) => (0, codegen_1._) `{limit: ${schemaCode}}`,\n};\nconst def = {\n keyword: [\"maxLength\", \"minLength\"],\n type: \"string\",\n schemaType: \"number\",\n $data: true,\n error,\n code(cxt) {\n const { keyword, data, schemaCode, it } = cxt;\n const op = keyword === \"maxLength\" ? codegen_1.operators.GT : codegen_1.operators.LT;\n const len = it.opts.unicode === false ? 
(0, codegen_1._) `${data}.length` : (0, codegen_1._) `${(0, util_1.useFunc)(cxt.gen, ucs2length_1.default)}(${data})`;\n cxt.fail$data((0, codegen_1._) `${len} ${op} ${schemaCode}`);\n },\n};\nexports.default = def;\n//# sourceMappingURL=limitLength.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = require(\"../../compile/codegen\");\nconst ops = codegen_1.operators;\nconst KWDs = {\n maximum: { okStr: \"<=\", ok: ops.LTE, fail: ops.GT },\n minimum: { okStr: \">=\", ok: ops.GTE, fail: ops.LT },\n exclusiveMaximum: { okStr: \"<\", ok: ops.LT, fail: ops.GTE },\n exclusiveMinimum: { okStr: \">\", ok: ops.GT, fail: ops.LTE },\n};\nconst error = {\n message: ({ keyword, schemaCode }) => (0, codegen_1.str) `must be ${KWDs[keyword].okStr} ${schemaCode}`,\n params: ({ keyword, schemaCode }) => (0, codegen_1._) `{comparison: ${KWDs[keyword].okStr}, limit: ${schemaCode}}`,\n};\nconst def = {\n keyword: Object.keys(KWDs),\n type: \"number\",\n schemaType: \"number\",\n $data: true,\n error,\n code(cxt) {\n const { keyword, data, schemaCode } = cxt;\n cxt.fail$data((0, codegen_1._) `${data} ${KWDs[keyword].fail} ${schemaCode} || isNaN(${data})`);\n },\n};\nexports.default = def;\n//# sourceMappingURL=limitNumber.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = require(\"../../compile/codegen\");\nconst error = {\n message({ keyword, schemaCode }) {\n const comp = keyword === \"maxProperties\" ? \"more\" : \"fewer\";\n return (0, codegen_1.str) `must NOT have ${comp} than ${schemaCode} properties`;\n },\n params: ({ schemaCode }) => (0, codegen_1._) `{limit: ${schemaCode}}`,\n};\nconst def = {\n keyword: [\"maxProperties\", \"minProperties\"],\n type: \"object\",\n schemaType: \"number\",\n $data: true,\n error,\n code(cxt) {\n const { keyword, data, schemaCode } = cxt;\n const op = keyword === \"maxProperties\" ? codegen_1.operators.GT : codegen_1.operators.LT;\n cxt.fail$data((0, codegen_1._) `Object.keys(${data}).length ${op} ${schemaCode}`);\n },\n};\nexports.default = def;\n//# sourceMappingURL=limitProperties.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = require(\"../../compile/codegen\");\nconst error = {\n message: ({ schemaCode }) => (0, codegen_1.str) `must be multiple of ${schemaCode}`,\n params: ({ schemaCode }) => (0, codegen_1._) `{multipleOf: ${schemaCode}}`,\n};\nconst def = {\n keyword: \"multipleOf\",\n type: \"number\",\n schemaType: \"number\",\n $data: true,\n error,\n code(cxt) {\n const { gen, data, schemaCode, it } = cxt;\n // const bdt = bad$DataType(schemaCode, def.schemaType, $data)\n const prec = it.opts.multipleOfPrecision;\n const res = gen.let(\"res\");\n const invalid = prec\n ? 
(0, codegen_1._) `Math.abs(Math.round(${res}) - ${res}) > 1e-${prec}`\n : (0, codegen_1._) `${res} !== parseInt(${res})`;\n cxt.fail$data((0, codegen_1._) `(${schemaCode} === 0 || (${res} = ${data}/${schemaCode}, ${invalid}))`);\n },\n};\nexports.default = def;\n//# sourceMappingURL=multipleOf.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst code_1 = require(\"../code\");\nconst codegen_1 = require(\"../../compile/codegen\");\nconst error = {\n message: ({ schemaCode }) => (0, codegen_1.str) `must match pattern \"${schemaCode}\"`,\n params: ({ schemaCode }) => (0, codegen_1._) `{pattern: ${schemaCode}}`,\n};\nconst def = {\n keyword: \"pattern\",\n type: \"string\",\n schemaType: \"string\",\n $data: true,\n error,\n code(cxt) {\n const { data, $data, schema, schemaCode, it } = cxt;\n // TODO regexp should be wrapped in try/catchs\n const u = it.opts.unicodeRegExp ? \"u\" : \"\";\n const regExp = $data ? (0, codegen_1._) `(new RegExp(${schemaCode}, ${u}))` : (0, code_1.usePattern)(cxt, schema);\n cxt.fail$data((0, codegen_1._) `!${regExp}.test(${data})`);\n },\n};\nexports.default = def;\n//# sourceMappingURL=pattern.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst code_1 = require(\"../code\");\nconst codegen_1 = require(\"../../compile/codegen\");\nconst util_1 = require(\"../../compile/util\");\nconst error = {\n message: ({ params: { missingProperty } }) => (0, codegen_1.str) `must have required property '${missingProperty}'`,\n params: ({ params: { missingProperty } }) => (0, codegen_1._) `{missingProperty: ${missingProperty}}`,\n};\nconst def = {\n keyword: \"required\",\n type: \"object\",\n schemaType: \"array\",\n $data: true,\n error,\n code(cxt) {\n const { gen, schema, schemaCode, data, $data, it } = cxt;\n const { opts } = it;\n if (!$data && schema.length === 0)\n return;\n const useLoop = schema.length >= opts.loopRequired;\n if (it.allErrors)\n allErrorsMode();\n else\n exitOnErrorMode();\n if (opts.strictRequired) {\n const props = cxt.parentSchema.properties;\n const { definedProperties } = cxt.it;\n for (const requiredKey of schema) {\n if ((props === null || props === void 0 ? 
void 0 : props[requiredKey]) === undefined && !definedProperties.has(requiredKey)) {\n const schemaPath = it.schemaEnv.baseId + it.errSchemaPath;\n const msg = `required property \"${requiredKey}\" is not defined at \"${schemaPath}\" (strictRequired)`;\n (0, util_1.checkStrictMode)(it, msg, it.opts.strictRequired);\n }\n }\n }\n function allErrorsMode() {\n if (useLoop || $data) {\n cxt.block$data(codegen_1.nil, loopAllRequired);\n }\n else {\n for (const prop of schema) {\n (0, code_1.checkReportMissingProp)(cxt, prop);\n }\n }\n }\n function exitOnErrorMode() {\n const missing = gen.let(\"missing\");\n if (useLoop || $data) {\n const valid = gen.let(\"valid\", true);\n cxt.block$data(valid, () => loopUntilMissing(missing, valid));\n cxt.ok(valid);\n }\n else {\n gen.if((0, code_1.checkMissingProp)(cxt, schema, missing));\n (0, code_1.reportMissingProp)(cxt, missing);\n gen.else();\n }\n }\n function loopAllRequired() {\n gen.forOf(\"prop\", schemaCode, (prop) => {\n cxt.setParams({ missingProperty: prop });\n gen.if((0, code_1.noPropertyInData)(gen, data, prop, opts.ownProperties), () => cxt.error());\n });\n }\n function loopUntilMissing(missing, valid) {\n cxt.setParams({ missingProperty: missing });\n gen.forOf(missing, schemaCode, () => {\n gen.assign(valid, (0, code_1.propertyInData)(gen, data, missing, opts.ownProperties));\n gen.if((0, codegen_1.not)(valid), () => {\n cxt.error();\n gen.break();\n });\n }, codegen_1.nil);\n }\n },\n};\nexports.default = def;\n//# sourceMappingURL=required.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst dataType_1 = require(\"../../compile/validate/dataType\");\nconst codegen_1 = require(\"../../compile/codegen\");\nconst util_1 = require(\"../../compile/util\");\nconst equal_1 = require(\"../../runtime/equal\");\nconst error = {\n message: ({ params: { i, j } }) => (0, codegen_1.str) `must NOT have duplicate items (items ## ${j} and ${i} are identical)`,\n params: ({ params: { i, j } }) => (0, codegen_1._) `{i: ${i}, j: ${j}}`,\n};\nconst def = {\n keyword: \"uniqueItems\",\n type: \"array\",\n schemaType: \"boolean\",\n $data: true,\n error,\n code(cxt) {\n const { gen, data, $data, schema, parentSchema, schemaCode, it } = cxt;\n if (!$data && !schema)\n return;\n const valid = gen.let(\"valid\");\n const itemTypes = parentSchema.items ? (0, dataType_1.getSchemaTypes)(parentSchema.items) : [];\n cxt.block$data(valid, validateUniqueItems, (0, codegen_1._) `${schemaCode} === false`);\n cxt.ok(valid);\n function validateUniqueItems() {\n const i = gen.let(\"i\", (0, codegen_1._) `${data}.length`);\n const j = gen.let(\"j\");\n cxt.setParams({ i, j });\n gen.assign(valid, true);\n gen.if((0, codegen_1._) `${i} > 1`, () => (canOptimize() ? 
loopN : loopN2)(i, j));\n }\n function canOptimize() {\n return itemTypes.length > 0 && !itemTypes.some((t) => t === \"object\" || t === \"array\");\n }\n function loopN(i, j) {\n const item = gen.name(\"item\");\n const wrongType = (0, dataType_1.checkDataTypes)(itemTypes, item, it.opts.strictNumbers, dataType_1.DataType.Wrong);\n const indices = gen.const(\"indices\", (0, codegen_1._) `{}`);\n gen.for((0, codegen_1._) `;${i}--;`, () => {\n gen.let(item, (0, codegen_1._) `${data}[${i}]`);\n gen.if(wrongType, (0, codegen_1._) `continue`);\n if (itemTypes.length > 1)\n gen.if((0, codegen_1._) `typeof ${item} == \"string\"`, (0, codegen_1._) `${item} += \"_\"`);\n gen\n .if((0, codegen_1._) `typeof ${indices}[${item}] == \"number\"`, () => {\n gen.assign(j, (0, codegen_1._) `${indices}[${item}]`);\n cxt.error();\n gen.assign(valid, false).break();\n })\n .code((0, codegen_1._) `${indices}[${item}] = ${i}`);\n });\n }\n function loopN2(i, j) {\n const eql = (0, util_1.useFunc)(gen, equal_1.default);\n const outer = gen.name(\"outer\");\n gen.label(outer).for((0, codegen_1._) `;${i}--;`, () => gen.for((0, codegen_1._) `${j} = ${i}; ${j}--;`, () => gen.if((0, codegen_1._) `${eql}(${data}[${i}], ${data}[${j}])`, () => {\n cxt.error();\n gen.assign(valid, false).break(outer);\n })));\n }\n },\n};\nexports.default = def;\n//# sourceMappingURL=uniqueItems.js.map","'use strict';\n\n// do not edit .js files directly - edit src/index.jst\n\n\n\nmodule.exports = function equal(a, b) {\n if (a === b) return true;\n\n if (a && b && typeof a == 'object' && typeof b == 'object') {\n if (a.constructor !== b.constructor) return false;\n\n var length, i, keys;\n if (Array.isArray(a)) {\n length = a.length;\n if (length != b.length) return false;\n for (i = length; i-- !== 0;)\n if (!equal(a[i], b[i])) return false;\n return true;\n }\n\n\n\n if (a.constructor === RegExp) return a.source === b.source && a.flags === b.flags;\n if (a.valueOf !== Object.prototype.valueOf) return a.valueOf() === b.valueOf();\n if (a.toString !== Object.prototype.toString) return a.toString() === b.toString();\n\n keys = Object.keys(a);\n length = keys.length;\n if (length !== Object.keys(b).length) return false;\n\n for (i = length; i-- !== 0;)\n if (!Object.prototype.hasOwnProperty.call(b, keys[i])) return false;\n\n for (i = length; i-- !== 0;) {\n var key = keys[i];\n\n if (!equal(a[key], b[key])) return false;\n }\n\n return true;\n }\n\n // true if both NaN, false otherwise\n return a!==a && b!==b;\n};\n","'use strict';\n\nvar traverse = module.exports = function (schema, opts, cb) {\n // Legacy support for v0.3.1 and earlier.\n if (typeof opts == 'function') {\n cb = opts;\n opts = {};\n }\n\n cb = opts.cb || cb;\n var pre = (typeof cb == 'function') ? 
cb : cb.pre || function() {};\n var post = cb.post || function() {};\n\n _traverse(opts, pre, post, schema, '', schema);\n};\n\n\ntraverse.keywords = {\n additionalItems: true,\n items: true,\n contains: true,\n additionalProperties: true,\n propertyNames: true,\n not: true,\n if: true,\n then: true,\n else: true\n};\n\ntraverse.arrayKeywords = {\n items: true,\n allOf: true,\n anyOf: true,\n oneOf: true\n};\n\ntraverse.propsKeywords = {\n $defs: true,\n definitions: true,\n properties: true,\n patternProperties: true,\n dependencies: true\n};\n\ntraverse.skipKeywords = {\n default: true,\n enum: true,\n const: true,\n required: true,\n maximum: true,\n minimum: true,\n exclusiveMaximum: true,\n exclusiveMinimum: true,\n multipleOf: true,\n maxLength: true,\n minLength: true,\n pattern: true,\n format: true,\n maxItems: true,\n minItems: true,\n uniqueItems: true,\n maxProperties: true,\n minProperties: true\n};\n\n\nfunction _traverse(opts, pre, post, schema, jsonPtr, rootSchema, parentJsonPtr, parentKeyword, parentSchema, keyIndex) {\n if (schema && typeof schema == 'object' && !Array.isArray(schema)) {\n pre(schema, jsonPtr, rootSchema, parentJsonPtr, parentKeyword, parentSchema, keyIndex);\n for (var key in schema) {\n var sch = schema[key];\n if (Array.isArray(sch)) {\n if (key in traverse.arrayKeywords) {\n for (var i=0; i */\n\nif (!globalThis.DOMException) {\n try {\n const { MessageChannel } = require('worker_threads'),\n port = new MessageChannel().port1,\n ab = new ArrayBuffer()\n port.postMessage(ab, [ab, ab])\n } catch (err) {\n err.constructor.name === 'DOMException' && (\n globalThis.DOMException = err.constructor\n )\n }\n}\n\nmodule.exports = globalThis.DOMException\n","module.exports = require('./lib/tunnel');\n","'use strict';\n\nvar net = require('net');\nvar tls = require('tls');\nvar http = require('http');\nvar https = require('https');\nvar events = require('events');\nvar assert = require('assert');\nvar util = require('util');\n\n\nexports.httpOverHttp = httpOverHttp;\nexports.httpsOverHttp = httpsOverHttp;\nexports.httpOverHttps = httpOverHttps;\nexports.httpsOverHttps = httpsOverHttps;\n\n\nfunction httpOverHttp(options) {\n var agent = new TunnelingAgent(options);\n agent.request = http.request;\n return agent;\n}\n\nfunction httpsOverHttp(options) {\n var agent = new TunnelingAgent(options);\n agent.request = http.request;\n agent.createSocket = createSecureSocket;\n agent.defaultPort = 443;\n return agent;\n}\n\nfunction httpOverHttps(options) {\n var agent = new TunnelingAgent(options);\n agent.request = https.request;\n return agent;\n}\n\nfunction httpsOverHttps(options) {\n var agent = new TunnelingAgent(options);\n agent.request = https.request;\n agent.createSocket = createSecureSocket;\n agent.defaultPort = 443;\n return agent;\n}\n\n\nfunction TunnelingAgent(options) {\n var self = this;\n self.options = options || {};\n self.proxyOptions = self.options.proxy || {};\n self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets;\n self.requests = [];\n self.sockets = [];\n\n self.on('free', function onFree(socket, host, port, localAddress) {\n var options = toOptions(host, port, localAddress);\n for (var i = 0, len = self.requests.length; i < len; ++i) {\n var pending = self.requests[i];\n if (pending.host === options.host && pending.port === options.port) {\n // Detect the request to connect same origin server,\n // reuse the connection.\n self.requests.splice(i, 1);\n pending.request.onSocket(socket);\n return;\n }\n }\n 
socket.destroy();\n self.removeSocket(socket);\n });\n}\nutil.inherits(TunnelingAgent, events.EventEmitter);\n\nTunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) {\n var self = this;\n var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress));\n\n if (self.sockets.length >= this.maxSockets) {\n // We are over limit so we'll add it to the queue.\n self.requests.push(options);\n return;\n }\n\n // If we are under maxSockets create a new one.\n self.createSocket(options, function(socket) {\n socket.on('free', onFree);\n socket.on('close', onCloseOrRemove);\n socket.on('agentRemove', onCloseOrRemove);\n req.onSocket(socket);\n\n function onFree() {\n self.emit('free', socket, options);\n }\n\n function onCloseOrRemove(err) {\n self.removeSocket(socket);\n socket.removeListener('free', onFree);\n socket.removeListener('close', onCloseOrRemove);\n socket.removeListener('agentRemove', onCloseOrRemove);\n }\n });\n};\n\nTunnelingAgent.prototype.createSocket = function createSocket(options, cb) {\n var self = this;\n var placeholder = {};\n self.sockets.push(placeholder);\n\n var connectOptions = mergeOptions({}, self.proxyOptions, {\n method: 'CONNECT',\n path: options.host + ':' + options.port,\n agent: false,\n headers: {\n host: options.host + ':' + options.port\n }\n });\n if (options.localAddress) {\n connectOptions.localAddress = options.localAddress;\n }\n if (connectOptions.proxyAuth) {\n connectOptions.headers = connectOptions.headers || {};\n connectOptions.headers['Proxy-Authorization'] = 'Basic ' +\n new Buffer(connectOptions.proxyAuth).toString('base64');\n }\n\n debug('making CONNECT request');\n var connectReq = self.request(connectOptions);\n connectReq.useChunkedEncodingByDefault = false; // for v0.6\n connectReq.once('response', onResponse); // for v0.6\n connectReq.once('upgrade', onUpgrade); // for v0.6\n connectReq.once('connect', onConnect); // for v0.7 or later\n connectReq.once('error', onError);\n connectReq.end();\n\n function onResponse(res) {\n // Very hacky. 
This is necessary to avoid http-parser leaks.\n res.upgrade = true;\n }\n\n function onUpgrade(res, socket, head) {\n // Hacky.\n process.nextTick(function() {\n onConnect(res, socket, head);\n });\n }\n\n function onConnect(res, socket, head) {\n connectReq.removeAllListeners();\n socket.removeAllListeners();\n\n if (res.statusCode !== 200) {\n debug('tunneling socket could not be established, statusCode=%d',\n res.statusCode);\n socket.destroy();\n var error = new Error('tunneling socket could not be established, ' +\n 'statusCode=' + res.statusCode);\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n return;\n }\n if (head.length > 0) {\n debug('got illegal response body from proxy');\n socket.destroy();\n var error = new Error('got illegal response body from proxy');\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n return;\n }\n debug('tunneling connection has established');\n self.sockets[self.sockets.indexOf(placeholder)] = socket;\n return cb(socket);\n }\n\n function onError(cause) {\n connectReq.removeAllListeners();\n\n debug('tunneling socket could not be established, cause=%s\\n',\n cause.message, cause.stack);\n var error = new Error('tunneling socket could not be established, ' +\n 'cause=' + cause.message);\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n }\n};\n\nTunnelingAgent.prototype.removeSocket = function removeSocket(socket) {\n var pos = this.sockets.indexOf(socket)\n if (pos === -1) {\n return;\n }\n this.sockets.splice(pos, 1);\n\n var pending = this.requests.shift();\n if (pending) {\n // If we have pending requests and a socket gets closed a new one\n // needs to be created to take over in the pool for the one that closed.\n this.createSocket(pending, function(socket) {\n pending.request.onSocket(socket);\n });\n }\n};\n\nfunction createSecureSocket(options, cb) {\n var self = this;\n TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {\n var hostHeader = options.request.getHeader('host');\n var tlsOptions = mergeOptions({}, self.options, {\n socket: socket,\n servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host\n });\n\n // 0 is dummy port for v0.6\n var secureSocket = tls.connect(0, tlsOptions);\n self.sockets[self.sockets.indexOf(socket)] = secureSocket;\n cb(secureSocket);\n });\n}\n\n\nfunction toOptions(host, port, localAddress) {\n if (typeof host === 'string') { // since v0.10\n return {\n host: host,\n port: port,\n localAddress: localAddress\n };\n }\n return host; // for v0.11 or later\n}\n\nfunction mergeOptions(target) {\n for (var i = 1, len = arguments.length; i < len; ++i) {\n var overrides = arguments[i];\n if (typeof overrides === 'object') {\n var keys = Object.keys(overrides);\n for (var j = 0, keyLen = keys.length; j < keyLen; ++j) {\n var k = keys[j];\n if (overrides[k] !== undefined) {\n target[k] = overrides[k];\n }\n }\n }\n }\n return target;\n}\n\n\nvar debug;\nif (process.env.NODE_DEBUG && /\\btunnel\\b/.test(process.env.NODE_DEBUG)) {\n debug = function() {\n var args = Array.prototype.slice.call(arguments);\n if (typeof args[0] === 'string') {\n args[0] = 'TUNNEL: ' + args[0];\n } else {\n args.unshift('TUNNEL:');\n }\n console.error.apply(console, args);\n }\n} else {\n debug = function() {};\n}\nexports.debug = debug; // for test\n","/** @license URI.js v4.4.1 (c) 2011 Gary Court. 
License: http://github.com/garycourt/uri-js */\n(function (global, factory) {\n\ttypeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :\n\ttypeof define === 'function' && define.amd ? define(['exports'], factory) :\n\t(factory((global.URI = global.URI || {})));\n}(this, (function (exports) { 'use strict';\n\nfunction merge() {\n for (var _len = arguments.length, sets = Array(_len), _key = 0; _key < _len; _key++) {\n sets[_key] = arguments[_key];\n }\n\n if (sets.length > 1) {\n sets[0] = sets[0].slice(0, -1);\n var xl = sets.length - 1;\n for (var x = 1; x < xl; ++x) {\n sets[x] = sets[x].slice(1, -1);\n }\n sets[xl] = sets[xl].slice(1);\n return sets.join('');\n } else {\n return sets[0];\n }\n}\nfunction subexp(str) {\n return \"(?:\" + str + \")\";\n}\nfunction typeOf(o) {\n return o === undefined ? \"undefined\" : o === null ? \"null\" : Object.prototype.toString.call(o).split(\" \").pop().split(\"]\").shift().toLowerCase();\n}\nfunction toUpperCase(str) {\n return str.toUpperCase();\n}\nfunction toArray(obj) {\n return obj !== undefined && obj !== null ? obj instanceof Array ? obj : typeof obj.length !== \"number\" || obj.split || obj.setInterval || obj.call ? [obj] : Array.prototype.slice.call(obj) : [];\n}\nfunction assign(target, source) {\n var obj = target;\n if (source) {\n for (var key in source) {\n obj[key] = source[key];\n }\n }\n return obj;\n}\n\nfunction buildExps(isIRI) {\n var ALPHA$$ = \"[A-Za-z]\",\n CR$ = \"[\\\\x0D]\",\n DIGIT$$ = \"[0-9]\",\n DQUOTE$$ = \"[\\\\x22]\",\n HEXDIG$$ = merge(DIGIT$$, \"[A-Fa-f]\"),\n //case-insensitive\n LF$$ = \"[\\\\x0A]\",\n SP$$ = \"[\\\\x20]\",\n PCT_ENCODED$ = subexp(subexp(\"%[EFef]\" + HEXDIG$$ + \"%\" + HEXDIG$$ + HEXDIG$$ + \"%\" + HEXDIG$$ + HEXDIG$$) + \"|\" + subexp(\"%[89A-Fa-f]\" + HEXDIG$$ + \"%\" + HEXDIG$$ + HEXDIG$$) + \"|\" + subexp(\"%\" + HEXDIG$$ + HEXDIG$$)),\n //expanded\n GEN_DELIMS$$ = \"[\\\\:\\\\/\\\\?\\\\#\\\\[\\\\]\\\\@]\",\n SUB_DELIMS$$ = \"[\\\\!\\\\$\\\\&\\\\'\\\\(\\\\)\\\\*\\\\+\\\\,\\\\;\\\\=]\",\n RESERVED$$ = merge(GEN_DELIMS$$, SUB_DELIMS$$),\n UCSCHAR$$ = isIRI ? \"[\\\\xA0-\\\\u200D\\\\u2010-\\\\u2029\\\\u202F-\\\\uD7FF\\\\uF900-\\\\uFDCF\\\\uFDF0-\\\\uFFEF]\" : \"[]\",\n //subset, excludes bidi control characters\n IPRIVATE$$ = isIRI ? 
\"[\\\\uE000-\\\\uF8FF]\" : \"[]\",\n //subset\n UNRESERVED$$ = merge(ALPHA$$, DIGIT$$, \"[\\\\-\\\\.\\\\_\\\\~]\", UCSCHAR$$),\n SCHEME$ = subexp(ALPHA$$ + merge(ALPHA$$, DIGIT$$, \"[\\\\+\\\\-\\\\.]\") + \"*\"),\n USERINFO$ = subexp(subexp(PCT_ENCODED$ + \"|\" + merge(UNRESERVED$$, SUB_DELIMS$$, \"[\\\\:]\")) + \"*\"),\n DEC_OCTET$ = subexp(subexp(\"25[0-5]\") + \"|\" + subexp(\"2[0-4]\" + DIGIT$$) + \"|\" + subexp(\"1\" + DIGIT$$ + DIGIT$$) + \"|\" + subexp(\"[1-9]\" + DIGIT$$) + \"|\" + DIGIT$$),\n DEC_OCTET_RELAXED$ = subexp(subexp(\"25[0-5]\") + \"|\" + subexp(\"2[0-4]\" + DIGIT$$) + \"|\" + subexp(\"1\" + DIGIT$$ + DIGIT$$) + \"|\" + subexp(\"0?[1-9]\" + DIGIT$$) + \"|0?0?\" + DIGIT$$),\n //relaxed parsing rules\n IPV4ADDRESS$ = subexp(DEC_OCTET_RELAXED$ + \"\\\\.\" + DEC_OCTET_RELAXED$ + \"\\\\.\" + DEC_OCTET_RELAXED$ + \"\\\\.\" + DEC_OCTET_RELAXED$),\n H16$ = subexp(HEXDIG$$ + \"{1,4}\"),\n LS32$ = subexp(subexp(H16$ + \"\\\\:\" + H16$) + \"|\" + IPV4ADDRESS$),\n IPV6ADDRESS1$ = subexp(subexp(H16$ + \"\\\\:\") + \"{6}\" + LS32$),\n // 6( h16 \":\" ) ls32\n IPV6ADDRESS2$ = subexp(\"\\\\:\\\\:\" + subexp(H16$ + \"\\\\:\") + \"{5}\" + LS32$),\n // \"::\" 5( h16 \":\" ) ls32\n IPV6ADDRESS3$ = subexp(subexp(H16$) + \"?\\\\:\\\\:\" + subexp(H16$ + \"\\\\:\") + \"{4}\" + LS32$),\n //[ h16 ] \"::\" 4( h16 \":\" ) ls32\n IPV6ADDRESS4$ = subexp(subexp(subexp(H16$ + \"\\\\:\") + \"{0,1}\" + H16$) + \"?\\\\:\\\\:\" + subexp(H16$ + \"\\\\:\") + \"{3}\" + LS32$),\n //[ *1( h16 \":\" ) h16 ] \"::\" 3( h16 \":\" ) ls32\n IPV6ADDRESS5$ = subexp(subexp(subexp(H16$ + \"\\\\:\") + \"{0,2}\" + H16$) + \"?\\\\:\\\\:\" + subexp(H16$ + \"\\\\:\") + \"{2}\" + LS32$),\n //[ *2( h16 \":\" ) h16 ] \"::\" 2( h16 \":\" ) ls32\n IPV6ADDRESS6$ = subexp(subexp(subexp(H16$ + \"\\\\:\") + \"{0,3}\" + H16$) + \"?\\\\:\\\\:\" + H16$ + \"\\\\:\" + LS32$),\n //[ *3( h16 \":\" ) h16 ] \"::\" h16 \":\" ls32\n IPV6ADDRESS7$ = subexp(subexp(subexp(H16$ + \"\\\\:\") + \"{0,4}\" + H16$) + \"?\\\\:\\\\:\" + LS32$),\n //[ *4( h16 \":\" ) h16 ] \"::\" ls32\n IPV6ADDRESS8$ = subexp(subexp(subexp(H16$ + \"\\\\:\") + \"{0,5}\" + H16$) + \"?\\\\:\\\\:\" + H16$),\n //[ *5( h16 \":\" ) h16 ] \"::\" h16\n IPV6ADDRESS9$ = subexp(subexp(subexp(H16$ + \"\\\\:\") + \"{0,6}\" + H16$) + \"?\\\\:\\\\:\"),\n //[ *6( h16 \":\" ) h16 ] \"::\"\n IPV6ADDRESS$ = subexp([IPV6ADDRESS1$, IPV6ADDRESS2$, IPV6ADDRESS3$, IPV6ADDRESS4$, IPV6ADDRESS5$, IPV6ADDRESS6$, IPV6ADDRESS7$, IPV6ADDRESS8$, IPV6ADDRESS9$].join(\"|\")),\n ZONEID$ = subexp(subexp(UNRESERVED$$ + \"|\" + PCT_ENCODED$) + \"+\"),\n //RFC 6874\n IPV6ADDRZ$ = subexp(IPV6ADDRESS$ + \"\\\\%25\" + ZONEID$),\n //RFC 6874\n IPV6ADDRZ_RELAXED$ = subexp(IPV6ADDRESS$ + subexp(\"\\\\%25|\\\\%(?!\" + HEXDIG$$ + \"{2})\") + ZONEID$),\n //RFC 6874, with relaxed parsing rules\n IPVFUTURE$ = subexp(\"[vV]\" + HEXDIG$$ + \"+\\\\.\" + merge(UNRESERVED$$, SUB_DELIMS$$, \"[\\\\:]\") + \"+\"),\n IP_LITERAL$ = subexp(\"\\\\[\" + subexp(IPV6ADDRZ_RELAXED$ + \"|\" + IPV6ADDRESS$ + \"|\" + IPVFUTURE$) + \"\\\\]\"),\n //RFC 6874\n REG_NAME$ = subexp(subexp(PCT_ENCODED$ + \"|\" + merge(UNRESERVED$$, SUB_DELIMS$$)) + \"*\"),\n HOST$ = subexp(IP_LITERAL$ + \"|\" + IPV4ADDRESS$ + \"(?!\" + REG_NAME$ + \")\" + \"|\" + REG_NAME$),\n PORT$ = subexp(DIGIT$$ + \"*\"),\n AUTHORITY$ = subexp(subexp(USERINFO$ + \"@\") + \"?\" + HOST$ + subexp(\"\\\\:\" + PORT$) + \"?\"),\n PCHAR$ = subexp(PCT_ENCODED$ + \"|\" + merge(UNRESERVED$$, SUB_DELIMS$$, \"[\\\\:\\\\@]\")),\n SEGMENT$ = subexp(PCHAR$ + \"*\"),\n SEGMENT_NZ$ = 
subexp(PCHAR$ + \"+\"),\n SEGMENT_NZ_NC$ = subexp(subexp(PCT_ENCODED$ + \"|\" + merge(UNRESERVED$$, SUB_DELIMS$$, \"[\\\\@]\")) + \"+\"),\n PATH_ABEMPTY$ = subexp(subexp(\"\\\\/\" + SEGMENT$) + \"*\"),\n PATH_ABSOLUTE$ = subexp(\"\\\\/\" + subexp(SEGMENT_NZ$ + PATH_ABEMPTY$) + \"?\"),\n //simplified\n PATH_NOSCHEME$ = subexp(SEGMENT_NZ_NC$ + PATH_ABEMPTY$),\n //simplified\n PATH_ROOTLESS$ = subexp(SEGMENT_NZ$ + PATH_ABEMPTY$),\n //simplified\n PATH_EMPTY$ = \"(?!\" + PCHAR$ + \")\",\n PATH$ = subexp(PATH_ABEMPTY$ + \"|\" + PATH_ABSOLUTE$ + \"|\" + PATH_NOSCHEME$ + \"|\" + PATH_ROOTLESS$ + \"|\" + PATH_EMPTY$),\n QUERY$ = subexp(subexp(PCHAR$ + \"|\" + merge(\"[\\\\/\\\\?]\", IPRIVATE$$)) + \"*\"),\n FRAGMENT$ = subexp(subexp(PCHAR$ + \"|[\\\\/\\\\?]\") + \"*\"),\n HIER_PART$ = subexp(subexp(\"\\\\/\\\\/\" + AUTHORITY$ + PATH_ABEMPTY$) + \"|\" + PATH_ABSOLUTE$ + \"|\" + PATH_ROOTLESS$ + \"|\" + PATH_EMPTY$),\n URI$ = subexp(SCHEME$ + \"\\\\:\" + HIER_PART$ + subexp(\"\\\\?\" + QUERY$) + \"?\" + subexp(\"\\\\#\" + FRAGMENT$) + \"?\"),\n RELATIVE_PART$ = subexp(subexp(\"\\\\/\\\\/\" + AUTHORITY$ + PATH_ABEMPTY$) + \"|\" + PATH_ABSOLUTE$ + \"|\" + PATH_NOSCHEME$ + \"|\" + PATH_EMPTY$),\n RELATIVE$ = subexp(RELATIVE_PART$ + subexp(\"\\\\?\" + QUERY$) + \"?\" + subexp(\"\\\\#\" + FRAGMENT$) + \"?\"),\n URI_REFERENCE$ = subexp(URI$ + \"|\" + RELATIVE$),\n ABSOLUTE_URI$ = subexp(SCHEME$ + \"\\\\:\" + HIER_PART$ + subexp(\"\\\\?\" + QUERY$) + \"?\"),\n GENERIC_REF$ = \"^(\" + SCHEME$ + \")\\\\:\" + subexp(subexp(\"\\\\/\\\\/(\" + subexp(\"(\" + USERINFO$ + \")@\") + \"?(\" + HOST$ + \")\" + subexp(\"\\\\:(\" + PORT$ + \")\") + \"?)\") + \"?(\" + PATH_ABEMPTY$ + \"|\" + PATH_ABSOLUTE$ + \"|\" + PATH_ROOTLESS$ + \"|\" + PATH_EMPTY$ + \")\") + subexp(\"\\\\?(\" + QUERY$ + \")\") + \"?\" + subexp(\"\\\\#(\" + FRAGMENT$ + \")\") + \"?$\",\n RELATIVE_REF$ = \"^(){0}\" + subexp(subexp(\"\\\\/\\\\/(\" + subexp(\"(\" + USERINFO$ + \")@\") + \"?(\" + HOST$ + \")\" + subexp(\"\\\\:(\" + PORT$ + \")\") + \"?)\") + \"?(\" + PATH_ABEMPTY$ + \"|\" + PATH_ABSOLUTE$ + \"|\" + PATH_NOSCHEME$ + \"|\" + PATH_EMPTY$ + \")\") + subexp(\"\\\\?(\" + QUERY$ + \")\") + \"?\" + subexp(\"\\\\#(\" + FRAGMENT$ + \")\") + \"?$\",\n ABSOLUTE_REF$ = \"^(\" + SCHEME$ + \")\\\\:\" + subexp(subexp(\"\\\\/\\\\/(\" + subexp(\"(\" + USERINFO$ + \")@\") + \"?(\" + HOST$ + \")\" + subexp(\"\\\\:(\" + PORT$ + \")\") + \"?)\") + \"?(\" + PATH_ABEMPTY$ + \"|\" + PATH_ABSOLUTE$ + \"|\" + PATH_ROOTLESS$ + \"|\" + PATH_EMPTY$ + \")\") + subexp(\"\\\\?(\" + QUERY$ + \")\") + \"?$\",\n SAMEDOC_REF$ = \"^\" + subexp(\"\\\\#(\" + FRAGMENT$ + \")\") + \"?$\",\n AUTHORITY_REF$ = \"^\" + subexp(\"(\" + USERINFO$ + \")@\") + \"?(\" + HOST$ + \")\" + subexp(\"\\\\:(\" + PORT$ + \")\") + \"?$\";\n return {\n NOT_SCHEME: new RegExp(merge(\"[^]\", ALPHA$$, DIGIT$$, \"[\\\\+\\\\-\\\\.]\"), \"g\"),\n NOT_USERINFO: new RegExp(merge(\"[^\\\\%\\\\:]\", UNRESERVED$$, SUB_DELIMS$$), \"g\"),\n NOT_HOST: new RegExp(merge(\"[^\\\\%\\\\[\\\\]\\\\:]\", UNRESERVED$$, SUB_DELIMS$$), \"g\"),\n NOT_PATH: new RegExp(merge(\"[^\\\\%\\\\/\\\\:\\\\@]\", UNRESERVED$$, SUB_DELIMS$$), \"g\"),\n NOT_PATH_NOSCHEME: new RegExp(merge(\"[^\\\\%\\\\/\\\\@]\", UNRESERVED$$, SUB_DELIMS$$), \"g\"),\n NOT_QUERY: new RegExp(merge(\"[^\\\\%]\", UNRESERVED$$, SUB_DELIMS$$, \"[\\\\:\\\\@\\\\/\\\\?]\", IPRIVATE$$), \"g\"),\n NOT_FRAGMENT: new RegExp(merge(\"[^\\\\%]\", UNRESERVED$$, SUB_DELIMS$$, \"[\\\\:\\\\@\\\\/\\\\?]\"), \"g\"),\n ESCAPE: new RegExp(merge(\"[^]\", UNRESERVED$$, 
SUB_DELIMS$$), \"g\"),\n UNRESERVED: new RegExp(UNRESERVED$$, \"g\"),\n OTHER_CHARS: new RegExp(merge(\"[^\\\\%]\", UNRESERVED$$, RESERVED$$), \"g\"),\n PCT_ENCODED: new RegExp(PCT_ENCODED$, \"g\"),\n IPV4ADDRESS: new RegExp(\"^(\" + IPV4ADDRESS$ + \")$\"),\n IPV6ADDRESS: new RegExp(\"^\\\\[?(\" + IPV6ADDRESS$ + \")\" + subexp(subexp(\"\\\\%25|\\\\%(?!\" + HEXDIG$$ + \"{2})\") + \"(\" + ZONEID$ + \")\") + \"?\\\\]?$\") //RFC 6874, with relaxed parsing rules\n };\n}\nvar URI_PROTOCOL = buildExps(false);\n\nvar IRI_PROTOCOL = buildExps(true);\n\nvar slicedToArray = function () {\n function sliceIterator(arr, i) {\n var _arr = [];\n var _n = true;\n var _d = false;\n var _e = undefined;\n\n try {\n for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) {\n _arr.push(_s.value);\n\n if (i && _arr.length === i) break;\n }\n } catch (err) {\n _d = true;\n _e = err;\n } finally {\n try {\n if (!_n && _i[\"return\"]) _i[\"return\"]();\n } finally {\n if (_d) throw _e;\n }\n }\n\n return _arr;\n }\n\n return function (arr, i) {\n if (Array.isArray(arr)) {\n return arr;\n } else if (Symbol.iterator in Object(arr)) {\n return sliceIterator(arr, i);\n } else {\n throw new TypeError(\"Invalid attempt to destructure non-iterable instance\");\n }\n };\n}();\n\n\n\n\n\n\n\n\n\n\n\n\n\nvar toConsumableArray = function (arr) {\n if (Array.isArray(arr)) {\n for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) arr2[i] = arr[i];\n\n return arr2;\n } else {\n return Array.from(arr);\n }\n};\n\n/** Highest positive signed 32-bit float value */\n\nvar maxInt = 2147483647; // aka. 0x7FFFFFFF or 2^31-1\n\n/** Bootstring parameters */\nvar base = 36;\nvar tMin = 1;\nvar tMax = 26;\nvar skew = 38;\nvar damp = 700;\nvar initialBias = 72;\nvar initialN = 128; // 0x80\nvar delimiter = '-'; // '\\x2D'\n\n/** Regular expressions */\nvar regexPunycode = /^xn--/;\nvar regexNonASCII = /[^\\0-\\x7E]/; // non-ASCII chars\nvar regexSeparators = /[\\x2E\\u3002\\uFF0E\\uFF61]/g; // RFC 3490 separators\n\n/** Error messages */\nvar errors = {\n\t'overflow': 'Overflow: input needs wider integers to process',\n\t'not-basic': 'Illegal input >= 0x80 (not a basic code point)',\n\t'invalid-input': 'Invalid input'\n};\n\n/** Convenience shortcuts */\nvar baseMinusTMin = base - tMin;\nvar floor = Math.floor;\nvar stringFromCharCode = String.fromCharCode;\n\n/*--------------------------------------------------------------------------*/\n\n/**\n * A generic error utility function.\n * @private\n * @param {String} type The error type.\n * @returns {Error} Throws a `RangeError` with the applicable error message.\n */\nfunction error$1(type) {\n\tthrow new RangeError(errors[type]);\n}\n\n/**\n * A generic `Array#map` utility function.\n * @private\n * @param {Array} array The array to iterate over.\n * @param {Function} callback The function that gets called for every array\n * item.\n * @returns {Array} A new array of values returned by the callback function.\n */\nfunction map(array, fn) {\n\tvar result = [];\n\tvar length = array.length;\n\twhile (length--) {\n\t\tresult[length] = fn(array[length]);\n\t}\n\treturn result;\n}\n\n/**\n * A simple `Array#map`-like wrapper to work with domain name strings or email\n * addresses.\n * @private\n * @param {String} domain The domain name or email address.\n * @param {Function} callback The function that gets called for every\n * character.\n * @returns {Array} A new string of characters returned by the callback\n * function.\n */\nfunction mapDomain(string, fn) 
{\n\tvar parts = string.split('@');\n\tvar result = '';\n\tif (parts.length > 1) {\n\t\t// In email addresses, only the domain name should be punycoded. Leave\n\t\t// the local part (i.e. everything up to `@`) intact.\n\t\tresult = parts[0] + '@';\n\t\tstring = parts[1];\n\t}\n\t// Avoid `split(regex)` for IE8 compatibility. See #17.\n\tstring = string.replace(regexSeparators, '\\x2E');\n\tvar labels = string.split('.');\n\tvar encoded = map(labels, fn).join('.');\n\treturn result + encoded;\n}\n\n/**\n * Creates an array containing the numeric code points of each Unicode\n * character in the string. While JavaScript uses UCS-2 internally,\n * this function will convert a pair of surrogate halves (each of which\n * UCS-2 exposes as separate characters) into a single code point,\n * matching UTF-16.\n * @see `punycode.ucs2.encode`\n * @see \n * @memberOf punycode.ucs2\n * @name decode\n * @param {String} string The Unicode input string (UCS-2).\n * @returns {Array} The new array of code points.\n */\nfunction ucs2decode(string) {\n\tvar output = [];\n\tvar counter = 0;\n\tvar length = string.length;\n\twhile (counter < length) {\n\t\tvar value = string.charCodeAt(counter++);\n\t\tif (value >= 0xD800 && value <= 0xDBFF && counter < length) {\n\t\t\t// It's a high surrogate, and there is a next character.\n\t\t\tvar extra = string.charCodeAt(counter++);\n\t\t\tif ((extra & 0xFC00) == 0xDC00) {\n\t\t\t\t// Low surrogate.\n\t\t\t\toutput.push(((value & 0x3FF) << 10) + (extra & 0x3FF) + 0x10000);\n\t\t\t} else {\n\t\t\t\t// It's an unmatched surrogate; only append this code unit, in case the\n\t\t\t\t// next code unit is the high surrogate of a surrogate pair.\n\t\t\t\toutput.push(value);\n\t\t\t\tcounter--;\n\t\t\t}\n\t\t} else {\n\t\t\toutput.push(value);\n\t\t}\n\t}\n\treturn output;\n}\n\n/**\n * Creates a string based on an array of numeric code points.\n * @see `punycode.ucs2.decode`\n * @memberOf punycode.ucs2\n * @name encode\n * @param {Array} codePoints The array of numeric code points.\n * @returns {String} The new Unicode string (UCS-2).\n */\nvar ucs2encode = function ucs2encode(array) {\n\treturn String.fromCodePoint.apply(String, toConsumableArray(array));\n};\n\n/**\n * Converts a basic code point into a digit/integer.\n * @see `digitToBasic()`\n * @private\n * @param {Number} codePoint The basic numeric code point value.\n * @returns {Number} The numeric value of a basic code point (for use in\n * representing integers) in the range `0` to `base - 1`, or `base` if\n * the code point does not represent a value.\n */\nvar basicToDigit = function basicToDigit(codePoint) {\n\tif (codePoint - 0x30 < 0x0A) {\n\t\treturn codePoint - 0x16;\n\t}\n\tif (codePoint - 0x41 < 0x1A) {\n\t\treturn codePoint - 0x41;\n\t}\n\tif (codePoint - 0x61 < 0x1A) {\n\t\treturn codePoint - 0x61;\n\t}\n\treturn base;\n};\n\n/**\n * Converts a digit/integer into a basic code point.\n * @see `basicToDigit()`\n * @private\n * @param {Number} digit The numeric value of a basic code point.\n * @returns {Number} The basic code point whose value (when used for\n * representing integers) is `digit`, which needs to be in the range\n * `0` to `base - 1`. If `flag` is non-zero, the uppercase form is\n * used; else, the lowercase form is used. 
The behavior is undefined\n * if `flag` is non-zero and `digit` has no uppercase form.\n */\nvar digitToBasic = function digitToBasic(digit, flag) {\n\t// 0..25 map to ASCII a..z or A..Z\n\t// 26..35 map to ASCII 0..9\n\treturn digit + 22 + 75 * (digit < 26) - ((flag != 0) << 5);\n};\n\n/**\n * Bias adaptation function as per section 3.4 of RFC 3492.\n * https://tools.ietf.org/html/rfc3492#section-3.4\n * @private\n */\nvar adapt = function adapt(delta, numPoints, firstTime) {\n\tvar k = 0;\n\tdelta = firstTime ? floor(delta / damp) : delta >> 1;\n\tdelta += floor(delta / numPoints);\n\tfor (; /* no initialization */delta > baseMinusTMin * tMax >> 1; k += base) {\n\t\tdelta = floor(delta / baseMinusTMin);\n\t}\n\treturn floor(k + (baseMinusTMin + 1) * delta / (delta + skew));\n};\n\n/**\n * Converts a Punycode string of ASCII-only symbols to a string of Unicode\n * symbols.\n * @memberOf punycode\n * @param {String} input The Punycode string of ASCII-only symbols.\n * @returns {String} The resulting string of Unicode symbols.\n */\nvar decode = function decode(input) {\n\t// Don't use UCS-2.\n\tvar output = [];\n\tvar inputLength = input.length;\n\tvar i = 0;\n\tvar n = initialN;\n\tvar bias = initialBias;\n\n\t// Handle the basic code points: let `basic` be the number of input code\n\t// points before the last delimiter, or `0` if there is none, then copy\n\t// the first basic code points to the output.\n\n\tvar basic = input.lastIndexOf(delimiter);\n\tif (basic < 0) {\n\t\tbasic = 0;\n\t}\n\n\tfor (var j = 0; j < basic; ++j) {\n\t\t// if it's not a basic code point\n\t\tif (input.charCodeAt(j) >= 0x80) {\n\t\t\terror$1('not-basic');\n\t\t}\n\t\toutput.push(input.charCodeAt(j));\n\t}\n\n\t// Main decoding loop: start just after the last delimiter if any basic code\n\t// points were copied; start at the beginning otherwise.\n\n\tfor (var index = basic > 0 ? basic + 1 : 0; index < inputLength;) /* no final expression */{\n\n\t\t// `index` is the index of the next character to be consumed.\n\t\t// Decode a generalized variable-length integer into `delta`,\n\t\t// which gets added to `i`. The overflow checking is easier\n\t\t// if we increase `i` as we go, then subtract off its starting\n\t\t// value at the end to obtain `delta`.\n\t\tvar oldi = i;\n\t\tfor (var w = 1, k = base;; /* no condition */k += base) {\n\n\t\t\tif (index >= inputLength) {\n\t\t\t\terror$1('invalid-input');\n\t\t\t}\n\n\t\t\tvar digit = basicToDigit(input.charCodeAt(index++));\n\n\t\t\tif (digit >= base || digit > floor((maxInt - i) / w)) {\n\t\t\t\terror$1('overflow');\n\t\t\t}\n\n\t\t\ti += digit * w;\n\t\t\tvar t = k <= bias ? tMin : k >= bias + tMax ? tMax : k - bias;\n\n\t\t\tif (digit < t) {\n\t\t\t\tbreak;\n\t\t\t}\n\n\t\t\tvar baseMinusT = base - t;\n\t\t\tif (w > floor(maxInt / baseMinusT)) {\n\t\t\t\terror$1('overflow');\n\t\t\t}\n\n\t\t\tw *= baseMinusT;\n\t\t}\n\n\t\tvar out = output.length + 1;\n\t\tbias = adapt(i - oldi, out, oldi == 0);\n\n\t\t// `i` was supposed to wrap around from `out` to `0`,\n\t\t// incrementing `n` each time, so we'll fix that now:\n\t\tif (floor(i / out) > maxInt - n) {\n\t\t\terror$1('overflow');\n\t\t}\n\n\t\tn += floor(i / out);\n\t\ti %= out;\n\n\t\t// Insert `n` at position `i` of the output.\n\t\toutput.splice(i++, 0, n);\n\t}\n\n\treturn String.fromCodePoint.apply(String, output);\n};\n\n/**\n * Converts a string of Unicode symbols (e.g. 
a domain name label) to a\n * Punycode string of ASCII-only symbols.\n * @memberOf punycode\n * @param {String} input The string of Unicode symbols.\n * @returns {String} The resulting Punycode string of ASCII-only symbols.\n */\nvar encode = function encode(input) {\n\tvar output = [];\n\n\t// Convert the input in UCS-2 to an array of Unicode code points.\n\tinput = ucs2decode(input);\n\n\t// Cache the length.\n\tvar inputLength = input.length;\n\n\t// Initialize the state.\n\tvar n = initialN;\n\tvar delta = 0;\n\tvar bias = initialBias;\n\n\t// Handle the basic code points.\n\tvar _iteratorNormalCompletion = true;\n\tvar _didIteratorError = false;\n\tvar _iteratorError = undefined;\n\n\ttry {\n\t\tfor (var _iterator = input[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) {\n\t\t\tvar _currentValue2 = _step.value;\n\n\t\t\tif (_currentValue2 < 0x80) {\n\t\t\t\toutput.push(stringFromCharCode(_currentValue2));\n\t\t\t}\n\t\t}\n\t} catch (err) {\n\t\t_didIteratorError = true;\n\t\t_iteratorError = err;\n\t} finally {\n\t\ttry {\n\t\t\tif (!_iteratorNormalCompletion && _iterator.return) {\n\t\t\t\t_iterator.return();\n\t\t\t}\n\t\t} finally {\n\t\t\tif (_didIteratorError) {\n\t\t\t\tthrow _iteratorError;\n\t\t\t}\n\t\t}\n\t}\n\n\tvar basicLength = output.length;\n\tvar handledCPCount = basicLength;\n\n\t// `handledCPCount` is the number of code points that have been handled;\n\t// `basicLength` is the number of basic code points.\n\n\t// Finish the basic string with a delimiter unless it's empty.\n\tif (basicLength) {\n\t\toutput.push(delimiter);\n\t}\n\n\t// Main encoding loop:\n\twhile (handledCPCount < inputLength) {\n\n\t\t// All non-basic code points < n have been handled already. 
Find the next\n\t\t// larger one:\n\t\tvar m = maxInt;\n\t\tvar _iteratorNormalCompletion2 = true;\n\t\tvar _didIteratorError2 = false;\n\t\tvar _iteratorError2 = undefined;\n\n\t\ttry {\n\t\t\tfor (var _iterator2 = input[Symbol.iterator](), _step2; !(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) {\n\t\t\t\tvar currentValue = _step2.value;\n\n\t\t\t\tif (currentValue >= n && currentValue < m) {\n\t\t\t\t\tm = currentValue;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// Increase `delta` enough to advance the decoder's state to ,\n\t\t\t// but guard against overflow.\n\t\t} catch (err) {\n\t\t\t_didIteratorError2 = true;\n\t\t\t_iteratorError2 = err;\n\t\t} finally {\n\t\t\ttry {\n\t\t\t\tif (!_iteratorNormalCompletion2 && _iterator2.return) {\n\t\t\t\t\t_iterator2.return();\n\t\t\t\t}\n\t\t\t} finally {\n\t\t\t\tif (_didIteratorError2) {\n\t\t\t\t\tthrow _iteratorError2;\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tvar handledCPCountPlusOne = handledCPCount + 1;\n\t\tif (m - n > floor((maxInt - delta) / handledCPCountPlusOne)) {\n\t\t\terror$1('overflow');\n\t\t}\n\n\t\tdelta += (m - n) * handledCPCountPlusOne;\n\t\tn = m;\n\n\t\tvar _iteratorNormalCompletion3 = true;\n\t\tvar _didIteratorError3 = false;\n\t\tvar _iteratorError3 = undefined;\n\n\t\ttry {\n\t\t\tfor (var _iterator3 = input[Symbol.iterator](), _step3; !(_iteratorNormalCompletion3 = (_step3 = _iterator3.next()).done); _iteratorNormalCompletion3 = true) {\n\t\t\t\tvar _currentValue = _step3.value;\n\n\t\t\t\tif (_currentValue < n && ++delta > maxInt) {\n\t\t\t\t\terror$1('overflow');\n\t\t\t\t}\n\t\t\t\tif (_currentValue == n) {\n\t\t\t\t\t// Represent delta as a generalized variable-length integer.\n\t\t\t\t\tvar q = delta;\n\t\t\t\t\tfor (var k = base;; /* no condition */k += base) {\n\t\t\t\t\t\tvar t = k <= bias ? tMin : k >= bias + tMax ? tMax : k - bias;\n\t\t\t\t\t\tif (q < t) {\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\t\t\t\t\t\tvar qMinusT = q - t;\n\t\t\t\t\t\tvar baseMinusT = base - t;\n\t\t\t\t\t\toutput.push(stringFromCharCode(digitToBasic(t + qMinusT % baseMinusT, 0)));\n\t\t\t\t\t\tq = floor(qMinusT / baseMinusT);\n\t\t\t\t\t}\n\n\t\t\t\t\toutput.push(stringFromCharCode(digitToBasic(q, 0)));\n\t\t\t\t\tbias = adapt(delta, handledCPCountPlusOne, handledCPCount == basicLength);\n\t\t\t\t\tdelta = 0;\n\t\t\t\t\t++handledCPCount;\n\t\t\t\t}\n\t\t\t}\n\t\t} catch (err) {\n\t\t\t_didIteratorError3 = true;\n\t\t\t_iteratorError3 = err;\n\t\t} finally {\n\t\t\ttry {\n\t\t\t\tif (!_iteratorNormalCompletion3 && _iterator3.return) {\n\t\t\t\t\t_iterator3.return();\n\t\t\t\t}\n\t\t\t} finally {\n\t\t\t\tif (_didIteratorError3) {\n\t\t\t\t\tthrow _iteratorError3;\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\t++delta;\n\t\t++n;\n\t}\n\treturn output.join('');\n};\n\n/**\n * Converts a Punycode string representing a domain name or an email address\n * to Unicode. Only the Punycoded parts of the input will be converted, i.e.\n * it doesn't matter if you call it on a string that has already been\n * converted to Unicode.\n * @memberOf punycode\n * @param {String} input The Punycoded domain name or email address to\n * convert to Unicode.\n * @returns {String} The Unicode representation of the given Punycode\n * string.\n */\nvar toUnicode = function toUnicode(input) {\n\treturn mapDomain(input, function (string) {\n\t\treturn regexPunycode.test(string) ? decode(string.slice(4).toLowerCase()) : string;\n\t});\n};\n\n/**\n * Converts a Unicode string representing a domain name or an email address to\n * Punycode. 
Only the non-ASCII parts of the domain name will be converted,\n * i.e. it doesn't matter if you call it with a domain that's already in\n * ASCII.\n * @memberOf punycode\n * @param {String} input The domain name or email address to convert, as a\n * Unicode string.\n * @returns {String} The Punycode representation of the given domain name or\n * email address.\n */\nvar toASCII = function toASCII(input) {\n\treturn mapDomain(input, function (string) {\n\t\treturn regexNonASCII.test(string) ? 'xn--' + encode(string) : string;\n\t});\n};\n\n/*--------------------------------------------------------------------------*/\n\n/** Define the public API */\nvar punycode = {\n\t/**\n * A string representing the current Punycode.js version number.\n * @memberOf punycode\n * @type String\n */\n\t'version': '2.1.0',\n\t/**\n * An object of methods to convert from JavaScript's internal character\n * representation (UCS-2) to Unicode code points, and back.\n * @see \n * @memberOf punycode\n * @type Object\n */\n\t'ucs2': {\n\t\t'decode': ucs2decode,\n\t\t'encode': ucs2encode\n\t},\n\t'decode': decode,\n\t'encode': encode,\n\t'toASCII': toASCII,\n\t'toUnicode': toUnicode\n};\n\n/**\n * URI.js\n *\n * @fileoverview An RFC 3986 compliant, scheme extendable URI parsing/validating/resolving library for JavaScript.\n * @author Gary Court\n * @see http://github.com/garycourt/uri-js\n */\n/**\n * Copyright 2011 Gary Court. All rights reserved.\n *\n * Redistribution and use in source and binary forms, with or without modification, are\n * permitted provided that the following conditions are met:\n *\n * 1. Redistributions of source code must retain the above copyright notice, this list of\n * conditions and the following disclaimer.\n *\n * 2. Redistributions in binary form must reproduce the above copyright notice, this list\n * of conditions and the following disclaimer in the documentation and/or other materials\n * provided with the distribution.\n *\n * THIS SOFTWARE IS PROVIDED BY GARY COURT ``AS IS'' AND ANY EXPRESS OR IMPLIED\n * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND\n * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL GARY COURT OR\n * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\n * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\n * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON\n * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING\n * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF\n * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n *\n * The views and conclusions contained in the software and documentation are those of the\n * authors and should not be interpreted as representing official policies, either expressed\n * or implied, of Gary Court.\n */\nvar SCHEMES = {};\nfunction pctEncChar(chr) {\n var c = chr.charCodeAt(0);\n var e = void 0;\n if (c < 16) e = \"%0\" + c.toString(16).toUpperCase();else if (c < 128) e = \"%\" + c.toString(16).toUpperCase();else if (c < 2048) e = \"%\" + (c >> 6 | 192).toString(16).toUpperCase() + \"%\" + (c & 63 | 128).toString(16).toUpperCase();else e = \"%\" + (c >> 12 | 224).toString(16).toUpperCase() + \"%\" + (c >> 6 & 63 | 128).toString(16).toUpperCase() + \"%\" + (c & 63 | 128).toString(16).toUpperCase();\n return e;\n}\nfunction pctDecChars(str) {\n var newStr = \"\";\n var i = 0;\n var il = str.length;\n while (i < il) {\n var c = parseInt(str.substr(i + 1, 2), 16);\n if (c < 128) {\n newStr += String.fromCharCode(c);\n i += 3;\n } else if (c >= 194 && c < 224) {\n if (il - i >= 6) {\n var c2 = parseInt(str.substr(i + 4, 2), 16);\n newStr += String.fromCharCode((c & 31) << 6 | c2 & 63);\n } else {\n newStr += str.substr(i, 6);\n }\n i += 6;\n } else if (c >= 224) {\n if (il - i >= 9) {\n var _c = parseInt(str.substr(i + 4, 2), 16);\n var c3 = parseInt(str.substr(i + 7, 2), 16);\n newStr += String.fromCharCode((c & 15) << 12 | (_c & 63) << 6 | c3 & 63);\n } else {\n newStr += str.substr(i, 9);\n }\n i += 9;\n } else {\n newStr += str.substr(i, 3);\n i += 3;\n }\n }\n return newStr;\n}\nfunction _normalizeComponentEncoding(components, protocol) {\n function decodeUnreserved(str) {\n var decStr = pctDecChars(str);\n return !decStr.match(protocol.UNRESERVED) ? str : decStr;\n }\n if (components.scheme) components.scheme = String(components.scheme).replace(protocol.PCT_ENCODED, decodeUnreserved).toLowerCase().replace(protocol.NOT_SCHEME, \"\");\n if (components.userinfo !== undefined) components.userinfo = String(components.userinfo).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(protocol.NOT_USERINFO, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase);\n if (components.host !== undefined) components.host = String(components.host).replace(protocol.PCT_ENCODED, decodeUnreserved).toLowerCase().replace(protocol.NOT_HOST, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase);\n if (components.path !== undefined) components.path = String(components.path).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(components.scheme ? 
protocol.NOT_PATH : protocol.NOT_PATH_NOSCHEME, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase);\n if (components.query !== undefined) components.query = String(components.query).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(protocol.NOT_QUERY, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase);\n if (components.fragment !== undefined) components.fragment = String(components.fragment).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(protocol.NOT_FRAGMENT, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase);\n return components;\n}\n\nfunction _stripLeadingZeros(str) {\n return str.replace(/^0*(.*)/, \"$1\") || \"0\";\n}\nfunction _normalizeIPv4(host, protocol) {\n var matches = host.match(protocol.IPV4ADDRESS) || [];\n\n var _matches = slicedToArray(matches, 2),\n address = _matches[1];\n\n if (address) {\n return address.split(\".\").map(_stripLeadingZeros).join(\".\");\n } else {\n return host;\n }\n}\nfunction _normalizeIPv6(host, protocol) {\n var matches = host.match(protocol.IPV6ADDRESS) || [];\n\n var _matches2 = slicedToArray(matches, 3),\n address = _matches2[1],\n zone = _matches2[2];\n\n if (address) {\n var _address$toLowerCase$ = address.toLowerCase().split('::').reverse(),\n _address$toLowerCase$2 = slicedToArray(_address$toLowerCase$, 2),\n last = _address$toLowerCase$2[0],\n first = _address$toLowerCase$2[1];\n\n var firstFields = first ? first.split(\":\").map(_stripLeadingZeros) : [];\n var lastFields = last.split(\":\").map(_stripLeadingZeros);\n var isLastFieldIPv4Address = protocol.IPV4ADDRESS.test(lastFields[lastFields.length - 1]);\n var fieldCount = isLastFieldIPv4Address ? 7 : 8;\n var lastFieldsStart = lastFields.length - fieldCount;\n var fields = Array(fieldCount);\n for (var x = 0; x < fieldCount; ++x) {\n fields[x] = firstFields[x] || lastFields[lastFieldsStart + x] || '';\n }\n if (isLastFieldIPv4Address) {\n fields[fieldCount - 1] = _normalizeIPv4(fields[fieldCount - 1], protocol);\n }\n var allZeroFields = fields.reduce(function (acc, field, index) {\n if (!field || field === \"0\") {\n var lastLongest = acc[acc.length - 1];\n if (lastLongest && lastLongest.index + lastLongest.length === index) {\n lastLongest.length++;\n } else {\n acc.push({ index: index, length: 1 });\n }\n }\n return acc;\n }, []);\n var longestZeroFields = allZeroFields.sort(function (a, b) {\n return b.length - a.length;\n })[0];\n var newHost = void 0;\n if (longestZeroFields && longestZeroFields.length > 1) {\n var newFirst = fields.slice(0, longestZeroFields.index);\n var newLast = fields.slice(longestZeroFields.index + longestZeroFields.length);\n newHost = newFirst.join(\":\") + \"::\" + newLast.join(\":\");\n } else {\n newHost = fields.join(\":\");\n }\n if (zone) {\n newHost += \"%\" + zone;\n }\n return newHost;\n } else {\n return host;\n }\n}\nvar URI_PARSE = /^(?:([^:\\/?#]+):)?(?:\\/\\/((?:([^\\/?#@]*)@)?(\\[[^\\/?#\\]]+\\]|[^\\/?#:]*)(?:\\:(\\d*))?))?([^?#]*)(?:\\?([^#]*))?(?:#((?:.|\\n|\\r)*))?/i;\nvar NO_MATCH_IS_UNDEFINED = \"\".match(/(){0}/)[1] === undefined;\nfunction parse(uriString) {\n var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};\n\n var components = {};\n var protocol = options.iri !== false ? IRI_PROTOCOL : URI_PROTOCOL;\n if (options.reference === \"suffix\") uriString = (options.scheme ? 
options.scheme + \":\" : \"\") + \"//\" + uriString;\n var matches = uriString.match(URI_PARSE);\n if (matches) {\n if (NO_MATCH_IS_UNDEFINED) {\n //store each component\n components.scheme = matches[1];\n components.userinfo = matches[3];\n components.host = matches[4];\n components.port = parseInt(matches[5], 10);\n components.path = matches[6] || \"\";\n components.query = matches[7];\n components.fragment = matches[8];\n //fix port number\n if (isNaN(components.port)) {\n components.port = matches[5];\n }\n } else {\n //IE FIX for improper RegExp matching\n //store each component\n components.scheme = matches[1] || undefined;\n components.userinfo = uriString.indexOf(\"@\") !== -1 ? matches[3] : undefined;\n components.host = uriString.indexOf(\"//\") !== -1 ? matches[4] : undefined;\n components.port = parseInt(matches[5], 10);\n components.path = matches[6] || \"\";\n components.query = uriString.indexOf(\"?\") !== -1 ? matches[7] : undefined;\n components.fragment = uriString.indexOf(\"#\") !== -1 ? matches[8] : undefined;\n //fix port number\n if (isNaN(components.port)) {\n components.port = uriString.match(/\\/\\/(?:.|\\n)*\\:(?:\\/|\\?|\\#|$)/) ? matches[4] : undefined;\n }\n }\n if (components.host) {\n //normalize IP hosts\n components.host = _normalizeIPv6(_normalizeIPv4(components.host, protocol), protocol);\n }\n //determine reference type\n if (components.scheme === undefined && components.userinfo === undefined && components.host === undefined && components.port === undefined && !components.path && components.query === undefined) {\n components.reference = \"same-document\";\n } else if (components.scheme === undefined) {\n components.reference = \"relative\";\n } else if (components.fragment === undefined) {\n components.reference = \"absolute\";\n } else {\n components.reference = \"uri\";\n }\n //check for reference errors\n if (options.reference && options.reference !== \"suffix\" && options.reference !== components.reference) {\n components.error = components.error || \"URI is not a \" + options.reference + \" reference.\";\n }\n //find scheme handler\n var schemeHandler = SCHEMES[(options.scheme || components.scheme || \"\").toLowerCase()];\n //check if scheme can't handle IRIs\n if (!options.unicodeSupport && (!schemeHandler || !schemeHandler.unicodeSupport)) {\n //if host component is a domain name\n if (components.host && (options.domainHost || schemeHandler && schemeHandler.domainHost)) {\n //convert Unicode IDN -> ASCII IDN\n try {\n components.host = punycode.toASCII(components.host.replace(protocol.PCT_ENCODED, pctDecChars).toLowerCase());\n } catch (e) {\n components.error = components.error || \"Host's domain name can not be converted to ASCII via punycode: \" + e;\n }\n }\n //convert IRI -> URI\n _normalizeComponentEncoding(components, URI_PROTOCOL);\n } else {\n //normalize encodings\n _normalizeComponentEncoding(components, protocol);\n }\n //perform scheme specific parsing\n if (schemeHandler && schemeHandler.parse) {\n schemeHandler.parse(components, options);\n }\n } else {\n components.error = components.error || \"URI can not be parsed.\";\n }\n return components;\n}\n\nfunction _recomposeAuthority(components, options) {\n var protocol = options.iri !== false ? 
IRI_PROTOCOL : URI_PROTOCOL;\n var uriTokens = [];\n if (components.userinfo !== undefined) {\n uriTokens.push(components.userinfo);\n uriTokens.push(\"@\");\n }\n if (components.host !== undefined) {\n //normalize IP hosts, add brackets and escape zone separator for IPv6\n uriTokens.push(_normalizeIPv6(_normalizeIPv4(String(components.host), protocol), protocol).replace(protocol.IPV6ADDRESS, function (_, $1, $2) {\n return \"[\" + $1 + ($2 ? \"%25\" + $2 : \"\") + \"]\";\n }));\n }\n if (typeof components.port === \"number\" || typeof components.port === \"string\") {\n uriTokens.push(\":\");\n uriTokens.push(String(components.port));\n }\n return uriTokens.length ? uriTokens.join(\"\") : undefined;\n}\n\nvar RDS1 = /^\\.\\.?\\//;\nvar RDS2 = /^\\/\\.(\\/|$)/;\nvar RDS3 = /^\\/\\.\\.(\\/|$)/;\nvar RDS5 = /^\\/?(?:.|\\n)*?(?=\\/|$)/;\nfunction removeDotSegments(input) {\n var output = [];\n while (input.length) {\n if (input.match(RDS1)) {\n input = input.replace(RDS1, \"\");\n } else if (input.match(RDS2)) {\n input = input.replace(RDS2, \"/\");\n } else if (input.match(RDS3)) {\n input = input.replace(RDS3, \"/\");\n output.pop();\n } else if (input === \".\" || input === \"..\") {\n input = \"\";\n } else {\n var im = input.match(RDS5);\n if (im) {\n var s = im[0];\n input = input.slice(s.length);\n output.push(s);\n } else {\n throw new Error(\"Unexpected dot segment condition\");\n }\n }\n }\n return output.join(\"\");\n}\n\nfunction serialize(components) {\n var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};\n\n var protocol = options.iri ? IRI_PROTOCOL : URI_PROTOCOL;\n var uriTokens = [];\n //find scheme handler\n var schemeHandler = SCHEMES[(options.scheme || components.scheme || \"\").toLowerCase()];\n //perform scheme specific serialization\n if (schemeHandler && schemeHandler.serialize) schemeHandler.serialize(components, options);\n if (components.host) {\n //if host component is an IPv6 address\n if (protocol.IPV6ADDRESS.test(components.host)) {}\n //TODO: normalize IPv6 address as per RFC 5952\n\n //if host component is a domain name\n else if (options.domainHost || schemeHandler && schemeHandler.domainHost) {\n //convert IDN via punycode\n try {\n components.host = !options.iri ? punycode.toASCII(components.host.replace(protocol.PCT_ENCODED, pctDecChars).toLowerCase()) : punycode.toUnicode(components.host);\n } catch (e) {\n components.error = components.error || \"Host's domain name can not be converted to \" + (!options.iri ? 
\"ASCII\" : \"Unicode\") + \" via punycode: \" + e;\n }\n }\n }\n //normalize encoding\n _normalizeComponentEncoding(components, protocol);\n if (options.reference !== \"suffix\" && components.scheme) {\n uriTokens.push(components.scheme);\n uriTokens.push(\":\");\n }\n var authority = _recomposeAuthority(components, options);\n if (authority !== undefined) {\n if (options.reference !== \"suffix\") {\n uriTokens.push(\"//\");\n }\n uriTokens.push(authority);\n if (components.path && components.path.charAt(0) !== \"/\") {\n uriTokens.push(\"/\");\n }\n }\n if (components.path !== undefined) {\n var s = components.path;\n if (!options.absolutePath && (!schemeHandler || !schemeHandler.absolutePath)) {\n s = removeDotSegments(s);\n }\n if (authority === undefined) {\n s = s.replace(/^\\/\\//, \"/%2F\"); //don't allow the path to start with \"//\"\n }\n uriTokens.push(s);\n }\n if (components.query !== undefined) {\n uriTokens.push(\"?\");\n uriTokens.push(components.query);\n }\n if (components.fragment !== undefined) {\n uriTokens.push(\"#\");\n uriTokens.push(components.fragment);\n }\n return uriTokens.join(\"\"); //merge tokens into a string\n}\n\nfunction resolveComponents(base, relative) {\n var options = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};\n var skipNormalization = arguments[3];\n\n var target = {};\n if (!skipNormalization) {\n base = parse(serialize(base, options), options); //normalize base components\n relative = parse(serialize(relative, options), options); //normalize relative components\n }\n options = options || {};\n if (!options.tolerant && relative.scheme) {\n target.scheme = relative.scheme;\n //target.authority = relative.authority;\n target.userinfo = relative.userinfo;\n target.host = relative.host;\n target.port = relative.port;\n target.path = removeDotSegments(relative.path || \"\");\n target.query = relative.query;\n } else {\n if (relative.userinfo !== undefined || relative.host !== undefined || relative.port !== undefined) {\n //target.authority = relative.authority;\n target.userinfo = relative.userinfo;\n target.host = relative.host;\n target.port = relative.port;\n target.path = removeDotSegments(relative.path || \"\");\n target.query = relative.query;\n } else {\n if (!relative.path) {\n target.path = base.path;\n if (relative.query !== undefined) {\n target.query = relative.query;\n } else {\n target.query = base.query;\n }\n } else {\n if (relative.path.charAt(0) === \"/\") {\n target.path = removeDotSegments(relative.path);\n } else {\n if ((base.userinfo !== undefined || base.host !== undefined || base.port !== undefined) && !base.path) {\n target.path = \"/\" + relative.path;\n } else if (!base.path) {\n target.path = relative.path;\n } else {\n target.path = base.path.slice(0, base.path.lastIndexOf(\"/\") + 1) + relative.path;\n }\n target.path = removeDotSegments(target.path);\n }\n target.query = relative.query;\n }\n //target.authority = base.authority;\n target.userinfo = base.userinfo;\n target.host = base.host;\n target.port = base.port;\n }\n target.scheme = base.scheme;\n }\n target.fragment = relative.fragment;\n return target;\n}\n\nfunction resolve(baseURI, relativeURI, options) {\n var schemelessOptions = assign({ scheme: 'null' }, options);\n return serialize(resolveComponents(parse(baseURI, schemelessOptions), parse(relativeURI, schemelessOptions), schemelessOptions, true), schemelessOptions);\n}\n\nfunction normalize(uri, options) {\n if (typeof uri === \"string\") {\n uri = serialize(parse(uri, options), 
options);\n } else if (typeOf(uri) === \"object\") {\n uri = parse(serialize(uri, options), options);\n }\n return uri;\n}\n\nfunction equal(uriA, uriB, options) {\n if (typeof uriA === \"string\") {\n uriA = serialize(parse(uriA, options), options);\n } else if (typeOf(uriA) === \"object\") {\n uriA = serialize(uriA, options);\n }\n if (typeof uriB === \"string\") {\n uriB = serialize(parse(uriB, options), options);\n } else if (typeOf(uriB) === \"object\") {\n uriB = serialize(uriB, options);\n }\n return uriA === uriB;\n}\n\nfunction escapeComponent(str, options) {\n return str && str.toString().replace(!options || !options.iri ? URI_PROTOCOL.ESCAPE : IRI_PROTOCOL.ESCAPE, pctEncChar);\n}\n\nfunction unescapeComponent(str, options) {\n return str && str.toString().replace(!options || !options.iri ? URI_PROTOCOL.PCT_ENCODED : IRI_PROTOCOL.PCT_ENCODED, pctDecChars);\n}\n\nvar handler = {\n scheme: \"http\",\n domainHost: true,\n parse: function parse(components, options) {\n //report missing host\n if (!components.host) {\n components.error = components.error || \"HTTP URIs must have a host.\";\n }\n return components;\n },\n serialize: function serialize(components, options) {\n var secure = String(components.scheme).toLowerCase() === \"https\";\n //normalize the default port\n if (components.port === (secure ? 443 : 80) || components.port === \"\") {\n components.port = undefined;\n }\n //normalize the empty path\n if (!components.path) {\n components.path = \"/\";\n }\n //NOTE: We do not parse query strings for HTTP URIs\n //as WWW Form Url Encoded query strings are part of the HTML4+ spec,\n //and not the HTTP spec.\n return components;\n }\n};\n\nvar handler$1 = {\n scheme: \"https\",\n domainHost: handler.domainHost,\n parse: handler.parse,\n serialize: handler.serialize\n};\n\nfunction isSecure(wsComponents) {\n return typeof wsComponents.secure === 'boolean' ? wsComponents.secure : String(wsComponents.scheme).toLowerCase() === \"wss\";\n}\n//RFC 6455\nvar handler$2 = {\n scheme: \"ws\",\n domainHost: true,\n parse: function parse(components, options) {\n var wsComponents = components;\n //indicate if the secure flag is set\n wsComponents.secure = isSecure(wsComponents);\n //construct resouce name\n wsComponents.resourceName = (wsComponents.path || '/') + (wsComponents.query ? '?' + wsComponents.query : '');\n wsComponents.path = undefined;\n wsComponents.query = undefined;\n return wsComponents;\n },\n serialize: function serialize(wsComponents, options) {\n //normalize the default port\n if (wsComponents.port === (isSecure(wsComponents) ? 443 : 80) || wsComponents.port === \"\") {\n wsComponents.port = undefined;\n }\n //ensure scheme matches secure flag\n if (typeof wsComponents.secure === 'boolean') {\n wsComponents.scheme = wsComponents.secure ? 'wss' : 'ws';\n wsComponents.secure = undefined;\n }\n //reconstruct path from resource name\n if (wsComponents.resourceName) {\n var _wsComponents$resourc = wsComponents.resourceName.split('?'),\n _wsComponents$resourc2 = slicedToArray(_wsComponents$resourc, 2),\n path = _wsComponents$resourc2[0],\n query = _wsComponents$resourc2[1];\n\n wsComponents.path = path && path !== '/' ? 
path : undefined;\n wsComponents.query = query;\n wsComponents.resourceName = undefined;\n }\n //forbid fragment component\n wsComponents.fragment = undefined;\n return wsComponents;\n }\n};\n\nvar handler$3 = {\n scheme: \"wss\",\n domainHost: handler$2.domainHost,\n parse: handler$2.parse,\n serialize: handler$2.serialize\n};\n\nvar O = {};\nvar isIRI = true;\n//RFC 3986\nvar UNRESERVED$$ = \"[A-Za-z0-9\\\\-\\\\.\\\\_\\\\~\" + (isIRI ? \"\\\\xA0-\\\\u200D\\\\u2010-\\\\u2029\\\\u202F-\\\\uD7FF\\\\uF900-\\\\uFDCF\\\\uFDF0-\\\\uFFEF\" : \"\") + \"]\";\nvar HEXDIG$$ = \"[0-9A-Fa-f]\"; //case-insensitive\nvar PCT_ENCODED$ = subexp(subexp(\"%[EFef]\" + HEXDIG$$ + \"%\" + HEXDIG$$ + HEXDIG$$ + \"%\" + HEXDIG$$ + HEXDIG$$) + \"|\" + subexp(\"%[89A-Fa-f]\" + HEXDIG$$ + \"%\" + HEXDIG$$ + HEXDIG$$) + \"|\" + subexp(\"%\" + HEXDIG$$ + HEXDIG$$)); //expanded\n//RFC 5322, except these symbols as per RFC 6068: @ : / ? # [ ] & ; =\n//const ATEXT$$ = \"[A-Za-z0-9\\\\!\\\\#\\\\$\\\\%\\\\&\\\\'\\\\*\\\\+\\\\-\\\\/\\\\=\\\\?\\\\^\\\\_\\\\`\\\\{\\\\|\\\\}\\\\~]\";\n//const WSP$$ = \"[\\\\x20\\\\x09]\";\n//const OBS_QTEXT$$ = \"[\\\\x01-\\\\x08\\\\x0B\\\\x0C\\\\x0E-\\\\x1F\\\\x7F]\"; //(%d1-8 / %d11-12 / %d14-31 / %d127)\n//const QTEXT$$ = merge(\"[\\\\x21\\\\x23-\\\\x5B\\\\x5D-\\\\x7E]\", OBS_QTEXT$$); //%d33 / %d35-91 / %d93-126 / obs-qtext\n//const VCHAR$$ = \"[\\\\x21-\\\\x7E]\";\n//const WSP$$ = \"[\\\\x20\\\\x09]\";\n//const OBS_QP$ = subexp(\"\\\\\\\\\" + merge(\"[\\\\x00\\\\x0D\\\\x0A]\", OBS_QTEXT$$)); //%d0 / CR / LF / obs-qtext\n//const FWS$ = subexp(subexp(WSP$$ + \"*\" + \"\\\\x0D\\\\x0A\") + \"?\" + WSP$$ + \"+\");\n//const QUOTED_PAIR$ = subexp(subexp(\"\\\\\\\\\" + subexp(VCHAR$$ + \"|\" + WSP$$)) + \"|\" + OBS_QP$);\n//const QUOTED_STRING$ = subexp('\\\\\"' + subexp(FWS$ + \"?\" + QCONTENT$) + \"*\" + FWS$ + \"?\" + '\\\\\"');\nvar ATEXT$$ = \"[A-Za-z0-9\\\\!\\\\$\\\\%\\\\'\\\\*\\\\+\\\\-\\\\^\\\\_\\\\`\\\\{\\\\|\\\\}\\\\~]\";\nvar QTEXT$$ = \"[\\\\!\\\\$\\\\%\\\\'\\\\(\\\\)\\\\*\\\\+\\\\,\\\\-\\\\.0-9\\\\<\\\\>A-Z\\\\x5E-\\\\x7E]\";\nvar VCHAR$$ = merge(QTEXT$$, \"[\\\\\\\"\\\\\\\\]\");\nvar SOME_DELIMS$$ = \"[\\\\!\\\\$\\\\'\\\\(\\\\)\\\\*\\\\+\\\\,\\\\;\\\\:\\\\@]\";\nvar UNRESERVED = new RegExp(UNRESERVED$$, \"g\");\nvar PCT_ENCODED = new RegExp(PCT_ENCODED$, \"g\");\nvar NOT_LOCAL_PART = new RegExp(merge(\"[^]\", ATEXT$$, \"[\\\\.]\", '[\\\\\"]', VCHAR$$), \"g\");\nvar NOT_HFNAME = new RegExp(merge(\"[^]\", UNRESERVED$$, SOME_DELIMS$$), \"g\");\nvar NOT_HFVALUE = NOT_HFNAME;\nfunction decodeUnreserved(str) {\n var decStr = pctDecChars(str);\n return !decStr.match(UNRESERVED) ? str : decStr;\n}\nvar handler$4 = {\n scheme: \"mailto\",\n parse: function parse$$1(components, options) {\n var mailtoComponents = components;\n var to = mailtoComponents.to = mailtoComponents.path ? 
mailtoComponents.path.split(\",\") : [];\n mailtoComponents.path = undefined;\n if (mailtoComponents.query) {\n var unknownHeaders = false;\n var headers = {};\n var hfields = mailtoComponents.query.split(\"&\");\n for (var x = 0, xl = hfields.length; x < xl; ++x) {\n var hfield = hfields[x].split(\"=\");\n switch (hfield[0]) {\n case \"to\":\n var toAddrs = hfield[1].split(\",\");\n for (var _x = 0, _xl = toAddrs.length; _x < _xl; ++_x) {\n to.push(toAddrs[_x]);\n }\n break;\n case \"subject\":\n mailtoComponents.subject = unescapeComponent(hfield[1], options);\n break;\n case \"body\":\n mailtoComponents.body = unescapeComponent(hfield[1], options);\n break;\n default:\n unknownHeaders = true;\n headers[unescapeComponent(hfield[0], options)] = unescapeComponent(hfield[1], options);\n break;\n }\n }\n if (unknownHeaders) mailtoComponents.headers = headers;\n }\n mailtoComponents.query = undefined;\n for (var _x2 = 0, _xl2 = to.length; _x2 < _xl2; ++_x2) {\n var addr = to[_x2].split(\"@\");\n addr[0] = unescapeComponent(addr[0]);\n if (!options.unicodeSupport) {\n //convert Unicode IDN -> ASCII IDN\n try {\n addr[1] = punycode.toASCII(unescapeComponent(addr[1], options).toLowerCase());\n } catch (e) {\n mailtoComponents.error = mailtoComponents.error || \"Email address's domain name can not be converted to ASCII via punycode: \" + e;\n }\n } else {\n addr[1] = unescapeComponent(addr[1], options).toLowerCase();\n }\n to[_x2] = addr.join(\"@\");\n }\n return mailtoComponents;\n },\n serialize: function serialize$$1(mailtoComponents, options) {\n var components = mailtoComponents;\n var to = toArray(mailtoComponents.to);\n if (to) {\n for (var x = 0, xl = to.length; x < xl; ++x) {\n var toAddr = String(to[x]);\n var atIdx = toAddr.lastIndexOf(\"@\");\n var localPart = toAddr.slice(0, atIdx).replace(PCT_ENCODED, decodeUnreserved).replace(PCT_ENCODED, toUpperCase).replace(NOT_LOCAL_PART, pctEncChar);\n var domain = toAddr.slice(atIdx + 1);\n //convert IDN via punycode\n try {\n domain = !options.iri ? punycode.toASCII(unescapeComponent(domain, options).toLowerCase()) : punycode.toUnicode(domain);\n } catch (e) {\n components.error = components.error || \"Email address's domain name can not be converted to \" + (!options.iri ? 
\"ASCII\" : \"Unicode\") + \" via punycode: \" + e;\n }\n to[x] = localPart + \"@\" + domain;\n }\n components.path = to.join(\",\");\n }\n var headers = mailtoComponents.headers = mailtoComponents.headers || {};\n if (mailtoComponents.subject) headers[\"subject\"] = mailtoComponents.subject;\n if (mailtoComponents.body) headers[\"body\"] = mailtoComponents.body;\n var fields = [];\n for (var name in headers) {\n if (headers[name] !== O[name]) {\n fields.push(name.replace(PCT_ENCODED, decodeUnreserved).replace(PCT_ENCODED, toUpperCase).replace(NOT_HFNAME, pctEncChar) + \"=\" + headers[name].replace(PCT_ENCODED, decodeUnreserved).replace(PCT_ENCODED, toUpperCase).replace(NOT_HFVALUE, pctEncChar));\n }\n }\n if (fields.length) {\n components.query = fields.join(\"&\");\n }\n return components;\n }\n};\n\nvar URN_PARSE = /^([^\\:]+)\\:(.*)/;\n//RFC 2141\nvar handler$5 = {\n scheme: \"urn\",\n parse: function parse$$1(components, options) {\n var matches = components.path && components.path.match(URN_PARSE);\n var urnComponents = components;\n if (matches) {\n var scheme = options.scheme || urnComponents.scheme || \"urn\";\n var nid = matches[1].toLowerCase();\n var nss = matches[2];\n var urnScheme = scheme + \":\" + (options.nid || nid);\n var schemeHandler = SCHEMES[urnScheme];\n urnComponents.nid = nid;\n urnComponents.nss = nss;\n urnComponents.path = undefined;\n if (schemeHandler) {\n urnComponents = schemeHandler.parse(urnComponents, options);\n }\n } else {\n urnComponents.error = urnComponents.error || \"URN can not be parsed.\";\n }\n return urnComponents;\n },\n serialize: function serialize$$1(urnComponents, options) {\n var scheme = options.scheme || urnComponents.scheme || \"urn\";\n var nid = urnComponents.nid;\n var urnScheme = scheme + \":\" + (options.nid || nid);\n var schemeHandler = SCHEMES[urnScheme];\n if (schemeHandler) {\n urnComponents = schemeHandler.serialize(urnComponents, options);\n }\n var uriComponents = urnComponents;\n var nss = urnComponents.nss;\n uriComponents.path = (nid || options.nid) + \":\" + nss;\n return uriComponents;\n }\n};\n\nvar UUID = /^[0-9A-Fa-f]{8}(?:\\-[0-9A-Fa-f]{4}){3}\\-[0-9A-Fa-f]{12}$/;\n//RFC 4122\nvar handler$6 = {\n scheme: \"urn:uuid\",\n parse: function parse(urnComponents, options) {\n var uuidComponents = urnComponents;\n uuidComponents.uuid = uuidComponents.nss;\n uuidComponents.nss = undefined;\n if (!options.tolerant && (!uuidComponents.uuid || !uuidComponents.uuid.match(UUID))) {\n uuidComponents.error = uuidComponents.error || \"UUID is not valid.\";\n }\n return uuidComponents;\n },\n serialize: function serialize(uuidComponents, options) {\n var urnComponents = uuidComponents;\n //normalize UUID\n urnComponents.nss = (uuidComponents.uuid || \"\").toLowerCase();\n return urnComponents;\n }\n};\n\nSCHEMES[handler.scheme] = handler;\nSCHEMES[handler$1.scheme] = handler$1;\nSCHEMES[handler$2.scheme] = handler$2;\nSCHEMES[handler$3.scheme] = handler$3;\nSCHEMES[handler$4.scheme] = handler$4;\nSCHEMES[handler$5.scheme] = handler$5;\nSCHEMES[handler$6.scheme] = handler$6;\n\nexports.SCHEMES = SCHEMES;\nexports.pctEncChar = pctEncChar;\nexports.pctDecChars = pctDecChars;\nexports.parse = parse;\nexports.removeDotSegments = removeDotSegments;\nexports.serialize = serialize;\nexports.resolveComponents = resolveComponents;\nexports.resolve = resolve;\nexports.normalize = normalize;\nexports.equal = equal;\nexports.escapeComponent = escapeComponent;\nexports.unescapeComponent = unescapeComponent;\n\nObject.defineProperty(exports, 
'__esModule', { value: true });\n\n})));\n//# sourceMappingURL=uri.all.js.map\n","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nObject.defineProperty(exports, \"v1\", {\n enumerable: true,\n get: function () {\n return _v.default;\n }\n});\nObject.defineProperty(exports, \"v3\", {\n enumerable: true,\n get: function () {\n return _v2.default;\n }\n});\nObject.defineProperty(exports, \"v4\", {\n enumerable: true,\n get: function () {\n return _v3.default;\n }\n});\nObject.defineProperty(exports, \"v5\", {\n enumerable: true,\n get: function () {\n return _v4.default;\n }\n});\nObject.defineProperty(exports, \"NIL\", {\n enumerable: true,\n get: function () {\n return _nil.default;\n }\n});\nObject.defineProperty(exports, \"version\", {\n enumerable: true,\n get: function () {\n return _version.default;\n }\n});\nObject.defineProperty(exports, \"validate\", {\n enumerable: true,\n get: function () {\n return _validate.default;\n }\n});\nObject.defineProperty(exports, \"stringify\", {\n enumerable: true,\n get: function () {\n return _stringify.default;\n }\n});\nObject.defineProperty(exports, \"parse\", {\n enumerable: true,\n get: function () {\n return _parse.default;\n }\n});\n\nvar _v = _interopRequireDefault(require(\"./v1.js\"));\n\nvar _v2 = _interopRequireDefault(require(\"./v3.js\"));\n\nvar _v3 = _interopRequireDefault(require(\"./v4.js\"));\n\nvar _v4 = _interopRequireDefault(require(\"./v5.js\"));\n\nvar _nil = _interopRequireDefault(require(\"./nil.js\"));\n\nvar _version = _interopRequireDefault(require(\"./version.js\"));\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nvar _parse = _interopRequireDefault(require(\"./parse.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction md5(bytes) {\n if (Array.isArray(bytes)) {\n bytes = Buffer.from(bytes);\n } else if (typeof bytes === 'string') {\n bytes = Buffer.from(bytes, 'utf8');\n }\n\n return _crypto.default.createHash('md5').update(bytes).digest();\n}\n\nvar _default = md5;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\nvar _default = '00000000-0000-0000-0000-000000000000';\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction parse(uuid) {\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Invalid UUID');\n }\n\n let v;\n const arr = new Uint8Array(16); // Parse ########-....-....-....-............\n\n arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24;\n arr[1] = v >>> 16 & 0xff;\n arr[2] = v >>> 8 & 0xff;\n arr[3] = v & 0xff; // Parse ........-####-....-....-............\n\n arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8;\n arr[5] = v & 0xff; // Parse ........-....-####-....-............\n\n arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8;\n arr[7] = v & 0xff; // Parse ........-....-....-####-............\n\n arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8;\n arr[9] = v & 0xff; // Parse ........-....-....-....-############\n // (Use \"/\" to avoid 32-bit truncation when bit-shifting high-order bytes)\n\n arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff;\n arr[11] = v / 0x100000000 & 0xff;\n arr[12] = v >>> 24 & 0xff;\n arr[13] = v >>> 16 & 0xff;\n arr[14] = v >>> 8 & 0xff;\n arr[15] = v & 0xff;\n return arr;\n}\n\nvar _default = parse;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\nvar _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = rng;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate\n\nlet poolPtr = rnds8Pool.length;\n\nfunction rng() {\n if (poolPtr > rnds8Pool.length - 16) {\n _crypto.default.randomFillSync(rnds8Pool);\n\n poolPtr = 0;\n }\n\n return rnds8Pool.slice(poolPtr, poolPtr += 16);\n}","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction sha1(bytes) {\n if (Array.isArray(bytes)) {\n bytes = Buffer.from(bytes);\n } else if (typeof bytes === 'string') {\n bytes = Buffer.from(bytes, 'utf8');\n }\n\n return _crypto.default.createHash('sha1').update(bytes).digest();\n}\n\nvar _default = sha1;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\n/**\n * Convert array of 16 byte values to UUID string format of the form:\n * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX\n */\nconst byteToHex = [];\n\nfor (let i = 0; i < 256; ++i) {\n byteToHex.push((i + 0x100).toString(16).substr(1));\n}\n\nfunction stringify(arr, offset = 0) {\n // Note: Be careful editing this code! It's been tuned for performance\n // and works in ways you may not expect. 
See https://github.com/uuidjs/uuid/pull/434\n const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one\n // of the following:\n // - One or more input array values don't map to a hex octet (leading to\n // \"undefined\" in the uuid)\n // - Invalid input values for the RFC `version` or `variant` fields\n\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Stringified UUID is invalid');\n }\n\n return uuid;\n}\n\nvar _default = stringify;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _rng = _interopRequireDefault(require(\"./rng.js\"));\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\n// **`v1()` - Generate time-based UUID**\n//\n// Inspired by https://github.com/LiosK/UUID.js\n// and http://docs.python.org/library/uuid.html\nlet _nodeId;\n\nlet _clockseq; // Previous uuid creation time\n\n\nlet _lastMSecs = 0;\nlet _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details\n\nfunction v1(options, buf, offset) {\n let i = buf && offset || 0;\n const b = buf || new Array(16);\n options = options || {};\n let node = options.node || _nodeId;\n let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not\n // specified. We do this lazily to minimize issues related to insufficient\n // system entropy. See #189\n\n if (node == null || clockseq == null) {\n const seedBytes = options.random || (options.rng || _rng.default)();\n\n if (node == null) {\n // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1)\n node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]];\n }\n\n if (clockseq == null) {\n // Per 4.2.2, randomize (14 bit) clockseq\n clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff;\n }\n } // UUID timestamps are 100 nano-second units since the Gregorian epoch,\n // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so\n // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'\n // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.\n\n\n let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock\n // cycle to simulate higher resolution clock\n\n let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs)\n\n const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression\n\n if (dt < 0 && options.clockseq === undefined) {\n clockseq = clockseq + 1 & 0x3fff;\n } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new\n // time interval\n\n\n if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) {\n nsecs = 0;\n } // Per 4.2.1.2 Throw error if too many uuids are requested\n\n\n if (nsecs >= 10000) {\n throw new Error(\"uuid.v1(): Can't create more than 10M uuids/sec\");\n }\n\n _lastMSecs = msecs;\n _lastNSecs = nsecs;\n _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch\n\n msecs += 12219292800000; // `time_low`\n\n const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000;\n b[i++] = tl >>> 24 & 0xff;\n b[i++] = tl >>> 16 & 0xff;\n b[i++] = tl >>> 8 & 0xff;\n b[i++] = tl & 0xff; // `time_mid`\n\n const tmh = msecs / 0x100000000 * 10000 & 0xfffffff;\n b[i++] = tmh >>> 8 & 0xff;\n b[i++] = tmh & 0xff; // `time_high_and_version`\n\n b[i++] = tmh >>> 24 & 0xf | 0x10; // include version\n\n b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)\n\n b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low`\n\n b[i++] = clockseq & 0xff; // `node`\n\n for (let n = 0; n < 6; ++n) {\n b[i + n] = node[n];\n }\n\n return buf || (0, _stringify.default)(b);\n}\n\nvar _default = v1;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _v = _interopRequireDefault(require(\"./v35.js\"));\n\nvar _md = _interopRequireDefault(require(\"./md5.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst v3 = (0, _v.default)('v3', 0x30, _md.default);\nvar _default = v3;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = _default;\nexports.URL = exports.DNS = void 0;\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nvar _parse = _interopRequireDefault(require(\"./parse.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction stringToBytes(str) {\n str = unescape(encodeURIComponent(str)); // UTF8 escape\n\n const bytes = [];\n\n for (let i = 0; i < str.length; ++i) {\n bytes.push(str.charCodeAt(i));\n }\n\n return bytes;\n}\n\nconst DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';\nexports.DNS = DNS;\nconst URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8';\nexports.URL = URL;\n\nfunction _default(name, version, hashfunc) {\n function generateUUID(value, namespace, buf, offset) {\n if (typeof value === 'string') {\n value = stringToBytes(value);\n }\n\n if (typeof namespace === 'string') {\n namespace = (0, _parse.default)(namespace);\n }\n\n if (namespace.length !== 16) {\n throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)');\n } // Compute hash of namespace and value, Per 4.3\n // Future: Use spread syntax when supported on all platforms, e.g. `bytes =\n // hashfunc([...namespace, ... 
value])`\n\n\n let bytes = new Uint8Array(16 + value.length);\n bytes.set(namespace);\n bytes.set(value, namespace.length);\n bytes = hashfunc(bytes);\n bytes[6] = bytes[6] & 0x0f | version;\n bytes[8] = bytes[8] & 0x3f | 0x80;\n\n if (buf) {\n offset = offset || 0;\n\n for (let i = 0; i < 16; ++i) {\n buf[offset + i] = bytes[i];\n }\n\n return buf;\n }\n\n return (0, _stringify.default)(bytes);\n } // Function#name is not settable on some platforms (#270)\n\n\n try {\n generateUUID.name = name; // eslint-disable-next-line no-empty\n } catch (err) {} // For CommonJS default export support\n\n\n generateUUID.DNS = DNS;\n generateUUID.URL = URL;\n return generateUUID;\n}","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _rng = _interopRequireDefault(require(\"./rng.js\"));\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction v4(options, buf, offset) {\n options = options || {};\n\n const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`\n\n\n rnds[6] = rnds[6] & 0x0f | 0x40;\n rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided\n\n if (buf) {\n offset = offset || 0;\n\n for (let i = 0; i < 16; ++i) {\n buf[offset + i] = rnds[i];\n }\n\n return buf;\n }\n\n return (0, _stringify.default)(rnds);\n}\n\nvar _default = v4;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _v = _interopRequireDefault(require(\"./v35.js\"));\n\nvar _sha = _interopRequireDefault(require(\"./sha1.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst v5 = (0, _v.default)('v5', 0x50, _sha.default);\nvar _default = v5;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _regex = _interopRequireDefault(require(\"./regex.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction validate(uuid) {\n return typeof uuid === 'string' && _regex.default.test(uuid);\n}\n\nvar _default = validate;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction version(uuid) {\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Invalid UUID');\n }\n\n return parseInt(uuid.substr(14, 1), 16);\n}\n\nvar _default = version;\nexports.default = _default;","/**\n * web-streams-polyfill v3.2.1\n */\n(function (global, factory) {\n typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :\n typeof define === 'function' && define.amd ? define(['exports'], factory) :\n (global = typeof globalThis !== 'undefined' ? 
globalThis : global || self, factory(global.WebStreamsPolyfill = {}));\n}(this, (function (exports) { 'use strict';\n\n /// \n const SymbolPolyfill = typeof Symbol === 'function' && typeof Symbol.iterator === 'symbol' ?\n Symbol :\n description => `Symbol(${description})`;\n\n /// \n function noop() {\n return undefined;\n }\n function getGlobals() {\n if (typeof self !== 'undefined') {\n return self;\n }\n else if (typeof window !== 'undefined') {\n return window;\n }\n else if (typeof global !== 'undefined') {\n return global;\n }\n return undefined;\n }\n const globals = getGlobals();\n\n function typeIsObject(x) {\n return (typeof x === 'object' && x !== null) || typeof x === 'function';\n }\n const rethrowAssertionErrorRejection = noop;\n\n const originalPromise = Promise;\n const originalPromiseThen = Promise.prototype.then;\n const originalPromiseResolve = Promise.resolve.bind(originalPromise);\n const originalPromiseReject = Promise.reject.bind(originalPromise);\n function newPromise(executor) {\n return new originalPromise(executor);\n }\n function promiseResolvedWith(value) {\n return originalPromiseResolve(value);\n }\n function promiseRejectedWith(reason) {\n return originalPromiseReject(reason);\n }\n function PerformPromiseThen(promise, onFulfilled, onRejected) {\n // There doesn't appear to be any way to correctly emulate the behaviour from JavaScript, so this is just an\n // approximation.\n return originalPromiseThen.call(promise, onFulfilled, onRejected);\n }\n function uponPromise(promise, onFulfilled, onRejected) {\n PerformPromiseThen(PerformPromiseThen(promise, onFulfilled, onRejected), undefined, rethrowAssertionErrorRejection);\n }\n function uponFulfillment(promise, onFulfilled) {\n uponPromise(promise, onFulfilled);\n }\n function uponRejection(promise, onRejected) {\n uponPromise(promise, undefined, onRejected);\n }\n function transformPromiseWith(promise, fulfillmentHandler, rejectionHandler) {\n return PerformPromiseThen(promise, fulfillmentHandler, rejectionHandler);\n }\n function setPromiseIsHandledToTrue(promise) {\n PerformPromiseThen(promise, undefined, rethrowAssertionErrorRejection);\n }\n const queueMicrotask = (() => {\n const globalQueueMicrotask = globals && globals.queueMicrotask;\n if (typeof globalQueueMicrotask === 'function') {\n return globalQueueMicrotask;\n }\n const resolvedPromise = promiseResolvedWith(undefined);\n return (fn) => PerformPromiseThen(resolvedPromise, fn);\n })();\n function reflectCall(F, V, args) {\n if (typeof F !== 'function') {\n throw new TypeError('Argument is not a function');\n }\n return Function.prototype.apply.call(F, V, args);\n }\n function promiseCall(F, V, args) {\n try {\n return promiseResolvedWith(reflectCall(F, V, args));\n }\n catch (value) {\n return promiseRejectedWith(value);\n }\n }\n\n // Original from Chromium\n // https://chromium.googlesource.com/chromium/src/+/0aee4434a4dba42a42abaea9bfbc0cd196a63bc1/third_party/blink/renderer/core/streams/SimpleQueue.js\n const QUEUE_MAX_ARRAY_SIZE = 16384;\n /**\n * Simple queue structure.\n *\n * Avoids scalability issues with using a packed array directly by using\n * multiple arrays in a linked list and keeping the array size bounded.\n */\n class SimpleQueue {\n constructor() {\n this._cursor = 0;\n this._size = 0;\n // _front and _back are always defined.\n this._front = {\n _elements: [],\n _next: undefined\n };\n this._back = this._front;\n // The cursor is used to avoid calling Array.shift().\n // It contains the index of the front element of the array 
inside the\n // front-most node. It is always in the range [0, QUEUE_MAX_ARRAY_SIZE).\n this._cursor = 0;\n // When there is only one node, size === elements.length - cursor.\n this._size = 0;\n }\n get length() {\n return this._size;\n }\n // For exception safety, this method is structured in order:\n // 1. Read state\n // 2. Calculate required state mutations\n // 3. Perform state mutations\n push(element) {\n const oldBack = this._back;\n let newBack = oldBack;\n if (oldBack._elements.length === QUEUE_MAX_ARRAY_SIZE - 1) {\n newBack = {\n _elements: [],\n _next: undefined\n };\n }\n // push() is the mutation most likely to throw an exception, so it\n // goes first.\n oldBack._elements.push(element);\n if (newBack !== oldBack) {\n this._back = newBack;\n oldBack._next = newBack;\n }\n ++this._size;\n }\n // Like push(), shift() follows the read -> calculate -> mutate pattern for\n // exception safety.\n shift() { // must not be called on an empty queue\n const oldFront = this._front;\n let newFront = oldFront;\n const oldCursor = this._cursor;\n let newCursor = oldCursor + 1;\n const elements = oldFront._elements;\n const element = elements[oldCursor];\n if (newCursor === QUEUE_MAX_ARRAY_SIZE) {\n newFront = oldFront._next;\n newCursor = 0;\n }\n // No mutations before this point.\n --this._size;\n this._cursor = newCursor;\n if (oldFront !== newFront) {\n this._front = newFront;\n }\n // Permit shifted element to be garbage collected.\n elements[oldCursor] = undefined;\n return element;\n }\n // The tricky thing about forEach() is that it can be called\n // re-entrantly. The queue may be mutated inside the callback. It is easy to\n // see that push() within the callback has no negative effects since the end\n // of the queue is checked for on every iteration. If shift() is called\n // repeatedly within the callback then the next iteration may return an\n // element that has been removed. 
In this case the callback will be called\n // with undefined values until we either \"catch up\" with elements that still\n // exist or reach the back of the queue.\n forEach(callback) {\n let i = this._cursor;\n let node = this._front;\n let elements = node._elements;\n while (i !== elements.length || node._next !== undefined) {\n if (i === elements.length) {\n node = node._next;\n elements = node._elements;\n i = 0;\n if (elements.length === 0) {\n break;\n }\n }\n callback(elements[i]);\n ++i;\n }\n }\n // Return the element that would be returned if shift() was called now,\n // without modifying the queue.\n peek() { // must not be called on an empty queue\n const front = this._front;\n const cursor = this._cursor;\n return front._elements[cursor];\n }\n }\n\n function ReadableStreamReaderGenericInitialize(reader, stream) {\n reader._ownerReadableStream = stream;\n stream._reader = reader;\n if (stream._state === 'readable') {\n defaultReaderClosedPromiseInitialize(reader);\n }\n else if (stream._state === 'closed') {\n defaultReaderClosedPromiseInitializeAsResolved(reader);\n }\n else {\n defaultReaderClosedPromiseInitializeAsRejected(reader, stream._storedError);\n }\n }\n // A client of ReadableStreamDefaultReader and ReadableStreamBYOBReader may use these functions directly to bypass state\n // check.\n function ReadableStreamReaderGenericCancel(reader, reason) {\n const stream = reader._ownerReadableStream;\n return ReadableStreamCancel(stream, reason);\n }\n function ReadableStreamReaderGenericRelease(reader) {\n if (reader._ownerReadableStream._state === 'readable') {\n defaultReaderClosedPromiseReject(reader, new TypeError(`Reader was released and can no longer be used to monitor the stream's closedness`));\n }\n else {\n defaultReaderClosedPromiseResetToRejected(reader, new TypeError(`Reader was released and can no longer be used to monitor the stream's closedness`));\n }\n reader._ownerReadableStream._reader = undefined;\n reader._ownerReadableStream = undefined;\n }\n // Helper functions for the readers.\n function readerLockException(name) {\n return new TypeError('Cannot ' + name + ' a stream using a released reader');\n }\n // Helper functions for the ReadableStreamDefaultReader.\n function defaultReaderClosedPromiseInitialize(reader) {\n reader._closedPromise = newPromise((resolve, reject) => {\n reader._closedPromise_resolve = resolve;\n reader._closedPromise_reject = reject;\n });\n }\n function defaultReaderClosedPromiseInitializeAsRejected(reader, reason) {\n defaultReaderClosedPromiseInitialize(reader);\n defaultReaderClosedPromiseReject(reader, reason);\n }\n function defaultReaderClosedPromiseInitializeAsResolved(reader) {\n defaultReaderClosedPromiseInitialize(reader);\n defaultReaderClosedPromiseResolve(reader);\n }\n function defaultReaderClosedPromiseReject(reader, reason) {\n if (reader._closedPromise_reject === undefined) {\n return;\n }\n setPromiseIsHandledToTrue(reader._closedPromise);\n reader._closedPromise_reject(reason);\n reader._closedPromise_resolve = undefined;\n reader._closedPromise_reject = undefined;\n }\n function defaultReaderClosedPromiseResetToRejected(reader, reason) {\n defaultReaderClosedPromiseInitializeAsRejected(reader, reason);\n }\n function defaultReaderClosedPromiseResolve(reader) {\n if (reader._closedPromise_resolve === undefined) {\n return;\n }\n reader._closedPromise_resolve(undefined);\n reader._closedPromise_resolve = undefined;\n reader._closedPromise_reject = undefined;\n }\n\n const AbortSteps = 
SymbolPolyfill('[[AbortSteps]]');\n const ErrorSteps = SymbolPolyfill('[[ErrorSteps]]');\n const CancelSteps = SymbolPolyfill('[[CancelSteps]]');\n const PullSteps = SymbolPolyfill('[[PullSteps]]');\n\n /// \n // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isFinite#Polyfill\n const NumberIsFinite = Number.isFinite || function (x) {\n return typeof x === 'number' && isFinite(x);\n };\n\n /// \n // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/trunc#Polyfill\n const MathTrunc = Math.trunc || function (v) {\n return v < 0 ? Math.ceil(v) : Math.floor(v);\n };\n\n // https://heycam.github.io/webidl/#idl-dictionaries\n function isDictionary(x) {\n return typeof x === 'object' || typeof x === 'function';\n }\n function assertDictionary(obj, context) {\n if (obj !== undefined && !isDictionary(obj)) {\n throw new TypeError(`${context} is not an object.`);\n }\n }\n // https://heycam.github.io/webidl/#idl-callback-functions\n function assertFunction(x, context) {\n if (typeof x !== 'function') {\n throw new TypeError(`${context} is not a function.`);\n }\n }\n // https://heycam.github.io/webidl/#idl-object\n function isObject(x) {\n return (typeof x === 'object' && x !== null) || typeof x === 'function';\n }\n function assertObject(x, context) {\n if (!isObject(x)) {\n throw new TypeError(`${context} is not an object.`);\n }\n }\n function assertRequiredArgument(x, position, context) {\n if (x === undefined) {\n throw new TypeError(`Parameter ${position} is required in '${context}'.`);\n }\n }\n function assertRequiredField(x, field, context) {\n if (x === undefined) {\n throw new TypeError(`${field} is required in '${context}'.`);\n }\n }\n // https://heycam.github.io/webidl/#idl-unrestricted-double\n function convertUnrestrictedDouble(value) {\n return Number(value);\n }\n function censorNegativeZero(x) {\n return x === 0 ? 
0 : x;\n }\n function integerPart(x) {\n return censorNegativeZero(MathTrunc(x));\n }\n // https://heycam.github.io/webidl/#idl-unsigned-long-long\n function convertUnsignedLongLongWithEnforceRange(value, context) {\n const lowerBound = 0;\n const upperBound = Number.MAX_SAFE_INTEGER;\n let x = Number(value);\n x = censorNegativeZero(x);\n if (!NumberIsFinite(x)) {\n throw new TypeError(`${context} is not a finite number`);\n }\n x = integerPart(x);\n if (x < lowerBound || x > upperBound) {\n throw new TypeError(`${context} is outside the accepted range of ${lowerBound} to ${upperBound}, inclusive`);\n }\n if (!NumberIsFinite(x) || x === 0) {\n return 0;\n }\n // TODO Use BigInt if supported?\n // let xBigInt = BigInt(integerPart(x));\n // xBigInt = BigInt.asUintN(64, xBigInt);\n // return Number(xBigInt);\n return x;\n }\n\n function assertReadableStream(x, context) {\n if (!IsReadableStream(x)) {\n throw new TypeError(`${context} is not a ReadableStream.`);\n }\n }\n\n // Abstract operations for the ReadableStream.\n function AcquireReadableStreamDefaultReader(stream) {\n return new ReadableStreamDefaultReader(stream);\n }\n // ReadableStream API exposed for controllers.\n function ReadableStreamAddReadRequest(stream, readRequest) {\n stream._reader._readRequests.push(readRequest);\n }\n function ReadableStreamFulfillReadRequest(stream, chunk, done) {\n const reader = stream._reader;\n const readRequest = reader._readRequests.shift();\n if (done) {\n readRequest._closeSteps();\n }\n else {\n readRequest._chunkSteps(chunk);\n }\n }\n function ReadableStreamGetNumReadRequests(stream) {\n return stream._reader._readRequests.length;\n }\n function ReadableStreamHasDefaultReader(stream) {\n const reader = stream._reader;\n if (reader === undefined) {\n return false;\n }\n if (!IsReadableStreamDefaultReader(reader)) {\n return false;\n }\n return true;\n }\n /**\n * A default reader vended by a {@link ReadableStream}.\n *\n * @public\n */\n class ReadableStreamDefaultReader {\n constructor(stream) {\n assertRequiredArgument(stream, 1, 'ReadableStreamDefaultReader');\n assertReadableStream(stream, 'First parameter');\n if (IsReadableStreamLocked(stream)) {\n throw new TypeError('This stream has already been locked for exclusive reading by another reader');\n }\n ReadableStreamReaderGenericInitialize(this, stream);\n this._readRequests = new SimpleQueue();\n }\n /**\n * Returns a promise that will be fulfilled when the stream becomes closed,\n * or rejected if the stream ever errors or the reader's lock is released before the stream finishes closing.\n */\n get closed() {\n if (!IsReadableStreamDefaultReader(this)) {\n return promiseRejectedWith(defaultReaderBrandCheckException('closed'));\n }\n return this._closedPromise;\n }\n /**\n * If the reader is active, behaves the same as {@link ReadableStream.cancel | stream.cancel(reason)}.\n */\n cancel(reason = undefined) {\n if (!IsReadableStreamDefaultReader(this)) {\n return promiseRejectedWith(defaultReaderBrandCheckException('cancel'));\n }\n if (this._ownerReadableStream === undefined) {\n return promiseRejectedWith(readerLockException('cancel'));\n }\n return ReadableStreamReaderGenericCancel(this, reason);\n }\n /**\n * Returns a promise that allows access to the next chunk from the stream's internal queue, if available.\n *\n * If reading a chunk causes the queue to become empty, more data will be pulled from the underlying source.\n */\n read() {\n if (!IsReadableStreamDefaultReader(this)) {\n return 
promiseRejectedWith(defaultReaderBrandCheckException('read'));\n }\n if (this._ownerReadableStream === undefined) {\n return promiseRejectedWith(readerLockException('read from'));\n }\n let resolvePromise;\n let rejectPromise;\n const promise = newPromise((resolve, reject) => {\n resolvePromise = resolve;\n rejectPromise = reject;\n });\n const readRequest = {\n _chunkSteps: chunk => resolvePromise({ value: chunk, done: false }),\n _closeSteps: () => resolvePromise({ value: undefined, done: true }),\n _errorSteps: e => rejectPromise(e)\n };\n ReadableStreamDefaultReaderRead(this, readRequest);\n return promise;\n }\n /**\n * Releases the reader's lock on the corresponding stream. After the lock is released, the reader is no longer active.\n * If the associated stream is errored when the lock is released, the reader will appear errored in the same way\n * from now on; otherwise, the reader will appear closed.\n *\n * A reader's lock cannot be released while it still has a pending read request, i.e., if a promise returned by\n * the reader's {@link ReadableStreamDefaultReader.read | read()} method has not yet been settled. Attempting to\n * do so will throw a `TypeError` and leave the reader locked to the stream.\n */\n releaseLock() {\n if (!IsReadableStreamDefaultReader(this)) {\n throw defaultReaderBrandCheckException('releaseLock');\n }\n if (this._ownerReadableStream === undefined) {\n return;\n }\n if (this._readRequests.length > 0) {\n throw new TypeError('Tried to release a reader lock when that reader has pending read() calls un-settled');\n }\n ReadableStreamReaderGenericRelease(this);\n }\n }\n Object.defineProperties(ReadableStreamDefaultReader.prototype, {\n cancel: { enumerable: true },\n read: { enumerable: true },\n releaseLock: { enumerable: true },\n closed: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(ReadableStreamDefaultReader.prototype, SymbolPolyfill.toStringTag, {\n value: 'ReadableStreamDefaultReader',\n configurable: true\n });\n }\n // Abstract operations for the readers.\n function IsReadableStreamDefaultReader(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_readRequests')) {\n return false;\n }\n return x instanceof ReadableStreamDefaultReader;\n }\n function ReadableStreamDefaultReaderRead(reader, readRequest) {\n const stream = reader._ownerReadableStream;\n stream._disturbed = true;\n if (stream._state === 'closed') {\n readRequest._closeSteps();\n }\n else if (stream._state === 'errored') {\n readRequest._errorSteps(stream._storedError);\n }\n else {\n stream._readableStreamController[PullSteps](readRequest);\n }\n }\n // Helper functions for the ReadableStreamDefaultReader.\n function defaultReaderBrandCheckException(name) {\n return new TypeError(`ReadableStreamDefaultReader.prototype.${name} can only be used on a ReadableStreamDefaultReader`);\n }\n\n /// \n /* eslint-disable @typescript-eslint/no-empty-function */\n const AsyncIteratorPrototype = Object.getPrototypeOf(Object.getPrototypeOf(async function* () { }).prototype);\n\n /// \n class ReadableStreamAsyncIteratorImpl {\n constructor(reader, preventCancel) {\n this._ongoingPromise = undefined;\n this._isFinished = false;\n this._reader = reader;\n this._preventCancel = preventCancel;\n }\n next() {\n const nextSteps = () => this._nextSteps();\n this._ongoingPromise = this._ongoingPromise ?\n transformPromiseWith(this._ongoingPromise, nextSteps, nextSteps) :\n nextSteps();\n return 
this._ongoingPromise;\n }\n return(value) {\n const returnSteps = () => this._returnSteps(value);\n return this._ongoingPromise ?\n transformPromiseWith(this._ongoingPromise, returnSteps, returnSteps) :\n returnSteps();\n }\n _nextSteps() {\n if (this._isFinished) {\n return Promise.resolve({ value: undefined, done: true });\n }\n const reader = this._reader;\n if (reader._ownerReadableStream === undefined) {\n return promiseRejectedWith(readerLockException('iterate'));\n }\n let resolvePromise;\n let rejectPromise;\n const promise = newPromise((resolve, reject) => {\n resolvePromise = resolve;\n rejectPromise = reject;\n });\n const readRequest = {\n _chunkSteps: chunk => {\n this._ongoingPromise = undefined;\n // This needs to be delayed by one microtask, otherwise we stop pulling too early which breaks a test.\n // FIXME Is this a bug in the specification, or in the test?\n queueMicrotask(() => resolvePromise({ value: chunk, done: false }));\n },\n _closeSteps: () => {\n this._ongoingPromise = undefined;\n this._isFinished = true;\n ReadableStreamReaderGenericRelease(reader);\n resolvePromise({ value: undefined, done: true });\n },\n _errorSteps: reason => {\n this._ongoingPromise = undefined;\n this._isFinished = true;\n ReadableStreamReaderGenericRelease(reader);\n rejectPromise(reason);\n }\n };\n ReadableStreamDefaultReaderRead(reader, readRequest);\n return promise;\n }\n _returnSteps(value) {\n if (this._isFinished) {\n return Promise.resolve({ value, done: true });\n }\n this._isFinished = true;\n const reader = this._reader;\n if (reader._ownerReadableStream === undefined) {\n return promiseRejectedWith(readerLockException('finish iterating'));\n }\n if (!this._preventCancel) {\n const result = ReadableStreamReaderGenericCancel(reader, value);\n ReadableStreamReaderGenericRelease(reader);\n return transformPromiseWith(result, () => ({ value, done: true }));\n }\n ReadableStreamReaderGenericRelease(reader);\n return promiseResolvedWith({ value, done: true });\n }\n }\n const ReadableStreamAsyncIteratorPrototype = {\n next() {\n if (!IsReadableStreamAsyncIterator(this)) {\n return promiseRejectedWith(streamAsyncIteratorBrandCheckException('next'));\n }\n return this._asyncIteratorImpl.next();\n },\n return(value) {\n if (!IsReadableStreamAsyncIterator(this)) {\n return promiseRejectedWith(streamAsyncIteratorBrandCheckException('return'));\n }\n return this._asyncIteratorImpl.return(value);\n }\n };\n if (AsyncIteratorPrototype !== undefined) {\n Object.setPrototypeOf(ReadableStreamAsyncIteratorPrototype, AsyncIteratorPrototype);\n }\n // Abstract operations for the ReadableStream.\n function AcquireReadableStreamAsyncIterator(stream, preventCancel) {\n const reader = AcquireReadableStreamDefaultReader(stream);\n const impl = new ReadableStreamAsyncIteratorImpl(reader, preventCancel);\n const iterator = Object.create(ReadableStreamAsyncIteratorPrototype);\n iterator._asyncIteratorImpl = impl;\n return iterator;\n }\n function IsReadableStreamAsyncIterator(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_asyncIteratorImpl')) {\n return false;\n }\n try {\n // noinspection SuspiciousTypeOfGuard\n return x._asyncIteratorImpl instanceof\n ReadableStreamAsyncIteratorImpl;\n }\n catch (_a) {\n return false;\n }\n }\n // Helper functions for the ReadableStream.\n function streamAsyncIteratorBrandCheckException(name) {\n return new TypeError(`ReadableStreamAsyncIterator.${name} can only be used on a ReadableSteamAsyncIterator`);\n }\n\n 
/// \n // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isNaN#Polyfill\n const NumberIsNaN = Number.isNaN || function (x) {\n // eslint-disable-next-line no-self-compare\n return x !== x;\n };\n\n function CreateArrayFromList(elements) {\n // We use arrays to represent lists, so this is basically a no-op.\n // Do a slice though just in case we happen to depend on the unique-ness.\n return elements.slice();\n }\n function CopyDataBlockBytes(dest, destOffset, src, srcOffset, n) {\n new Uint8Array(dest).set(new Uint8Array(src, srcOffset, n), destOffset);\n }\n // Not implemented correctly\n function TransferArrayBuffer(O) {\n return O;\n }\n // Not implemented correctly\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n function IsDetachedBuffer(O) {\n return false;\n }\n function ArrayBufferSlice(buffer, begin, end) {\n // ArrayBuffer.prototype.slice is not available on IE10\n // https://www.caniuse.com/mdn-javascript_builtins_arraybuffer_slice\n if (buffer.slice) {\n return buffer.slice(begin, end);\n }\n const length = end - begin;\n const slice = new ArrayBuffer(length);\n CopyDataBlockBytes(slice, 0, buffer, begin, length);\n return slice;\n }\n\n function IsNonNegativeNumber(v) {\n if (typeof v !== 'number') {\n return false;\n }\n if (NumberIsNaN(v)) {\n return false;\n }\n if (v < 0) {\n return false;\n }\n return true;\n }\n function CloneAsUint8Array(O) {\n const buffer = ArrayBufferSlice(O.buffer, O.byteOffset, O.byteOffset + O.byteLength);\n return new Uint8Array(buffer);\n }\n\n function DequeueValue(container) {\n const pair = container._queue.shift();\n container._queueTotalSize -= pair.size;\n if (container._queueTotalSize < 0) {\n container._queueTotalSize = 0;\n }\n return pair.value;\n }\n function EnqueueValueWithSize(container, value, size) {\n if (!IsNonNegativeNumber(size) || size === Infinity) {\n throw new RangeError('Size must be a finite, non-NaN, non-negative number.');\n }\n container._queue.push({ value, size });\n container._queueTotalSize += size;\n }\n function PeekQueueValue(container) {\n const pair = container._queue.peek();\n return pair.value;\n }\n function ResetQueue(container) {\n container._queue = new SimpleQueue();\n container._queueTotalSize = 0;\n }\n\n /**\n * A pull-into request in a {@link ReadableByteStreamController}.\n *\n * @public\n */\n class ReadableStreamBYOBRequest {\n constructor() {\n throw new TypeError('Illegal constructor');\n }\n /**\n * Returns the view for writing in to, or `null` if the BYOB request has already been responded to.\n */\n get view() {\n if (!IsReadableStreamBYOBRequest(this)) {\n throw byobRequestBrandCheckException('view');\n }\n return this._view;\n }\n respond(bytesWritten) {\n if (!IsReadableStreamBYOBRequest(this)) {\n throw byobRequestBrandCheckException('respond');\n }\n assertRequiredArgument(bytesWritten, 1, 'respond');\n bytesWritten = convertUnsignedLongLongWithEnforceRange(bytesWritten, 'First parameter');\n if (this._associatedReadableByteStreamController === undefined) {\n throw new TypeError('This BYOB request has been invalidated');\n }\n if (IsDetachedBuffer(this._view.buffer)) ;\n ReadableByteStreamControllerRespond(this._associatedReadableByteStreamController, bytesWritten);\n }\n respondWithNewView(view) {\n if (!IsReadableStreamBYOBRequest(this)) {\n throw byobRequestBrandCheckException('respondWithNewView');\n }\n assertRequiredArgument(view, 1, 'respondWithNewView');\n if (!ArrayBuffer.isView(view)) {\n throw new TypeError('You can 
only respond with array buffer views');\n }\n if (this._associatedReadableByteStreamController === undefined) {\n throw new TypeError('This BYOB request has been invalidated');\n }\n if (IsDetachedBuffer(view.buffer)) ;\n ReadableByteStreamControllerRespondWithNewView(this._associatedReadableByteStreamController, view);\n }\n }\n Object.defineProperties(ReadableStreamBYOBRequest.prototype, {\n respond: { enumerable: true },\n respondWithNewView: { enumerable: true },\n view: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(ReadableStreamBYOBRequest.prototype, SymbolPolyfill.toStringTag, {\n value: 'ReadableStreamBYOBRequest',\n configurable: true\n });\n }\n /**\n * Allows control of a {@link ReadableStream | readable byte stream}'s state and internal queue.\n *\n * @public\n */\n class ReadableByteStreamController {\n constructor() {\n throw new TypeError('Illegal constructor');\n }\n /**\n * Returns the current BYOB pull request, or `null` if there isn't one.\n */\n get byobRequest() {\n if (!IsReadableByteStreamController(this)) {\n throw byteStreamControllerBrandCheckException('byobRequest');\n }\n return ReadableByteStreamControllerGetBYOBRequest(this);\n }\n /**\n * Returns the desired size to fill the controlled stream's internal queue. It can be negative, if the queue is\n * over-full. An underlying byte source ought to use this information to determine when and how to apply backpressure.\n */\n get desiredSize() {\n if (!IsReadableByteStreamController(this)) {\n throw byteStreamControllerBrandCheckException('desiredSize');\n }\n return ReadableByteStreamControllerGetDesiredSize(this);\n }\n /**\n * Closes the controlled readable stream. Consumers will still be able to read any previously-enqueued chunks from\n * the stream, but once those are read, the stream will become closed.\n */\n close() {\n if (!IsReadableByteStreamController(this)) {\n throw byteStreamControllerBrandCheckException('close');\n }\n if (this._closeRequested) {\n throw new TypeError('The stream has already been closed; do not close it again!');\n }\n const state = this._controlledReadableByteStream._state;\n if (state !== 'readable') {\n throw new TypeError(`The stream (in ${state} state) is not in the readable state and cannot be closed`);\n }\n ReadableByteStreamControllerClose(this);\n }\n enqueue(chunk) {\n if (!IsReadableByteStreamController(this)) {\n throw byteStreamControllerBrandCheckException('enqueue');\n }\n assertRequiredArgument(chunk, 1, 'enqueue');\n if (!ArrayBuffer.isView(chunk)) {\n throw new TypeError('chunk must be an array buffer view');\n }\n if (chunk.byteLength === 0) {\n throw new TypeError('chunk must have non-zero byteLength');\n }\n if (chunk.buffer.byteLength === 0) {\n throw new TypeError(`chunk's buffer must have non-zero byteLength`);\n }\n if (this._closeRequested) {\n throw new TypeError('stream is closed or draining');\n }\n const state = this._controlledReadableByteStream._state;\n if (state !== 'readable') {\n throw new TypeError(`The stream (in ${state} state) is not in the readable state and cannot be enqueued to`);\n }\n ReadableByteStreamControllerEnqueue(this, chunk);\n }\n /**\n * Errors the controlled readable stream, making all future interactions with it fail with the given error `e`.\n */\n error(e = undefined) {\n if (!IsReadableByteStreamController(this)) {\n throw byteStreamControllerBrandCheckException('error');\n }\n ReadableByteStreamControllerError(this, e);\n }\n /** @internal */\n 
[CancelSteps](reason) {\n ReadableByteStreamControllerClearPendingPullIntos(this);\n ResetQueue(this);\n const result = this._cancelAlgorithm(reason);\n ReadableByteStreamControllerClearAlgorithms(this);\n return result;\n }\n /** @internal */\n [PullSteps](readRequest) {\n const stream = this._controlledReadableByteStream;\n if (this._queueTotalSize > 0) {\n const entry = this._queue.shift();\n this._queueTotalSize -= entry.byteLength;\n ReadableByteStreamControllerHandleQueueDrain(this);\n const view = new Uint8Array(entry.buffer, entry.byteOffset, entry.byteLength);\n readRequest._chunkSteps(view);\n return;\n }\n const autoAllocateChunkSize = this._autoAllocateChunkSize;\n if (autoAllocateChunkSize !== undefined) {\n let buffer;\n try {\n buffer = new ArrayBuffer(autoAllocateChunkSize);\n }\n catch (bufferE) {\n readRequest._errorSteps(bufferE);\n return;\n }\n const pullIntoDescriptor = {\n buffer,\n bufferByteLength: autoAllocateChunkSize,\n byteOffset: 0,\n byteLength: autoAllocateChunkSize,\n bytesFilled: 0,\n elementSize: 1,\n viewConstructor: Uint8Array,\n readerType: 'default'\n };\n this._pendingPullIntos.push(pullIntoDescriptor);\n }\n ReadableStreamAddReadRequest(stream, readRequest);\n ReadableByteStreamControllerCallPullIfNeeded(this);\n }\n }\n Object.defineProperties(ReadableByteStreamController.prototype, {\n close: { enumerable: true },\n enqueue: { enumerable: true },\n error: { enumerable: true },\n byobRequest: { enumerable: true },\n desiredSize: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(ReadableByteStreamController.prototype, SymbolPolyfill.toStringTag, {\n value: 'ReadableByteStreamController',\n configurable: true\n });\n }\n // Abstract operations for the ReadableByteStreamController.\n function IsReadableByteStreamController(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_controlledReadableByteStream')) {\n return false;\n }\n return x instanceof ReadableByteStreamController;\n }\n function IsReadableStreamBYOBRequest(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_associatedReadableByteStreamController')) {\n return false;\n }\n return x instanceof ReadableStreamBYOBRequest;\n }\n function ReadableByteStreamControllerCallPullIfNeeded(controller) {\n const shouldPull = ReadableByteStreamControllerShouldCallPull(controller);\n if (!shouldPull) {\n return;\n }\n if (controller._pulling) {\n controller._pullAgain = true;\n return;\n }\n controller._pulling = true;\n // TODO: Test controller argument\n const pullPromise = controller._pullAlgorithm();\n uponPromise(pullPromise, () => {\n controller._pulling = false;\n if (controller._pullAgain) {\n controller._pullAgain = false;\n ReadableByteStreamControllerCallPullIfNeeded(controller);\n }\n }, e => {\n ReadableByteStreamControllerError(controller, e);\n });\n }\n function ReadableByteStreamControllerClearPendingPullIntos(controller) {\n ReadableByteStreamControllerInvalidateBYOBRequest(controller);\n controller._pendingPullIntos = new SimpleQueue();\n }\n function ReadableByteStreamControllerCommitPullIntoDescriptor(stream, pullIntoDescriptor) {\n let done = false;\n if (stream._state === 'closed') {\n done = true;\n }\n const filledView = ReadableByteStreamControllerConvertPullIntoDescriptor(pullIntoDescriptor);\n if (pullIntoDescriptor.readerType === 'default') {\n ReadableStreamFulfillReadRequest(stream, filledView, done);\n }\n else {\n 
ReadableStreamFulfillReadIntoRequest(stream, filledView, done);\n }\n }\n function ReadableByteStreamControllerConvertPullIntoDescriptor(pullIntoDescriptor) {\n const bytesFilled = pullIntoDescriptor.bytesFilled;\n const elementSize = pullIntoDescriptor.elementSize;\n return new pullIntoDescriptor.viewConstructor(pullIntoDescriptor.buffer, pullIntoDescriptor.byteOffset, bytesFilled / elementSize);\n }\n function ReadableByteStreamControllerEnqueueChunkToQueue(controller, buffer, byteOffset, byteLength) {\n controller._queue.push({ buffer, byteOffset, byteLength });\n controller._queueTotalSize += byteLength;\n }\n function ReadableByteStreamControllerFillPullIntoDescriptorFromQueue(controller, pullIntoDescriptor) {\n const elementSize = pullIntoDescriptor.elementSize;\n const currentAlignedBytes = pullIntoDescriptor.bytesFilled - pullIntoDescriptor.bytesFilled % elementSize;\n const maxBytesToCopy = Math.min(controller._queueTotalSize, pullIntoDescriptor.byteLength - pullIntoDescriptor.bytesFilled);\n const maxBytesFilled = pullIntoDescriptor.bytesFilled + maxBytesToCopy;\n const maxAlignedBytes = maxBytesFilled - maxBytesFilled % elementSize;\n let totalBytesToCopyRemaining = maxBytesToCopy;\n let ready = false;\n if (maxAlignedBytes > currentAlignedBytes) {\n totalBytesToCopyRemaining = maxAlignedBytes - pullIntoDescriptor.bytesFilled;\n ready = true;\n }\n const queue = controller._queue;\n while (totalBytesToCopyRemaining > 0) {\n const headOfQueue = queue.peek();\n const bytesToCopy = Math.min(totalBytesToCopyRemaining, headOfQueue.byteLength);\n const destStart = pullIntoDescriptor.byteOffset + pullIntoDescriptor.bytesFilled;\n CopyDataBlockBytes(pullIntoDescriptor.buffer, destStart, headOfQueue.buffer, headOfQueue.byteOffset, bytesToCopy);\n if (headOfQueue.byteLength === bytesToCopy) {\n queue.shift();\n }\n else {\n headOfQueue.byteOffset += bytesToCopy;\n headOfQueue.byteLength -= bytesToCopy;\n }\n controller._queueTotalSize -= bytesToCopy;\n ReadableByteStreamControllerFillHeadPullIntoDescriptor(controller, bytesToCopy, pullIntoDescriptor);\n totalBytesToCopyRemaining -= bytesToCopy;\n }\n return ready;\n }\n function ReadableByteStreamControllerFillHeadPullIntoDescriptor(controller, size, pullIntoDescriptor) {\n pullIntoDescriptor.bytesFilled += size;\n }\n function ReadableByteStreamControllerHandleQueueDrain(controller) {\n if (controller._queueTotalSize === 0 && controller._closeRequested) {\n ReadableByteStreamControllerClearAlgorithms(controller);\n ReadableStreamClose(controller._controlledReadableByteStream);\n }\n else {\n ReadableByteStreamControllerCallPullIfNeeded(controller);\n }\n }\n function ReadableByteStreamControllerInvalidateBYOBRequest(controller) {\n if (controller._byobRequest === null) {\n return;\n }\n controller._byobRequest._associatedReadableByteStreamController = undefined;\n controller._byobRequest._view = null;\n controller._byobRequest = null;\n }\n function ReadableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(controller) {\n while (controller._pendingPullIntos.length > 0) {\n if (controller._queueTotalSize === 0) {\n return;\n }\n const pullIntoDescriptor = controller._pendingPullIntos.peek();\n if (ReadableByteStreamControllerFillPullIntoDescriptorFromQueue(controller, pullIntoDescriptor)) {\n ReadableByteStreamControllerShiftPendingPullInto(controller);\n ReadableByteStreamControllerCommitPullIntoDescriptor(controller._controlledReadableByteStream, pullIntoDescriptor);\n }\n }\n }\n function 
ReadableByteStreamControllerPullInto(controller, view, readIntoRequest) {\n const stream = controller._controlledReadableByteStream;\n let elementSize = 1;\n if (view.constructor !== DataView) {\n elementSize = view.constructor.BYTES_PER_ELEMENT;\n }\n const ctor = view.constructor;\n // try {\n const buffer = TransferArrayBuffer(view.buffer);\n // } catch (e) {\n // readIntoRequest._errorSteps(e);\n // return;\n // }\n const pullIntoDescriptor = {\n buffer,\n bufferByteLength: buffer.byteLength,\n byteOffset: view.byteOffset,\n byteLength: view.byteLength,\n bytesFilled: 0,\n elementSize,\n viewConstructor: ctor,\n readerType: 'byob'\n };\n if (controller._pendingPullIntos.length > 0) {\n controller._pendingPullIntos.push(pullIntoDescriptor);\n // No ReadableByteStreamControllerCallPullIfNeeded() call since:\n // - No change happens on desiredSize\n // - The source has already been notified of that there's at least 1 pending read(view)\n ReadableStreamAddReadIntoRequest(stream, readIntoRequest);\n return;\n }\n if (stream._state === 'closed') {\n const emptyView = new ctor(pullIntoDescriptor.buffer, pullIntoDescriptor.byteOffset, 0);\n readIntoRequest._closeSteps(emptyView);\n return;\n }\n if (controller._queueTotalSize > 0) {\n if (ReadableByteStreamControllerFillPullIntoDescriptorFromQueue(controller, pullIntoDescriptor)) {\n const filledView = ReadableByteStreamControllerConvertPullIntoDescriptor(pullIntoDescriptor);\n ReadableByteStreamControllerHandleQueueDrain(controller);\n readIntoRequest._chunkSteps(filledView);\n return;\n }\n if (controller._closeRequested) {\n const e = new TypeError('Insufficient bytes to fill elements in the given buffer');\n ReadableByteStreamControllerError(controller, e);\n readIntoRequest._errorSteps(e);\n return;\n }\n }\n controller._pendingPullIntos.push(pullIntoDescriptor);\n ReadableStreamAddReadIntoRequest(stream, readIntoRequest);\n ReadableByteStreamControllerCallPullIfNeeded(controller);\n }\n function ReadableByteStreamControllerRespondInClosedState(controller, firstDescriptor) {\n const stream = controller._controlledReadableByteStream;\n if (ReadableStreamHasBYOBReader(stream)) {\n while (ReadableStreamGetNumReadIntoRequests(stream) > 0) {\n const pullIntoDescriptor = ReadableByteStreamControllerShiftPendingPullInto(controller);\n ReadableByteStreamControllerCommitPullIntoDescriptor(stream, pullIntoDescriptor);\n }\n }\n }\n function ReadableByteStreamControllerRespondInReadableState(controller, bytesWritten, pullIntoDescriptor) {\n ReadableByteStreamControllerFillHeadPullIntoDescriptor(controller, bytesWritten, pullIntoDescriptor);\n if (pullIntoDescriptor.bytesFilled < pullIntoDescriptor.elementSize) {\n return;\n }\n ReadableByteStreamControllerShiftPendingPullInto(controller);\n const remainderSize = pullIntoDescriptor.bytesFilled % pullIntoDescriptor.elementSize;\n if (remainderSize > 0) {\n const end = pullIntoDescriptor.byteOffset + pullIntoDescriptor.bytesFilled;\n const remainder = ArrayBufferSlice(pullIntoDescriptor.buffer, end - remainderSize, end);\n ReadableByteStreamControllerEnqueueChunkToQueue(controller, remainder, 0, remainder.byteLength);\n }\n pullIntoDescriptor.bytesFilled -= remainderSize;\n ReadableByteStreamControllerCommitPullIntoDescriptor(controller._controlledReadableByteStream, pullIntoDescriptor);\n ReadableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(controller);\n }\n function ReadableByteStreamControllerRespondInternal(controller, bytesWritten) {\n const firstDescriptor = 
controller._pendingPullIntos.peek();\n ReadableByteStreamControllerInvalidateBYOBRequest(controller);\n const state = controller._controlledReadableByteStream._state;\n if (state === 'closed') {\n ReadableByteStreamControllerRespondInClosedState(controller);\n }\n else {\n ReadableByteStreamControllerRespondInReadableState(controller, bytesWritten, firstDescriptor);\n }\n ReadableByteStreamControllerCallPullIfNeeded(controller);\n }\n function ReadableByteStreamControllerShiftPendingPullInto(controller) {\n const descriptor = controller._pendingPullIntos.shift();\n return descriptor;\n }\n function ReadableByteStreamControllerShouldCallPull(controller) {\n const stream = controller._controlledReadableByteStream;\n if (stream._state !== 'readable') {\n return false;\n }\n if (controller._closeRequested) {\n return false;\n }\n if (!controller._started) {\n return false;\n }\n if (ReadableStreamHasDefaultReader(stream) && ReadableStreamGetNumReadRequests(stream) > 0) {\n return true;\n }\n if (ReadableStreamHasBYOBReader(stream) && ReadableStreamGetNumReadIntoRequests(stream) > 0) {\n return true;\n }\n const desiredSize = ReadableByteStreamControllerGetDesiredSize(controller);\n if (desiredSize > 0) {\n return true;\n }\n return false;\n }\n function ReadableByteStreamControllerClearAlgorithms(controller) {\n controller._pullAlgorithm = undefined;\n controller._cancelAlgorithm = undefined;\n }\n // A client of ReadableByteStreamController may use these functions directly to bypass state check.\n function ReadableByteStreamControllerClose(controller) {\n const stream = controller._controlledReadableByteStream;\n if (controller._closeRequested || stream._state !== 'readable') {\n return;\n }\n if (controller._queueTotalSize > 0) {\n controller._closeRequested = true;\n return;\n }\n if (controller._pendingPullIntos.length > 0) {\n const firstPendingPullInto = controller._pendingPullIntos.peek();\n if (firstPendingPullInto.bytesFilled > 0) {\n const e = new TypeError('Insufficient bytes to fill elements in the given buffer');\n ReadableByteStreamControllerError(controller, e);\n throw e;\n }\n }\n ReadableByteStreamControllerClearAlgorithms(controller);\n ReadableStreamClose(stream);\n }\n function ReadableByteStreamControllerEnqueue(controller, chunk) {\n const stream = controller._controlledReadableByteStream;\n if (controller._closeRequested || stream._state !== 'readable') {\n return;\n }\n const buffer = chunk.buffer;\n const byteOffset = chunk.byteOffset;\n const byteLength = chunk.byteLength;\n const transferredBuffer = TransferArrayBuffer(buffer);\n if (controller._pendingPullIntos.length > 0) {\n const firstPendingPullInto = controller._pendingPullIntos.peek();\n if (IsDetachedBuffer(firstPendingPullInto.buffer)) ;\n firstPendingPullInto.buffer = TransferArrayBuffer(firstPendingPullInto.buffer);\n }\n ReadableByteStreamControllerInvalidateBYOBRequest(controller);\n if (ReadableStreamHasDefaultReader(stream)) {\n if (ReadableStreamGetNumReadRequests(stream) === 0) {\n ReadableByteStreamControllerEnqueueChunkToQueue(controller, transferredBuffer, byteOffset, byteLength);\n }\n else {\n if (controller._pendingPullIntos.length > 0) {\n ReadableByteStreamControllerShiftPendingPullInto(controller);\n }\n const transferredView = new Uint8Array(transferredBuffer, byteOffset, byteLength);\n ReadableStreamFulfillReadRequest(stream, transferredView, false);\n }\n }\n else if (ReadableStreamHasBYOBReader(stream)) {\n // TODO: Ideally in this branch detaching should happen only if the buffer is 
not consumed fully.\n ReadableByteStreamControllerEnqueueChunkToQueue(controller, transferredBuffer, byteOffset, byteLength);\n ReadableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(controller);\n }\n else {\n ReadableByteStreamControllerEnqueueChunkToQueue(controller, transferredBuffer, byteOffset, byteLength);\n }\n ReadableByteStreamControllerCallPullIfNeeded(controller);\n }\n function ReadableByteStreamControllerError(controller, e) {\n const stream = controller._controlledReadableByteStream;\n if (stream._state !== 'readable') {\n return;\n }\n ReadableByteStreamControllerClearPendingPullIntos(controller);\n ResetQueue(controller);\n ReadableByteStreamControllerClearAlgorithms(controller);\n ReadableStreamError(stream, e);\n }\n function ReadableByteStreamControllerGetBYOBRequest(controller) {\n if (controller._byobRequest === null && controller._pendingPullIntos.length > 0) {\n const firstDescriptor = controller._pendingPullIntos.peek();\n const view = new Uint8Array(firstDescriptor.buffer, firstDescriptor.byteOffset + firstDescriptor.bytesFilled, firstDescriptor.byteLength - firstDescriptor.bytesFilled);\n const byobRequest = Object.create(ReadableStreamBYOBRequest.prototype);\n SetUpReadableStreamBYOBRequest(byobRequest, controller, view);\n controller._byobRequest = byobRequest;\n }\n return controller._byobRequest;\n }\n function ReadableByteStreamControllerGetDesiredSize(controller) {\n const state = controller._controlledReadableByteStream._state;\n if (state === 'errored') {\n return null;\n }\n if (state === 'closed') {\n return 0;\n }\n return controller._strategyHWM - controller._queueTotalSize;\n }\n function ReadableByteStreamControllerRespond(controller, bytesWritten) {\n const firstDescriptor = controller._pendingPullIntos.peek();\n const state = controller._controlledReadableByteStream._state;\n if (state === 'closed') {\n if (bytesWritten !== 0) {\n throw new TypeError('bytesWritten must be 0 when calling respond() on a closed stream');\n }\n }\n else {\n if (bytesWritten === 0) {\n throw new TypeError('bytesWritten must be greater than 0 when calling respond() on a readable stream');\n }\n if (firstDescriptor.bytesFilled + bytesWritten > firstDescriptor.byteLength) {\n throw new RangeError('bytesWritten out of range');\n }\n }\n firstDescriptor.buffer = TransferArrayBuffer(firstDescriptor.buffer);\n ReadableByteStreamControllerRespondInternal(controller, bytesWritten);\n }\n function ReadableByteStreamControllerRespondWithNewView(controller, view) {\n const firstDescriptor = controller._pendingPullIntos.peek();\n const state = controller._controlledReadableByteStream._state;\n if (state === 'closed') {\n if (view.byteLength !== 0) {\n throw new TypeError('The view\\'s length must be 0 when calling respondWithNewView() on a closed stream');\n }\n }\n else {\n if (view.byteLength === 0) {\n throw new TypeError('The view\\'s length must be greater than 0 when calling respondWithNewView() on a readable stream');\n }\n }\n if (firstDescriptor.byteOffset + firstDescriptor.bytesFilled !== view.byteOffset) {\n throw new RangeError('The region specified by view does not match byobRequest');\n }\n if (firstDescriptor.bufferByteLength !== view.buffer.byteLength) {\n throw new RangeError('The buffer of view has different capacity than byobRequest');\n }\n if (firstDescriptor.bytesFilled + view.byteLength > firstDescriptor.byteLength) {\n throw new RangeError('The region specified by view is larger than byobRequest');\n }\n const viewByteLength = view.byteLength;\n 
firstDescriptor.buffer = TransferArrayBuffer(view.buffer);\n ReadableByteStreamControllerRespondInternal(controller, viewByteLength);\n }\n function SetUpReadableByteStreamController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, autoAllocateChunkSize) {\n controller._controlledReadableByteStream = stream;\n controller._pullAgain = false;\n controller._pulling = false;\n controller._byobRequest = null;\n // Need to set the slots so that the assert doesn't fire. In the spec the slots already exist implicitly.\n controller._queue = controller._queueTotalSize = undefined;\n ResetQueue(controller);\n controller._closeRequested = false;\n controller._started = false;\n controller._strategyHWM = highWaterMark;\n controller._pullAlgorithm = pullAlgorithm;\n controller._cancelAlgorithm = cancelAlgorithm;\n controller._autoAllocateChunkSize = autoAllocateChunkSize;\n controller._pendingPullIntos = new SimpleQueue();\n stream._readableStreamController = controller;\n const startResult = startAlgorithm();\n uponPromise(promiseResolvedWith(startResult), () => {\n controller._started = true;\n ReadableByteStreamControllerCallPullIfNeeded(controller);\n }, r => {\n ReadableByteStreamControllerError(controller, r);\n });\n }\n function SetUpReadableByteStreamControllerFromUnderlyingSource(stream, underlyingByteSource, highWaterMark) {\n const controller = Object.create(ReadableByteStreamController.prototype);\n let startAlgorithm = () => undefined;\n let pullAlgorithm = () => promiseResolvedWith(undefined);\n let cancelAlgorithm = () => promiseResolvedWith(undefined);\n if (underlyingByteSource.start !== undefined) {\n startAlgorithm = () => underlyingByteSource.start(controller);\n }\n if (underlyingByteSource.pull !== undefined) {\n pullAlgorithm = () => underlyingByteSource.pull(controller);\n }\n if (underlyingByteSource.cancel !== undefined) {\n cancelAlgorithm = reason => underlyingByteSource.cancel(reason);\n }\n const autoAllocateChunkSize = underlyingByteSource.autoAllocateChunkSize;\n if (autoAllocateChunkSize === 0) {\n throw new TypeError('autoAllocateChunkSize must be greater than 0');\n }\n SetUpReadableByteStreamController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, autoAllocateChunkSize);\n }\n function SetUpReadableStreamBYOBRequest(request, controller, view) {\n request._associatedReadableByteStreamController = controller;\n request._view = view;\n }\n // Helper functions for the ReadableStreamBYOBRequest.\n function byobRequestBrandCheckException(name) {\n return new TypeError(`ReadableStreamBYOBRequest.prototype.${name} can only be used on a ReadableStreamBYOBRequest`);\n }\n // Helper functions for the ReadableByteStreamController.\n function byteStreamControllerBrandCheckException(name) {\n return new TypeError(`ReadableByteStreamController.prototype.${name} can only be used on a ReadableByteStreamController`);\n }\n\n // Abstract operations for the ReadableStream.\n function AcquireReadableStreamBYOBReader(stream) {\n return new ReadableStreamBYOBReader(stream);\n }\n // ReadableStream API exposed for controllers.\n function ReadableStreamAddReadIntoRequest(stream, readIntoRequest) {\n stream._reader._readIntoRequests.push(readIntoRequest);\n }\n function ReadableStreamFulfillReadIntoRequest(stream, chunk, done) {\n const reader = stream._reader;\n const readIntoRequest = reader._readIntoRequests.shift();\n if (done) {\n readIntoRequest._closeSteps(chunk);\n }\n else {\n readIntoRequest._chunkSteps(chunk);\n 
}\n }\n function ReadableStreamGetNumReadIntoRequests(stream) {\n return stream._reader._readIntoRequests.length;\n }\n function ReadableStreamHasBYOBReader(stream) {\n const reader = stream._reader;\n if (reader === undefined) {\n return false;\n }\n if (!IsReadableStreamBYOBReader(reader)) {\n return false;\n }\n return true;\n }\n /**\n * A BYOB reader vended by a {@link ReadableStream}.\n *\n * @public\n */\n class ReadableStreamBYOBReader {\n constructor(stream) {\n assertRequiredArgument(stream, 1, 'ReadableStreamBYOBReader');\n assertReadableStream(stream, 'First parameter');\n if (IsReadableStreamLocked(stream)) {\n throw new TypeError('This stream has already been locked for exclusive reading by another reader');\n }\n if (!IsReadableByteStreamController(stream._readableStreamController)) {\n throw new TypeError('Cannot construct a ReadableStreamBYOBReader for a stream not constructed with a byte ' +\n 'source');\n }\n ReadableStreamReaderGenericInitialize(this, stream);\n this._readIntoRequests = new SimpleQueue();\n }\n /**\n * Returns a promise that will be fulfilled when the stream becomes closed, or rejected if the stream ever errors or\n * the reader's lock is released before the stream finishes closing.\n */\n get closed() {\n if (!IsReadableStreamBYOBReader(this)) {\n return promiseRejectedWith(byobReaderBrandCheckException('closed'));\n }\n return this._closedPromise;\n }\n /**\n * If the reader is active, behaves the same as {@link ReadableStream.cancel | stream.cancel(reason)}.\n */\n cancel(reason = undefined) {\n if (!IsReadableStreamBYOBReader(this)) {\n return promiseRejectedWith(byobReaderBrandCheckException('cancel'));\n }\n if (this._ownerReadableStream === undefined) {\n return promiseRejectedWith(readerLockException('cancel'));\n }\n return ReadableStreamReaderGenericCancel(this, reason);\n }\n /**\n * Attempts to reads bytes into view, and returns a promise resolved with the result.\n *\n * If reading a chunk causes the queue to become empty, more data will be pulled from the underlying source.\n */\n read(view) {\n if (!IsReadableStreamBYOBReader(this)) {\n return promiseRejectedWith(byobReaderBrandCheckException('read'));\n }\n if (!ArrayBuffer.isView(view)) {\n return promiseRejectedWith(new TypeError('view must be an array buffer view'));\n }\n if (view.byteLength === 0) {\n return promiseRejectedWith(new TypeError('view must have non-zero byteLength'));\n }\n if (view.buffer.byteLength === 0) {\n return promiseRejectedWith(new TypeError(`view's buffer must have non-zero byteLength`));\n }\n if (IsDetachedBuffer(view.buffer)) ;\n if (this._ownerReadableStream === undefined) {\n return promiseRejectedWith(readerLockException('read from'));\n }\n let resolvePromise;\n let rejectPromise;\n const promise = newPromise((resolve, reject) => {\n resolvePromise = resolve;\n rejectPromise = reject;\n });\n const readIntoRequest = {\n _chunkSteps: chunk => resolvePromise({ value: chunk, done: false }),\n _closeSteps: chunk => resolvePromise({ value: chunk, done: true }),\n _errorSteps: e => rejectPromise(e)\n };\n ReadableStreamBYOBReaderRead(this, view, readIntoRequest);\n return promise;\n }\n /**\n * Releases the reader's lock on the corresponding stream. 
After the lock is released, the reader is no longer active.
         * If the associated stream is errored when the lock is released, the reader will appear errored in the same way
         * from now on; otherwise, the reader will appear closed.
         *
         * A reader's lock cannot be released while it still has a pending read request, i.e., if a promise returned by
         * the reader's {@link ReadableStreamBYOBReader.read | read()} method has not yet been settled. Attempting to
         * do so will throw a `TypeError` and leave the reader locked to the stream.
         */
        releaseLock() {
            if (!IsReadableStreamBYOBReader(this)) {
                throw byobReaderBrandCheckException('releaseLock');
            }
            if (this._ownerReadableStream === undefined) {
                return;
            }
            if (this._readIntoRequests.length > 0) {
                throw new TypeError('Tried to release a reader lock when that reader has pending read() calls un-settled');
            }
            ReadableStreamReaderGenericRelease(this);
        }
    }

    [... bundled web-streams-polyfill internals omitted: the ReadableStreamBYOBReader prototype setup and BYOB read helpers; ExtractHighWaterMark/ExtractSizeAlgorithm and the queuing-strategy and underlying-sink converters; the WritableStream, WritableStreamDefaultWriter and WritableStreamDefaultController implementations with their closed/ready promise helpers; the DOMException polyfill; ReadableStreamPipeTo; the ReadableStreamDefaultController implementation; ReadableStreamDefaultTee and ReadableByteStreamTee; and the underlying-source, reader-options, iterator-options and pipe-options converters ...]

    function convertPipeOptions(options, context) {
        assertDictionary(options, context);
        const preventAbort = options === null || options === void 0 ? void 0 : options.preventAbort;
        const preventCancel = options === null || options === void 0 ? void 0 : options.preventCancel;
        const preventClose = options === null || options === void 0 ? void 0 : options.preventClose;
        const signal = options === null || options === void 0 ?
void 0 : options.signal;\n if (signal !== undefined) {\n assertAbortSignal(signal, `${context} has member 'signal' that`);\n }\n return {\n preventAbort: Boolean(preventAbort),\n preventCancel: Boolean(preventCancel),\n preventClose: Boolean(preventClose),\n signal\n };\n }\n function assertAbortSignal(signal, context) {\n if (!isAbortSignal(signal)) {\n throw new TypeError(`${context} is not an AbortSignal.`);\n }\n }\n\n function convertReadableWritablePair(pair, context) {\n assertDictionary(pair, context);\n const readable = pair === null || pair === void 0 ? void 0 : pair.readable;\n assertRequiredField(readable, 'readable', 'ReadableWritablePair');\n assertReadableStream(readable, `${context} has member 'readable' that`);\n const writable = pair === null || pair === void 0 ? void 0 : pair.writable;\n assertRequiredField(writable, 'writable', 'ReadableWritablePair');\n assertWritableStream(writable, `${context} has member 'writable' that`);\n return { readable, writable };\n }\n\n /**\n * A readable stream represents a source of data, from which you can read.\n *\n * @public\n */\n class ReadableStream {\n constructor(rawUnderlyingSource = {}, rawStrategy = {}) {\n if (rawUnderlyingSource === undefined) {\n rawUnderlyingSource = null;\n }\n else {\n assertObject(rawUnderlyingSource, 'First parameter');\n }\n const strategy = convertQueuingStrategy(rawStrategy, 'Second parameter');\n const underlyingSource = convertUnderlyingDefaultOrByteSource(rawUnderlyingSource, 'First parameter');\n InitializeReadableStream(this);\n if (underlyingSource.type === 'bytes') {\n if (strategy.size !== undefined) {\n throw new RangeError('The strategy for a byte stream cannot have a size function');\n }\n const highWaterMark = ExtractHighWaterMark(strategy, 0);\n SetUpReadableByteStreamControllerFromUnderlyingSource(this, underlyingSource, highWaterMark);\n }\n else {\n const sizeAlgorithm = ExtractSizeAlgorithm(strategy);\n const highWaterMark = ExtractHighWaterMark(strategy, 1);\n SetUpReadableStreamDefaultControllerFromUnderlyingSource(this, underlyingSource, highWaterMark, sizeAlgorithm);\n }\n }\n /**\n * Whether or not the readable stream is locked to a {@link ReadableStreamDefaultReader | reader}.\n */\n get locked() {\n if (!IsReadableStream(this)) {\n throw streamBrandCheckException$1('locked');\n }\n return IsReadableStreamLocked(this);\n }\n /**\n * Cancels the stream, signaling a loss of interest in the stream by a consumer.\n *\n * The supplied `reason` argument will be given to the underlying source's {@link UnderlyingSource.cancel | cancel()}\n * method, which might or might not use it.\n */\n cancel(reason = undefined) {\n if (!IsReadableStream(this)) {\n return promiseRejectedWith(streamBrandCheckException$1('cancel'));\n }\n if (IsReadableStreamLocked(this)) {\n return promiseRejectedWith(new TypeError('Cannot cancel a stream that already has a reader'));\n }\n return ReadableStreamCancel(this, reason);\n }\n getReader(rawOptions = undefined) {\n if (!IsReadableStream(this)) {\n throw streamBrandCheckException$1('getReader');\n }\n const options = convertReaderOptions(rawOptions, 'First parameter');\n if (options.mode === undefined) {\n return AcquireReadableStreamDefaultReader(this);\n }\n return AcquireReadableStreamBYOBReader(this);\n }\n pipeThrough(rawTransform, rawOptions = {}) {\n if (!IsReadableStream(this)) {\n throw streamBrandCheckException$1('pipeThrough');\n }\n assertRequiredArgument(rawTransform, 1, 'pipeThrough');\n const transform = 
convertReadableWritablePair(rawTransform, 'First parameter');\n const options = convertPipeOptions(rawOptions, 'Second parameter');\n if (IsReadableStreamLocked(this)) {\n throw new TypeError('ReadableStream.prototype.pipeThrough cannot be used on a locked ReadableStream');\n }\n if (IsWritableStreamLocked(transform.writable)) {\n throw new TypeError('ReadableStream.prototype.pipeThrough cannot be used on a locked WritableStream');\n }\n const promise = ReadableStreamPipeTo(this, transform.writable, options.preventClose, options.preventAbort, options.preventCancel, options.signal);\n setPromiseIsHandledToTrue(promise);\n return transform.readable;\n }\n pipeTo(destination, rawOptions = {}) {\n if (!IsReadableStream(this)) {\n return promiseRejectedWith(streamBrandCheckException$1('pipeTo'));\n }\n if (destination === undefined) {\n return promiseRejectedWith(`Parameter 1 is required in 'pipeTo'.`);\n }\n if (!IsWritableStream(destination)) {\n return promiseRejectedWith(new TypeError(`ReadableStream.prototype.pipeTo's first argument must be a WritableStream`));\n }\n let options;\n try {\n options = convertPipeOptions(rawOptions, 'Second parameter');\n }\n catch (e) {\n return promiseRejectedWith(e);\n }\n if (IsReadableStreamLocked(this)) {\n return promiseRejectedWith(new TypeError('ReadableStream.prototype.pipeTo cannot be used on a locked ReadableStream'));\n }\n if (IsWritableStreamLocked(destination)) {\n return promiseRejectedWith(new TypeError('ReadableStream.prototype.pipeTo cannot be used on a locked WritableStream'));\n }\n return ReadableStreamPipeTo(this, destination, options.preventClose, options.preventAbort, options.preventCancel, options.signal);\n }\n /**\n * Tees this readable stream, returning a two-element array containing the two resulting branches as\n * new {@link ReadableStream} instances.\n *\n * Teeing a stream will lock it, preventing any other consumer from acquiring a reader.\n * To cancel the stream, cancel both of the resulting branches; a composite cancellation reason will then be\n * propagated to the stream's underlying source.\n *\n * Note that the chunks seen in each branch will be the same object. 
If the chunks are not immutable,\n * this could allow interference between the two branches.\n */\n tee() {\n if (!IsReadableStream(this)) {\n throw streamBrandCheckException$1('tee');\n }\n const branches = ReadableStreamTee(this);\n return CreateArrayFromList(branches);\n }\n values(rawOptions = undefined) {\n if (!IsReadableStream(this)) {\n throw streamBrandCheckException$1('values');\n }\n const options = convertIteratorOptions(rawOptions, 'First parameter');\n return AcquireReadableStreamAsyncIterator(this, options.preventCancel);\n }\n }\n Object.defineProperties(ReadableStream.prototype, {\n cancel: { enumerable: true },\n getReader: { enumerable: true },\n pipeThrough: { enumerable: true },\n pipeTo: { enumerable: true },\n tee: { enumerable: true },\n values: { enumerable: true },\n locked: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(ReadableStream.prototype, SymbolPolyfill.toStringTag, {\n value: 'ReadableStream',\n configurable: true\n });\n }\n if (typeof SymbolPolyfill.asyncIterator === 'symbol') {\n Object.defineProperty(ReadableStream.prototype, SymbolPolyfill.asyncIterator, {\n value: ReadableStream.prototype.values,\n writable: true,\n configurable: true\n });\n }\n // Abstract operations for the ReadableStream.\n // Throws if and only if startAlgorithm throws.\n function CreateReadableStream(startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark = 1, sizeAlgorithm = () => 1) {\n const stream = Object.create(ReadableStream.prototype);\n InitializeReadableStream(stream);\n const controller = Object.create(ReadableStreamDefaultController.prototype);\n SetUpReadableStreamDefaultController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, sizeAlgorithm);\n return stream;\n }\n // Throws if and only if startAlgorithm throws.\n function CreateReadableByteStream(startAlgorithm, pullAlgorithm, cancelAlgorithm) {\n const stream = Object.create(ReadableStream.prototype);\n InitializeReadableStream(stream);\n const controller = Object.create(ReadableByteStreamController.prototype);\n SetUpReadableByteStreamController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, 0, undefined);\n return stream;\n }\n function InitializeReadableStream(stream) {\n stream._state = 'readable';\n stream._reader = undefined;\n stream._storedError = undefined;\n stream._disturbed = false;\n }\n function IsReadableStream(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_readableStreamController')) {\n return false;\n }\n return x instanceof ReadableStream;\n }\n function IsReadableStreamLocked(stream) {\n if (stream._reader === undefined) {\n return false;\n }\n return true;\n }\n // ReadableStream API exposed for controllers.\n function ReadableStreamCancel(stream, reason) {\n stream._disturbed = true;\n if (stream._state === 'closed') {\n return promiseResolvedWith(undefined);\n }\n if (stream._state === 'errored') {\n return promiseRejectedWith(stream._storedError);\n }\n ReadableStreamClose(stream);\n const reader = stream._reader;\n if (reader !== undefined && IsReadableStreamBYOBReader(reader)) {\n reader._readIntoRequests.forEach(readIntoRequest => {\n readIntoRequest._closeSteps(undefined);\n });\n reader._readIntoRequests = new SimpleQueue();\n }\n const sourceCancelPromise = stream._readableStreamController[CancelSteps](reason);\n return transformPromiseWith(sourceCancelPromise, noop);\n }\n function 
ReadableStreamClose(stream) {\n stream._state = 'closed';\n const reader = stream._reader;\n if (reader === undefined) {\n return;\n }\n defaultReaderClosedPromiseResolve(reader);\n if (IsReadableStreamDefaultReader(reader)) {\n reader._readRequests.forEach(readRequest => {\n readRequest._closeSteps();\n });\n reader._readRequests = new SimpleQueue();\n }\n }\n function ReadableStreamError(stream, e) {\n stream._state = 'errored';\n stream._storedError = e;\n const reader = stream._reader;\n if (reader === undefined) {\n return;\n }\n defaultReaderClosedPromiseReject(reader, e);\n if (IsReadableStreamDefaultReader(reader)) {\n reader._readRequests.forEach(readRequest => {\n readRequest._errorSteps(e);\n });\n reader._readRequests = new SimpleQueue();\n }\n else {\n reader._readIntoRequests.forEach(readIntoRequest => {\n readIntoRequest._errorSteps(e);\n });\n reader._readIntoRequests = new SimpleQueue();\n }\n }\n // Helper functions for the ReadableStream.\n function streamBrandCheckException$1(name) {\n return new TypeError(`ReadableStream.prototype.${name} can only be used on a ReadableStream`);\n }\n\n function convertQueuingStrategyInit(init, context) {\n assertDictionary(init, context);\n const highWaterMark = init === null || init === void 0 ? void 0 : init.highWaterMark;\n assertRequiredField(highWaterMark, 'highWaterMark', 'QueuingStrategyInit');\n return {\n highWaterMark: convertUnrestrictedDouble(highWaterMark)\n };\n }\n\n // The size function must not have a prototype property nor be a constructor\n const byteLengthSizeFunction = (chunk) => {\n return chunk.byteLength;\n };\n try {\n Object.defineProperty(byteLengthSizeFunction, 'name', {\n value: 'size',\n configurable: true\n });\n }\n catch (_a) {\n // This property is non-configurable in older browsers, so ignore if this throws.\n // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/name#browser_compatibility\n }\n /**\n * A queuing strategy that counts the number of bytes in each chunk.\n *\n * @public\n */\n class ByteLengthQueuingStrategy {\n constructor(options) {\n assertRequiredArgument(options, 1, 'ByteLengthQueuingStrategy');\n options = convertQueuingStrategyInit(options, 'First parameter');\n this._byteLengthQueuingStrategyHighWaterMark = options.highWaterMark;\n }\n /**\n * Returns the high water mark provided to the constructor.\n */\n get highWaterMark() {\n if (!IsByteLengthQueuingStrategy(this)) {\n throw byteLengthBrandCheckException('highWaterMark');\n }\n return this._byteLengthQueuingStrategyHighWaterMark;\n }\n /**\n * Measures the size of `chunk` by returning the value of its `byteLength` property.\n */\n get size() {\n if (!IsByteLengthQueuingStrategy(this)) {\n throw byteLengthBrandCheckException('size');\n }\n return byteLengthSizeFunction;\n }\n }\n Object.defineProperties(ByteLengthQueuingStrategy.prototype, {\n highWaterMark: { enumerable: true },\n size: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(ByteLengthQueuingStrategy.prototype, SymbolPolyfill.toStringTag, {\n value: 'ByteLengthQueuingStrategy',\n configurable: true\n });\n }\n // Helper functions for the ByteLengthQueuingStrategy.\n function byteLengthBrandCheckException(name) {\n return new TypeError(`ByteLengthQueuingStrategy.prototype.${name} can only be used on a ByteLengthQueuingStrategy`);\n }\n function IsByteLengthQueuingStrategy(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, 
'_byteLengthQueuingStrategyHighWaterMark')) {\n return false;\n }\n return x instanceof ByteLengthQueuingStrategy;\n }\n\n // The size function must not have a prototype property nor be a constructor\n const countSizeFunction = () => {\n return 1;\n };\n try {\n Object.defineProperty(countSizeFunction, 'name', {\n value: 'size',\n configurable: true\n });\n }\n catch (_a) {\n // This property is non-configurable in older browsers, so ignore if this throws.\n // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/name#browser_compatibility\n }\n /**\n * A queuing strategy that counts the number of chunks.\n *\n * @public\n */\n class CountQueuingStrategy {\n constructor(options) {\n assertRequiredArgument(options, 1, 'CountQueuingStrategy');\n options = convertQueuingStrategyInit(options, 'First parameter');\n this._countQueuingStrategyHighWaterMark = options.highWaterMark;\n }\n /**\n * Returns the high water mark provided to the constructor.\n */\n get highWaterMark() {\n if (!IsCountQueuingStrategy(this)) {\n throw countBrandCheckException('highWaterMark');\n }\n return this._countQueuingStrategyHighWaterMark;\n }\n /**\n * Measures the size of `chunk` by always returning 1.\n * This ensures that the total queue size is a count of the number of chunks in the queue.\n */\n get size() {\n if (!IsCountQueuingStrategy(this)) {\n throw countBrandCheckException('size');\n }\n return countSizeFunction;\n }\n }\n Object.defineProperties(CountQueuingStrategy.prototype, {\n highWaterMark: { enumerable: true },\n size: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(CountQueuingStrategy.prototype, SymbolPolyfill.toStringTag, {\n value: 'CountQueuingStrategy',\n configurable: true\n });\n }\n // Helper functions for the CountQueuingStrategy.\n function countBrandCheckException(name) {\n return new TypeError(`CountQueuingStrategy.prototype.${name} can only be used on a CountQueuingStrategy`);\n }\n function IsCountQueuingStrategy(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_countQueuingStrategyHighWaterMark')) {\n return false;\n }\n return x instanceof CountQueuingStrategy;\n }\n\n function convertTransformer(original, context) {\n assertDictionary(original, context);\n const flush = original === null || original === void 0 ? void 0 : original.flush;\n const readableType = original === null || original === void 0 ? void 0 : original.readableType;\n const start = original === null || original === void 0 ? void 0 : original.start;\n const transform = original === null || original === void 0 ? void 0 : original.transform;\n const writableType = original === null || original === void 0 ? 
void 0 : original.writableType;\n return {\n flush: flush === undefined ?\n undefined :\n convertTransformerFlushCallback(flush, original, `${context} has member 'flush' that`),\n readableType,\n start: start === undefined ?\n undefined :\n convertTransformerStartCallback(start, original, `${context} has member 'start' that`),\n transform: transform === undefined ?\n undefined :\n convertTransformerTransformCallback(transform, original, `${context} has member 'transform' that`),\n writableType\n };\n }\n function convertTransformerFlushCallback(fn, original, context) {\n assertFunction(fn, context);\n return (controller) => promiseCall(fn, original, [controller]);\n }\n function convertTransformerStartCallback(fn, original, context) {\n assertFunction(fn, context);\n return (controller) => reflectCall(fn, original, [controller]);\n }\n function convertTransformerTransformCallback(fn, original, context) {\n assertFunction(fn, context);\n return (chunk, controller) => promiseCall(fn, original, [chunk, controller]);\n }\n\n // Class TransformStream\n /**\n * A transform stream consists of a pair of streams: a {@link WritableStream | writable stream},\n * known as its writable side, and a {@link ReadableStream | readable stream}, known as its readable side.\n * In a manner specific to the transform stream in question, writes to the writable side result in new data being\n * made available for reading from the readable side.\n *\n * @public\n */\n class TransformStream {\n constructor(rawTransformer = {}, rawWritableStrategy = {}, rawReadableStrategy = {}) {\n if (rawTransformer === undefined) {\n rawTransformer = null;\n }\n const writableStrategy = convertQueuingStrategy(rawWritableStrategy, 'Second parameter');\n const readableStrategy = convertQueuingStrategy(rawReadableStrategy, 'Third parameter');\n const transformer = convertTransformer(rawTransformer, 'First parameter');\n if (transformer.readableType !== undefined) {\n throw new RangeError('Invalid readableType specified');\n }\n if (transformer.writableType !== undefined) {\n throw new RangeError('Invalid writableType specified');\n }\n const readableHighWaterMark = ExtractHighWaterMark(readableStrategy, 0);\n const readableSizeAlgorithm = ExtractSizeAlgorithm(readableStrategy);\n const writableHighWaterMark = ExtractHighWaterMark(writableStrategy, 1);\n const writableSizeAlgorithm = ExtractSizeAlgorithm(writableStrategy);\n let startPromise_resolve;\n const startPromise = newPromise(resolve => {\n startPromise_resolve = resolve;\n });\n InitializeTransformStream(this, startPromise, writableHighWaterMark, writableSizeAlgorithm, readableHighWaterMark, readableSizeAlgorithm);\n SetUpTransformStreamDefaultControllerFromTransformer(this, transformer);\n if (transformer.start !== undefined) {\n startPromise_resolve(transformer.start(this._transformStreamController));\n }\n else {\n startPromise_resolve(undefined);\n }\n }\n /**\n * The readable side of the transform stream.\n */\n get readable() {\n if (!IsTransformStream(this)) {\n throw streamBrandCheckException('readable');\n }\n return this._readable;\n }\n /**\n * The writable side of the transform stream.\n */\n get writable() {\n if (!IsTransformStream(this)) {\n throw streamBrandCheckException('writable');\n }\n return this._writable;\n }\n }\n Object.defineProperties(TransformStream.prototype, {\n readable: { enumerable: true },\n writable: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(TransformStream.prototype, 
SymbolPolyfill.toStringTag, {\n value: 'TransformStream',\n configurable: true\n });\n }\n function InitializeTransformStream(stream, startPromise, writableHighWaterMark, writableSizeAlgorithm, readableHighWaterMark, readableSizeAlgorithm) {\n function startAlgorithm() {\n return startPromise;\n }\n function writeAlgorithm(chunk) {\n return TransformStreamDefaultSinkWriteAlgorithm(stream, chunk);\n }\n function abortAlgorithm(reason) {\n return TransformStreamDefaultSinkAbortAlgorithm(stream, reason);\n }\n function closeAlgorithm() {\n return TransformStreamDefaultSinkCloseAlgorithm(stream);\n }\n stream._writable = CreateWritableStream(startAlgorithm, writeAlgorithm, closeAlgorithm, abortAlgorithm, writableHighWaterMark, writableSizeAlgorithm);\n function pullAlgorithm() {\n return TransformStreamDefaultSourcePullAlgorithm(stream);\n }\n function cancelAlgorithm(reason) {\n TransformStreamErrorWritableAndUnblockWrite(stream, reason);\n return promiseResolvedWith(undefined);\n }\n stream._readable = CreateReadableStream(startAlgorithm, pullAlgorithm, cancelAlgorithm, readableHighWaterMark, readableSizeAlgorithm);\n // The [[backpressure]] slot is set to undefined so that it can be initialised by TransformStreamSetBackpressure.\n stream._backpressure = undefined;\n stream._backpressureChangePromise = undefined;\n stream._backpressureChangePromise_resolve = undefined;\n TransformStreamSetBackpressure(stream, true);\n stream._transformStreamController = undefined;\n }\n function IsTransformStream(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_transformStreamController')) {\n return false;\n }\n return x instanceof TransformStream;\n }\n // This is a no-op if both sides are already errored.\n function TransformStreamError(stream, e) {\n ReadableStreamDefaultControllerError(stream._readable._readableStreamController, e);\n TransformStreamErrorWritableAndUnblockWrite(stream, e);\n }\n function TransformStreamErrorWritableAndUnblockWrite(stream, e) {\n TransformStreamDefaultControllerClearAlgorithms(stream._transformStreamController);\n WritableStreamDefaultControllerErrorIfNeeded(stream._writable._writableStreamController, e);\n if (stream._backpressure) {\n // Pretend that pull() was called to permit any pending write() calls to complete. TransformStreamSetBackpressure()\n // cannot be called from enqueue() or pull() once the ReadableStream is errored, so this will will be the final time\n // _backpressure is set.\n TransformStreamSetBackpressure(stream, false);\n }\n }\n function TransformStreamSetBackpressure(stream, backpressure) {\n // Passes also when called during construction.\n if (stream._backpressureChangePromise !== undefined) {\n stream._backpressureChangePromise_resolve();\n }\n stream._backpressureChangePromise = newPromise(resolve => {\n stream._backpressureChangePromise_resolve = resolve;\n });\n stream._backpressure = backpressure;\n }\n // Class TransformStreamDefaultController\n /**\n * Allows control of the {@link ReadableStream} and {@link WritableStream} of the associated {@link TransformStream}.\n *\n * @public\n */\n class TransformStreamDefaultController {\n constructor() {\n throw new TypeError('Illegal constructor');\n }\n /**\n * Returns the desired size to fill the readable side’s internal queue. 
It can be negative, if the queue is over-full.\n */\n get desiredSize() {\n if (!IsTransformStreamDefaultController(this)) {\n throw defaultControllerBrandCheckException('desiredSize');\n }\n const readableController = this._controlledTransformStream._readable._readableStreamController;\n return ReadableStreamDefaultControllerGetDesiredSize(readableController);\n }\n enqueue(chunk = undefined) {\n if (!IsTransformStreamDefaultController(this)) {\n throw defaultControllerBrandCheckException('enqueue');\n }\n TransformStreamDefaultControllerEnqueue(this, chunk);\n }\n /**\n * Errors both the readable side and the writable side of the controlled transform stream, making all future\n * interactions with it fail with the given error `e`. Any chunks queued for transformation will be discarded.\n */\n error(reason = undefined) {\n if (!IsTransformStreamDefaultController(this)) {\n throw defaultControllerBrandCheckException('error');\n }\n TransformStreamDefaultControllerError(this, reason);\n }\n /**\n * Closes the readable side and errors the writable side of the controlled transform stream. This is useful when the\n * transformer only needs to consume a portion of the chunks written to the writable side.\n */\n terminate() {\n if (!IsTransformStreamDefaultController(this)) {\n throw defaultControllerBrandCheckException('terminate');\n }\n TransformStreamDefaultControllerTerminate(this);\n }\n }\n Object.defineProperties(TransformStreamDefaultController.prototype, {\n enqueue: { enumerable: true },\n error: { enumerable: true },\n terminate: { enumerable: true },\n desiredSize: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(TransformStreamDefaultController.prototype, SymbolPolyfill.toStringTag, {\n value: 'TransformStreamDefaultController',\n configurable: true\n });\n }\n // Transform Stream Default Controller Abstract Operations\n function IsTransformStreamDefaultController(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_controlledTransformStream')) {\n return false;\n }\n return x instanceof TransformStreamDefaultController;\n }\n function SetUpTransformStreamDefaultController(stream, controller, transformAlgorithm, flushAlgorithm) {\n controller._controlledTransformStream = stream;\n stream._transformStreamController = controller;\n controller._transformAlgorithm = transformAlgorithm;\n controller._flushAlgorithm = flushAlgorithm;\n }\n function SetUpTransformStreamDefaultControllerFromTransformer(stream, transformer) {\n const controller = Object.create(TransformStreamDefaultController.prototype);\n let transformAlgorithm = (chunk) => {\n try {\n TransformStreamDefaultControllerEnqueue(controller, chunk);\n return promiseResolvedWith(undefined);\n }\n catch (transformResultE) {\n return promiseRejectedWith(transformResultE);\n }\n };\n let flushAlgorithm = () => promiseResolvedWith(undefined);\n if (transformer.transform !== undefined) {\n transformAlgorithm = chunk => transformer.transform(chunk, controller);\n }\n if (transformer.flush !== undefined) {\n flushAlgorithm = () => transformer.flush(controller);\n }\n SetUpTransformStreamDefaultController(stream, controller, transformAlgorithm, flushAlgorithm);\n }\n function TransformStreamDefaultControllerClearAlgorithms(controller) {\n controller._transformAlgorithm = undefined;\n controller._flushAlgorithm = undefined;\n }\n function TransformStreamDefaultControllerEnqueue(controller, chunk) {\n const stream = 
controller._controlledTransformStream;\n const readableController = stream._readable._readableStreamController;\n if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(readableController)) {\n throw new TypeError('Readable side is not in a state that permits enqueue');\n }\n // We throttle transform invocations based on the backpressure of the ReadableStream, but we still\n // accept TransformStreamDefaultControllerEnqueue() calls.\n try {\n ReadableStreamDefaultControllerEnqueue(readableController, chunk);\n }\n catch (e) {\n // This happens when readableStrategy.size() throws.\n TransformStreamErrorWritableAndUnblockWrite(stream, e);\n throw stream._readable._storedError;\n }\n const backpressure = ReadableStreamDefaultControllerHasBackpressure(readableController);\n if (backpressure !== stream._backpressure) {\n TransformStreamSetBackpressure(stream, true);\n }\n }\n function TransformStreamDefaultControllerError(controller, e) {\n TransformStreamError(controller._controlledTransformStream, e);\n }\n function TransformStreamDefaultControllerPerformTransform(controller, chunk) {\n const transformPromise = controller._transformAlgorithm(chunk);\n return transformPromiseWith(transformPromise, undefined, r => {\n TransformStreamError(controller._controlledTransformStream, r);\n throw r;\n });\n }\n function TransformStreamDefaultControllerTerminate(controller) {\n const stream = controller._controlledTransformStream;\n const readableController = stream._readable._readableStreamController;\n ReadableStreamDefaultControllerClose(readableController);\n const error = new TypeError('TransformStream terminated');\n TransformStreamErrorWritableAndUnblockWrite(stream, error);\n }\n // TransformStreamDefaultSink Algorithms\n function TransformStreamDefaultSinkWriteAlgorithm(stream, chunk) {\n const controller = stream._transformStreamController;\n if (stream._backpressure) {\n const backpressureChangePromise = stream._backpressureChangePromise;\n return transformPromiseWith(backpressureChangePromise, () => {\n const writable = stream._writable;\n const state = writable._state;\n if (state === 'erroring') {\n throw writable._storedError;\n }\n return TransformStreamDefaultControllerPerformTransform(controller, chunk);\n });\n }\n return TransformStreamDefaultControllerPerformTransform(controller, chunk);\n }\n function TransformStreamDefaultSinkAbortAlgorithm(stream, reason) {\n // abort() is not called synchronously, so it is possible for abort() to be called when the stream is already\n // errored.\n TransformStreamError(stream, reason);\n return promiseResolvedWith(undefined);\n }\n function TransformStreamDefaultSinkCloseAlgorithm(stream) {\n // stream._readable cannot change after construction, so caching it across a call to user code is safe.\n const readable = stream._readable;\n const controller = stream._transformStreamController;\n const flushPromise = controller._flushAlgorithm();\n TransformStreamDefaultControllerClearAlgorithms(controller);\n // Return a promise that is fulfilled with undefined on success.\n return transformPromiseWith(flushPromise, () => {\n if (readable._state === 'errored') {\n throw readable._storedError;\n }\n ReadableStreamDefaultControllerClose(readable._readableStreamController);\n }, r => {\n TransformStreamError(stream, r);\n throw readable._storedError;\n });\n }\n // TransformStreamDefaultSource Algorithms\n function TransformStreamDefaultSourcePullAlgorithm(stream) {\n // Invariant. 
Enforced by the promises returned by start() and pull().\n TransformStreamSetBackpressure(stream, false);\n // Prevent the next pull() call until there is backpressure.\n return stream._backpressureChangePromise;\n }\n // Helper functions for the TransformStreamDefaultController.\n function defaultControllerBrandCheckException(name) {\n return new TypeError(`TransformStreamDefaultController.prototype.${name} can only be used on a TransformStreamDefaultController`);\n }\n // Helper functions for the TransformStream.\n function streamBrandCheckException(name) {\n return new TypeError(`TransformStream.prototype.${name} can only be used on a TransformStream`);\n }\n\n exports.ByteLengthQueuingStrategy = ByteLengthQueuingStrategy;\n exports.CountQueuingStrategy = CountQueuingStrategy;\n exports.ReadableByteStreamController = ReadableByteStreamController;\n exports.ReadableStream = ReadableStream;\n exports.ReadableStreamBYOBReader = ReadableStreamBYOBReader;\n exports.ReadableStreamBYOBRequest = ReadableStreamBYOBRequest;\n exports.ReadableStreamDefaultController = ReadableStreamDefaultController;\n exports.ReadableStreamDefaultReader = ReadableStreamDefaultReader;\n exports.TransformStream = TransformStream;\n exports.TransformStreamDefaultController = TransformStreamDefaultController;\n exports.WritableStream = WritableStream;\n exports.WritableStreamDefaultController = WritableStreamDefaultController;\n exports.WritableStreamDefaultWriter = WritableStreamDefaultWriter;\n\n Object.defineProperty(exports, '__esModule', { value: true });\n\n})));\n//# sourceMappingURL=ponyfill.es2018.js.map\n","module.exports = require(\"assert\");","module.exports = require(\"buffer\");","module.exports = require(\"crypto\");","module.exports = require(\"events\");","module.exports = require(\"fs\");","module.exports = require(\"http\");","module.exports = require(\"https\");","module.exports = require(\"net\");","module.exports = require(\"node:process\");","module.exports = require(\"node:stream/web\");","module.exports = require(\"os\");","module.exports = require(\"path\");","module.exports = require(\"tls\");","module.exports = require(\"util\");","module.exports = require(\"worker_threads\");","/* c8 ignore start */\n// 64 KiB (same size chrome slice theirs blob into Uint8array's)\nconst POOL_SIZE = 65536\n\nif (!globalThis.ReadableStream) {\n // `node:stream/web` got introduced in v16.5.0 as experimental\n // and it's preferred over the polyfilled version. 
So we also\n // suppress the warning that gets emitted by NodeJS for using it.\n try {\n const process = require('node:process')\n const { emitWarning } = process\n try {\n process.emitWarning = () => {}\n Object.assign(globalThis, require('node:stream/web'))\n process.emitWarning = emitWarning\n } catch (error) {\n process.emitWarning = emitWarning\n throw error\n }\n } catch (error) {\n // fallback to polyfill implementation\n Object.assign(globalThis, require('web-streams-polyfill/dist/ponyfill.es2018.js'))\n }\n}\n\ntry {\n // Don't use node: prefix for this, require+node: is not supported until node v14.14\n // Only `import()` can use prefix in 12.20 and later\n const { Blob } = require('buffer')\n if (Blob && !Blob.prototype.stream) {\n Blob.prototype.stream = function name (params) {\n let position = 0\n const blob = this\n\n return new ReadableStream({\n type: 'bytes',\n async pull (ctrl) {\n const chunk = blob.slice(position, Math.min(blob.size, position + POOL_SIZE))\n const buffer = await chunk.arrayBuffer()\n position += buffer.byteLength\n ctrl.enqueue(new Uint8Array(buffer))\n\n if (position === blob.size) {\n ctrl.close()\n }\n }\n })\n }\n }\n} catch (error) {}\n/* c8 ignore end */\n","import Blob from './index.js'\n\nconst _File = class File extends Blob {\n #lastModified = 0\n #name = ''\n\n /**\n * @param {*[]} fileBits\n * @param {string} fileName\n * @param {{lastModified?: number, type?: string}} options\n */// @ts-ignore\n constructor (fileBits, fileName, options = {}) {\n if (arguments.length < 2) {\n throw new TypeError(`Failed to construct 'File': 2 arguments required, but only ${arguments.length} present.`)\n }\n super(fileBits, options)\n\n if (options === null) options = {}\n\n // Simulate WebIDL type casting for NaN value in lastModified option.\n const lastModified = options.lastModified === undefined ? 
Date.now() : Number(options.lastModified)\n if (!Number.isNaN(lastModified)) {\n this.#lastModified = lastModified\n }\n\n this.#name = String(fileName)\n }\n\n get name () {\n return this.#name\n }\n\n get lastModified () {\n return this.#lastModified\n }\n\n get [Symbol.toStringTag] () {\n return 'File'\n }\n\n static [Symbol.hasInstance] (object) {\n return !!object && object instanceof Blob &&\n /^(File)$/.test(object[Symbol.toStringTag])\n }\n}\n\n/** @type {typeof globalThis.File} */// @ts-ignore\nexport const File = _File\nexport default File\n","const __WEBPACK_NAMESPACE_OBJECT__ = require(\"node:fs\");","const __WEBPACK_NAMESPACE_OBJECT__ = require(\"node:path\");","import { statSync, createReadStream, promises as fs } from 'node:fs'\nimport { basename } from 'node:path'\nimport DOMException from 'node-domexception'\n\nimport File from './file.js'\nimport Blob from './index.js'\n\nconst { stat } = fs\n\n/**\n * @param {string} path filepath on the disk\n * @param {string} [type] mimetype to use\n */\nconst blobFromSync = (path, type) => fromBlob(statSync(path), path, type)\n\n/**\n * @param {string} path filepath on the disk\n * @param {string} [type] mimetype to use\n * @returns {Promise}\n */\nconst blobFrom = (path, type) => stat(path).then(stat => fromBlob(stat, path, type))\n\n/**\n * @param {string} path filepath on the disk\n * @param {string} [type] mimetype to use\n * @returns {Promise}\n */\nconst fileFrom = (path, type) => stat(path).then(stat => fromFile(stat, path, type))\n\n/**\n * @param {string} path filepath on the disk\n * @param {string} [type] mimetype to use\n */\nconst fileFromSync = (path, type) => fromFile(statSync(path), path, type)\n\n// @ts-ignore\nconst fromBlob = (stat, path, type = '') => new Blob([new BlobDataItem({\n path,\n size: stat.size,\n lastModified: stat.mtimeMs,\n start: 0\n})], { type })\n\n// @ts-ignore\nconst fromFile = (stat, path, type = '') => new File([new BlobDataItem({\n path,\n size: stat.size,\n lastModified: stat.mtimeMs,\n start: 0\n})], basename(path), { type, lastModified: stat.mtimeMs })\n\n/**\n * This is a blob backed up by a file on the disk\n * with minium requirement. Its wrapped around a Blob as a blobPart\n * so you have no direct access to this.\n *\n * @private\n */\nclass BlobDataItem {\n #path\n #start\n\n constructor (options) {\n this.#path = options.path\n this.#start = options.start\n this.size = options.size\n this.lastModified = options.lastModified\n this.originalSize = options.originalSize === undefined\n ? options.size\n : options.originalSize\n }\n\n /**\n * Slicing arguments is first validated and formatted\n * to not be out of range by Blob.prototype.slice\n */\n slice (start, end) {\n return new BlobDataItem({\n path: this.#path,\n lastModified: this.lastModified,\n originalSize: this.originalSize,\n size: end - start,\n start: this.#start + start\n })\n }\n\n async * stream () {\n const { mtimeMs, size } = await stat(this.#path)\n\n if (mtimeMs > this.lastModified || this.originalSize !== size) {\n throw new DOMException('The requested file could not be read, typically due to permission problems that have occurred after a reference to a file was acquired.', 'NotReadableError')\n }\n\n yield * createReadStream(this.#path, {\n start: this.#start,\n end: this.#start + this.size - 1\n })\n }\n\n get [Symbol.toStringTag] () {\n return 'Blob'\n }\n}\n\nexport default blobFromSync\nexport { File, Blob, blobFrom, blobFromSync, fileFrom, fileFromSync }\n","/*! fetch-blob. MIT License. 
Jimmy Wärting */\n\n// TODO (jimmywarting): in the feature use conditional loading with top level await (requires 14.x)\n// Node has recently added whatwg stream into core\n\nimport './streams.cjs'\n\n// 64 KiB (same size chrome slice theirs blob into Uint8array's)\nconst POOL_SIZE = 65536\n\n/** @param {(Blob | Uint8Array)[]} parts */\nasync function * toIterator (parts, clone = true) {\n for (const part of parts) {\n if ('stream' in part) {\n yield * (/** @type {AsyncIterableIterator} */ (part.stream()))\n } else if (ArrayBuffer.isView(part)) {\n if (clone) {\n let position = part.byteOffset\n const end = part.byteOffset + part.byteLength\n while (position !== end) {\n const size = Math.min(end - position, POOL_SIZE)\n const chunk = part.buffer.slice(position, position + size)\n position += chunk.byteLength\n yield new Uint8Array(chunk)\n }\n } else {\n yield part\n }\n /* c8 ignore next 10 */\n } else {\n // For blobs that have arrayBuffer but no stream method (nodes buffer.Blob)\n let position = 0, b = (/** @type {Blob} */ (part))\n while (position !== b.size) {\n const chunk = b.slice(position, Math.min(b.size, position + POOL_SIZE))\n const buffer = await chunk.arrayBuffer()\n position += buffer.byteLength\n yield new Uint8Array(buffer)\n }\n }\n }\n}\n\nconst _Blob = class Blob {\n /** @type {Array.<(Blob|Uint8Array)>} */\n #parts = []\n #type = ''\n #size = 0\n #endings = 'transparent'\n\n /**\n * The Blob() constructor returns a new Blob object. The content\n * of the blob consists of the concatenation of the values given\n * in the parameter array.\n *\n * @param {*} blobParts\n * @param {{ type?: string, endings?: string }} [options]\n */\n constructor (blobParts = [], options = {}) {\n if (typeof blobParts !== 'object' || blobParts === null) {\n throw new TypeError('Failed to construct \\'Blob\\': The provided value cannot be converted to a sequence.')\n }\n\n if (typeof blobParts[Symbol.iterator] !== 'function') {\n throw new TypeError('Failed to construct \\'Blob\\': The object must have a callable @@iterator property.')\n }\n\n if (typeof options !== 'object' && typeof options !== 'function') {\n throw new TypeError('Failed to construct \\'Blob\\': parameter 2 cannot convert to dictionary.')\n }\n\n if (options === null) options = {}\n\n const encoder = new TextEncoder()\n for (const element of blobParts) {\n let part\n if (ArrayBuffer.isView(element)) {\n part = new Uint8Array(element.buffer.slice(element.byteOffset, element.byteOffset + element.byteLength))\n } else if (element instanceof ArrayBuffer) {\n part = new Uint8Array(element.slice(0))\n } else if (element instanceof Blob) {\n part = element\n } else {\n part = encoder.encode(`${element}`)\n }\n\n const size = ArrayBuffer.isView(part) ? part.byteLength : part.size\n // Avoid pushing empty parts into the array to better GC them\n if (size) {\n this.#size += size\n this.#parts.push(part)\n }\n }\n\n this.#endings = `${options.endings === undefined ? 'transparent' : options.endings}`\n const type = options.type === undefined ? '' : String(options.type)\n this.#type = /^[\\x20-\\x7E]*$/.test(type) ? 
type : ''\n }\n\n /**\n * The Blob interface's size property returns the\n * size of the Blob in bytes.\n */\n get size () {\n return this.#size\n }\n\n /**\n * The type property of a Blob object returns the MIME type of the file.\n */\n get type () {\n return this.#type\n }\n\n /**\n * The text() method in the Blob interface returns a Promise\n * that resolves with a string containing the contents of\n * the blob, interpreted as UTF-8.\n *\n * @return {Promise}\n */\n async text () {\n // More optimized than using this.arrayBuffer()\n // that requires twice as much ram\n const decoder = new TextDecoder()\n let str = ''\n for await (const part of toIterator(this.#parts, false)) {\n str += decoder.decode(part, { stream: true })\n }\n // Remaining\n str += decoder.decode()\n return str\n }\n\n /**\n * The arrayBuffer() method in the Blob interface returns a\n * Promise that resolves with the contents of the blob as\n * binary data contained in an ArrayBuffer.\n *\n * @return {Promise}\n */\n async arrayBuffer () {\n // Easier way... Just a unnecessary overhead\n // const view = new Uint8Array(this.size);\n // await this.stream().getReader({mode: 'byob'}).read(view);\n // return view.buffer;\n\n const data = new Uint8Array(this.size)\n let offset = 0\n for await (const chunk of toIterator(this.#parts, false)) {\n data.set(chunk, offset)\n offset += chunk.length\n }\n\n return data.buffer\n }\n\n stream () {\n const it = toIterator(this.#parts, true)\n\n return new globalThis.ReadableStream({\n // @ts-ignore\n type: 'bytes',\n async pull (ctrl) {\n const chunk = await it.next()\n chunk.done ? ctrl.close() : ctrl.enqueue(chunk.value)\n },\n\n async cancel () {\n await it.return()\n }\n })\n }\n\n /**\n * The Blob interface's slice() method creates and returns a\n * new Blob object which contains data from a subset of the\n * blob on which it's called.\n *\n * @param {number} [start]\n * @param {number} [end]\n * @param {string} [type]\n */\n slice (start = 0, end = this.size, type = '') {\n const { size } = this\n\n let relativeStart = start < 0 ? Math.max(size + start, 0) : Math.min(start, size)\n let relativeEnd = end < 0 ? Math.max(size + end, 0) : Math.min(end, size)\n\n const span = Math.max(relativeEnd - relativeStart, 0)\n const parts = this.#parts\n const blobParts = []\n let added = 0\n\n for (const part of parts) {\n // don't add the overflow to new blobParts\n if (added >= span) {\n break\n }\n\n const size = ArrayBuffer.isView(part) ? 
part.byteLength : part.size\n if (relativeStart && size <= relativeStart) {\n // Skip the beginning and change the relative\n // start & end position as we skip the unwanted parts\n relativeStart -= size\n relativeEnd -= size\n } else {\n let chunk\n if (ArrayBuffer.isView(part)) {\n chunk = part.subarray(relativeStart, Math.min(size, relativeEnd))\n added += chunk.byteLength\n } else {\n chunk = part.slice(relativeStart, Math.min(size, relativeEnd))\n added += chunk.size\n }\n relativeEnd -= size\n blobParts.push(chunk)\n relativeStart = 0 // All next sequential parts should start at 0\n }\n }\n\n const blob = new Blob([], { type: String(type).toLowerCase() })\n blob.#size = span\n blob.#parts = blobParts\n\n return blob\n }\n\n get [Symbol.toStringTag] () {\n return 'Blob'\n }\n\n static [Symbol.hasInstance] (object) {\n return (\n object &&\n typeof object === 'object' &&\n typeof object.constructor === 'function' &&\n (\n typeof object.stream === 'function' ||\n typeof object.arrayBuffer === 'function'\n ) &&\n /^(Blob|File)$/.test(object[Symbol.toStringTag])\n )\n }\n}\n\nObject.defineProperties(_Blob.prototype, {\n size: { enumerable: true },\n type: { enumerable: true },\n slice: { enumerable: true }\n})\n\n/** @type {typeof globalThis.Blob} */\nexport const Blob = _Blob\nexport default Blob\n","/*! formdata-polyfill. MIT License. Jimmy Wärting */\n\nimport C from 'fetch-blob'\nimport F from 'fetch-blob/file.js'\n\nvar {toStringTag:t,iterator:i,hasInstance:h}=Symbol,\nr=Math.random,\nm='append,set,get,getAll,delete,keys,values,entries,forEach,constructor'.split(','),\nf=(a,b,c)=>(a+='',/^(Blob|File)$/.test(b && b[t])?[(c=c!==void 0?c+'':b[t]=='File'?b.name:'blob',a),b.name!==c||b[t]=='blob'?new F([b],c,b):b]:[a,b+'']),\ne=(c,f)=>(f?c:c.replace(/\\r?\\n|\\r/g,'\\r\\n')).replace(/\\n/g,'%0A').replace(/\\r/g,'%0D').replace(/\"/g,'%22'),\nx=(n, a, e)=>{if(a.lengthtypeof o[m]!='function')}\nappend(...a){x('append',arguments,2);this.#d.push(f(...a))}\ndelete(a){x('delete',arguments,1);a+='';this.#d=this.#d.filter(([b])=>b!==a)}\nget(a){x('get',arguments,1);a+='';for(var b=this.#d,l=b.length,c=0;cc[0]===a&&b.push(c[1]));return b}\nhas(a){x('has',arguments,1);a+='';return this.#d.some(b=>b[0]===a)}\nforEach(a,b){x('forEach',arguments,1);for(var [c,d]of this)a.call(b,d,c,this)}\nset(...a){x('set',arguments,2);var b=[],c=!0;a=f(...a);this.#d.forEach(d=>{d[0]===a[0]?c&&(c=!b.push(a)):b.push(d)});c&&b.push(a);this.#d=b}\n*entries(){yield*this.#d}\n*keys(){for(var[a]of this)yield a}\n*values(){for(var[,a]of this)yield a}}\n\n/** @param {FormData} F */\nexport function formDataToBlob (F,B=C){\nvar b=`${r()}${r()}`.replace(/\\./g, '').slice(-28).padStart(32, '-'),c=[],p=`--${b}\\r\\nContent-Disposition: form-data; name=\"`\nF.forEach((v,n)=>typeof v=='string'\n?c.push(p+e(n)+`\"\\r\\n\\r\\n${v.replace(/\\r(?!\\n)|(? 
{\n\treturn (\n\t\ttypeof object === 'object' &&\n\t\ttypeof object.append === 'function' &&\n\t\ttypeof object.delete === 'function' &&\n\t\ttypeof object.get === 'function' &&\n\t\ttypeof object.getAll === 'function' &&\n\t\ttypeof object.has === 'function' &&\n\t\ttypeof object.set === 'function' &&\n\t\ttypeof object.sort === 'function' &&\n\t\tobject[NAME] === 'URLSearchParams'\n\t);\n};\n\n/**\n * Check if `object` is a W3C `Blob` object (which `File` inherits from)\n * @param {*} object - Object to check for\n * @return {boolean}\n */\nexport const isBlob = object => {\n\treturn (\n\t\tobject &&\n\t\ttypeof object === 'object' &&\n\t\ttypeof object.arrayBuffer === 'function' &&\n\t\ttypeof object.type === 'string' &&\n\t\ttypeof object.stream === 'function' &&\n\t\ttypeof object.constructor === 'function' &&\n\t\t/^(Blob|File)$/.test(object[NAME])\n\t);\n};\n\n/**\n * Check if `obj` is an instance of AbortSignal.\n * @param {*} object - Object to check for\n * @return {boolean}\n */\nexport const isAbortSignal = object => {\n\treturn (\n\t\ttypeof object === 'object' && (\n\t\t\tobject[NAME] === 'AbortSignal' ||\n\t\t\tobject[NAME] === 'EventTarget'\n\t\t)\n\t);\n};\n\n/**\n * isDomainOrSubdomain reports whether sub is a subdomain (or exact match) of\n * the parent domain.\n *\n * Both domains must already be in canonical form.\n * @param {string|URL} original\n * @param {string|URL} destination\n */\nexport const isDomainOrSubdomain = (destination, original) => {\n\tconst orig = new URL(original).hostname;\n\tconst dest = new URL(destination).hostname;\n\n\treturn orig === dest || orig.endsWith(`.${dest}`);\n};\n\n/**\n * isSameProtocol reports whether the two provided URLs use the same protocol.\n *\n * Both domains must already be in canonical form.\n * @param {string|URL} original\n * @param {string|URL} destination\n */\nexport const isSameProtocol = (destination, original) => {\n\tconst orig = new URL(original).protocol;\n\tconst dest = new URL(destination).protocol;\n\n\treturn orig === dest;\n};\n","\n/**\n * Body.js\n *\n * Body interface provides common methods for Request and Response\n */\n\nimport Stream, {PassThrough} from 'node:stream';\nimport {types, deprecate, promisify} from 'node:util';\nimport {Buffer} from 'node:buffer';\n\nimport Blob from 'fetch-blob';\nimport {FormData, formDataToBlob} from 'formdata-polyfill/esm.min.js';\n\nimport {FetchError} from './errors/fetch-error.js';\nimport {FetchBaseError} from './errors/base.js';\nimport {isBlob, isURLSearchParameters} from './utils/is.js';\n\nconst pipeline = promisify(Stream.pipeline);\nconst INTERNALS = Symbol('Body internals');\n\n/**\n * Body mixin\n *\n * Ref: https://fetch.spec.whatwg.org/#body\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nexport default class Body {\n\tconstructor(body, {\n\t\tsize = 0\n\t} = {}) {\n\t\tlet boundary = null;\n\n\t\tif (body === null) {\n\t\t\t// Body is undefined or null\n\t\t\tbody = null;\n\t\t} else if (isURLSearchParameters(body)) {\n\t\t\t// Body is a URLSearchParams\n\t\t\tbody = Buffer.from(body.toString());\n\t\t} else if (isBlob(body)) {\n\t\t\t// Body is blob\n\t\t} else if (Buffer.isBuffer(body)) {\n\t\t\t// Body is Buffer\n\t\t} else if (types.isAnyArrayBuffer(body)) {\n\t\t\t// Body is ArrayBuffer\n\t\t\tbody = Buffer.from(body);\n\t\t} else if (ArrayBuffer.isView(body)) {\n\t\t\t// Body is ArrayBufferView\n\t\t\tbody = Buffer.from(body.buffer, body.byteOffset, body.byteLength);\n\t\t} else if (body 
instanceof Stream) {\n\t\t\t// Body is stream\n\t\t} else if (body instanceof FormData) {\n\t\t\t// Body is FormData\n\t\t\tbody = formDataToBlob(body);\n\t\t\tboundary = body.type.split('=')[1];\n\t\t} else {\n\t\t\t// None of the above\n\t\t\t// coerce to string then buffer\n\t\t\tbody = Buffer.from(String(body));\n\t\t}\n\n\t\tlet stream = body;\n\n\t\tif (Buffer.isBuffer(body)) {\n\t\t\tstream = Stream.Readable.from(body);\n\t\t} else if (isBlob(body)) {\n\t\t\tstream = Stream.Readable.from(body.stream());\n\t\t}\n\n\t\tthis[INTERNALS] = {\n\t\t\tbody,\n\t\t\tstream,\n\t\t\tboundary,\n\t\t\tdisturbed: false,\n\t\t\terror: null\n\t\t};\n\t\tthis.size = size;\n\n\t\tif (body instanceof Stream) {\n\t\t\tbody.on('error', error_ => {\n\t\t\t\tconst error = error_ instanceof FetchBaseError ?\n\t\t\t\t\terror_ :\n\t\t\t\t\tnew FetchError(`Invalid response body while trying to fetch ${this.url}: ${error_.message}`, 'system', error_);\n\t\t\t\tthis[INTERNALS].error = error;\n\t\t\t});\n\t\t}\n\t}\n\n\tget body() {\n\t\treturn this[INTERNALS].stream;\n\t}\n\n\tget bodyUsed() {\n\t\treturn this[INTERNALS].disturbed;\n\t}\n\n\t/**\n\t * Decode response as ArrayBuffer\n\t *\n\t * @return Promise\n\t */\n\tasync arrayBuffer() {\n\t\tconst {buffer, byteOffset, byteLength} = await consumeBody(this);\n\t\treturn buffer.slice(byteOffset, byteOffset + byteLength);\n\t}\n\n\tasync formData() {\n\t\tconst ct = this.headers.get('content-type');\n\n\t\tif (ct.startsWith('application/x-www-form-urlencoded')) {\n\t\t\tconst formData = new FormData();\n\t\t\tconst parameters = new URLSearchParams(await this.text());\n\n\t\t\tfor (const [name, value] of parameters) {\n\t\t\t\tformData.append(name, value);\n\t\t\t}\n\n\t\t\treturn formData;\n\t\t}\n\n\t\tconst {toFormData} = await import('./utils/multipart-parser.js');\n\t\treturn toFormData(this.body, ct);\n\t}\n\n\t/**\n\t * Return raw response as Blob\n\t *\n\t * @return Promise\n\t */\n\tasync blob() {\n\t\tconst ct = (this.headers && this.headers.get('content-type')) || (this[INTERNALS].body && this[INTERNALS].body.type) || '';\n\t\tconst buf = await this.arrayBuffer();\n\n\t\treturn new Blob([buf], {\n\t\t\ttype: ct\n\t\t});\n\t}\n\n\t/**\n\t * Decode response as json\n\t *\n\t * @return Promise\n\t */\n\tasync json() {\n\t\tconst text = await this.text();\n\t\treturn JSON.parse(text);\n\t}\n\n\t/**\n\t * Decode response as text\n\t *\n\t * @return Promise\n\t */\n\tasync text() {\n\t\tconst buffer = await consumeBody(this);\n\t\treturn new TextDecoder().decode(buffer);\n\t}\n\n\t/**\n\t * Decode response as buffer (non-spec api)\n\t *\n\t * @return Promise\n\t */\n\tbuffer() {\n\t\treturn consumeBody(this);\n\t}\n}\n\nBody.prototype.buffer = deprecate(Body.prototype.buffer, 'Please use \\'response.arrayBuffer()\\' instead of \\'response.buffer()\\'', 'node-fetch#buffer');\n\n// In browsers, all properties are enumerable.\nObject.defineProperties(Body.prototype, {\n\tbody: {enumerable: true},\n\tbodyUsed: {enumerable: true},\n\tarrayBuffer: {enumerable: true},\n\tblob: {enumerable: true},\n\tjson: {enumerable: true},\n\ttext: {enumerable: true},\n\tdata: {get: deprecate(() => {},\n\t\t'data doesn\\'t exist, use json(), text(), arrayBuffer(), or body instead',\n\t\t'https://github.com/node-fetch/node-fetch/issues/1000 (response)')}\n});\n\n/**\n * Consume and convert an entire Body to a Buffer.\n *\n * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body\n *\n * @return Promise\n */\nasync function consumeBody(data) {\n\tif 
(data[INTERNALS].disturbed) {\n\t\tthrow new TypeError(`body used already for: ${data.url}`);\n\t}\n\n\tdata[INTERNALS].disturbed = true;\n\n\tif (data[INTERNALS].error) {\n\t\tthrow data[INTERNALS].error;\n\t}\n\n\tconst {body} = data;\n\n\t// Body is null\n\tif (body === null) {\n\t\treturn Buffer.alloc(0);\n\t}\n\n\t/* c8 ignore next 3 */\n\tif (!(body instanceof Stream)) {\n\t\treturn Buffer.alloc(0);\n\t}\n\n\t// Body is stream\n\t// get ready to actually consume the body\n\tconst accum = [];\n\tlet accumBytes = 0;\n\n\ttry {\n\t\tfor await (const chunk of body) {\n\t\t\tif (data.size > 0 && accumBytes + chunk.length > data.size) {\n\t\t\t\tconst error = new FetchError(`content size at ${data.url} over limit: ${data.size}`, 'max-size');\n\t\t\t\tbody.destroy(error);\n\t\t\t\tthrow error;\n\t\t\t}\n\n\t\t\taccumBytes += chunk.length;\n\t\t\taccum.push(chunk);\n\t\t}\n\t} catch (error) {\n\t\tconst error_ = error instanceof FetchBaseError ? error : new FetchError(`Invalid response body while trying to fetch ${data.url}: ${error.message}`, 'system', error);\n\t\tthrow error_;\n\t}\n\n\tif (body.readableEnded === true || body._readableState.ended === true) {\n\t\ttry {\n\t\t\tif (accum.every(c => typeof c === 'string')) {\n\t\t\t\treturn Buffer.from(accum.join(''));\n\t\t\t}\n\n\t\t\treturn Buffer.concat(accum, accumBytes);\n\t\t} catch (error) {\n\t\t\tthrow new FetchError(`Could not create Buffer from response body for ${data.url}: ${error.message}`, 'system', error);\n\t\t}\n\t} else {\n\t\tthrow new FetchError(`Premature close of server response while trying to fetch ${data.url}`);\n\t}\n}\n\n/**\n * Clone body given Res/Req instance\n *\n * @param Mixed instance Response or Request instance\n * @param String highWaterMark highWaterMark for both PassThrough body streams\n * @return Mixed\n */\nexport const clone = (instance, highWaterMark) => {\n\tlet p1;\n\tlet p2;\n\tlet {body} = instance[INTERNALS];\n\n\t// Don't allow cloning a used body\n\tif (instance.bodyUsed) {\n\t\tthrow new Error('cannot clone body after it is used');\n\t}\n\n\t// Check that body is a stream and not form-data object\n\t// note: we can't clone the form-data object without having it as a dependency\n\tif ((body instanceof Stream) && (typeof body.getBoundary !== 'function')) {\n\t\t// Tee instance body\n\t\tp1 = new PassThrough({highWaterMark});\n\t\tp2 = new PassThrough({highWaterMark});\n\t\tbody.pipe(p1);\n\t\tbody.pipe(p2);\n\t\t// Set instance body to teed body and return the other teed body\n\t\tinstance[INTERNALS].stream = p1;\n\t\tbody = p2;\n\t}\n\n\treturn body;\n};\n\nconst getNonSpecFormDataBoundary = deprecate(\n\tbody => body.getBoundary(),\n\t'form-data doesn\\'t follow the spec and requires special treatment. 
Use alternative package',\n\t'https://github.com/node-fetch/node-fetch/issues/1167'\n);\n\n/**\n * Performs the operation \"extract a `Content-Type` value from |object|\" as\n * specified in the specification:\n * https://fetch.spec.whatwg.org/#concept-bodyinit-extract\n *\n * This function assumes that instance.body is present.\n *\n * @param {any} body Any options.body input\n * @returns {string | null}\n */\nexport const extractContentType = (body, request) => {\n\t// Body is null or undefined\n\tif (body === null) {\n\t\treturn null;\n\t}\n\n\t// Body is string\n\tif (typeof body === 'string') {\n\t\treturn 'text/plain;charset=UTF-8';\n\t}\n\n\t// Body is a URLSearchParams\n\tif (isURLSearchParameters(body)) {\n\t\treturn 'application/x-www-form-urlencoded;charset=UTF-8';\n\t}\n\n\t// Body is blob\n\tif (isBlob(body)) {\n\t\treturn body.type || null;\n\t}\n\n\t// Body is a Buffer (Buffer, ArrayBuffer or ArrayBufferView)\n\tif (Buffer.isBuffer(body) || types.isAnyArrayBuffer(body) || ArrayBuffer.isView(body)) {\n\t\treturn null;\n\t}\n\n\tif (body instanceof FormData) {\n\t\treturn `multipart/form-data; boundary=${request[INTERNALS].boundary}`;\n\t}\n\n\t// Detect form data input from form-data module\n\tif (body && typeof body.getBoundary === 'function') {\n\t\treturn `multipart/form-data;boundary=${getNonSpecFormDataBoundary(body)}`;\n\t}\n\n\t// Body is stream - can't really do much about this\n\tif (body instanceof Stream) {\n\t\treturn null;\n\t}\n\n\t// Body constructor defaults other things to string\n\treturn 'text/plain;charset=UTF-8';\n};\n\n/**\n * The Fetch Standard treats this as if \"total bytes\" is a property on the body.\n * For us, we have to explicitly get it with a function.\n *\n * ref: https://fetch.spec.whatwg.org/#concept-body-total-bytes\n *\n * @param {any} obj.body Body object from the Body instance.\n * @returns {number | null}\n */\nexport const getTotalBytes = request => {\n\tconst {body} = request[INTERNALS];\n\n\t// Body is null or undefined\n\tif (body === null) {\n\t\treturn 0;\n\t}\n\n\t// Body is Blob\n\tif (isBlob(body)) {\n\t\treturn body.size;\n\t}\n\n\t// Body is Buffer\n\tif (Buffer.isBuffer(body)) {\n\t\treturn body.length;\n\t}\n\n\t// Detect form data input from form-data module\n\tif (body && typeof body.getLengthSync === 'function') {\n\t\treturn body.hasKnownLength && body.hasKnownLength() ? body.getLengthSync() : null;\n\t}\n\n\t// Body is stream\n\treturn null;\n};\n\n/**\n * Write a Body to a Node.js WritableStream (e.g. 
http.Request) object.\n *\n * @param {Stream.Writable} dest The stream to write to.\n * @param obj.body Body object from the Body instance.\n * @returns {Promise}\n */\nexport const writeToStream = async (dest, {body}) => {\n\tif (body === null) {\n\t\t// Body is null\n\t\tdest.end();\n\t} else {\n\t\t// Body is stream\n\t\tawait pipeline(body, dest);\n\t}\n};\n","/**\n * Headers.js\n *\n * Headers class offers convenient helpers\n */\n\nimport {types} from 'node:util';\nimport http from 'node:http';\n\n/* c8 ignore next 9 */\nconst validateHeaderName = typeof http.validateHeaderName === 'function' ?\n\thttp.validateHeaderName :\n\tname => {\n\t\tif (!/^[\\^`\\-\\w!#$%&'*+.|~]+$/.test(name)) {\n\t\t\tconst error = new TypeError(`Header name must be a valid HTTP token [${name}]`);\n\t\t\tObject.defineProperty(error, 'code', {value: 'ERR_INVALID_HTTP_TOKEN'});\n\t\t\tthrow error;\n\t\t}\n\t};\n\n/* c8 ignore next 9 */\nconst validateHeaderValue = typeof http.validateHeaderValue === 'function' ?\n\thttp.validateHeaderValue :\n\t(name, value) => {\n\t\tif (/[^\\t\\u0020-\\u007E\\u0080-\\u00FF]/.test(value)) {\n\t\t\tconst error = new TypeError(`Invalid character in header content [\"${name}\"]`);\n\t\t\tObject.defineProperty(error, 'code', {value: 'ERR_INVALID_CHAR'});\n\t\t\tthrow error;\n\t\t}\n\t};\n\n/**\n * @typedef {Headers | Record | Iterable | Iterable>} HeadersInit\n */\n\n/**\n * This Fetch API interface allows you to perform various actions on HTTP request and response headers.\n * These actions include retrieving, setting, adding to, and removing.\n * A Headers object has an associated header list, which is initially empty and consists of zero or more name and value pairs.\n * You can add to this using methods like append() (see Examples.)\n * In all methods of this interface, header names are matched by case-insensitive byte sequence.\n *\n */\nexport default class Headers extends URLSearchParams {\n\t/**\n\t * Headers class\n\t *\n\t * @constructor\n\t * @param {HeadersInit} [init] - Response headers\n\t */\n\tconstructor(init) {\n\t\t// Validate and normalize init object in [name, value(s)][]\n\t\t/** @type {string[][]} */\n\t\tlet result = [];\n\t\tif (init instanceof Headers) {\n\t\t\tconst raw = init.raw();\n\t\t\tfor (const [name, values] of Object.entries(raw)) {\n\t\t\t\tresult.push(...values.map(value => [name, value]));\n\t\t\t}\n\t\t} else if (init == null) { // eslint-disable-line no-eq-null, eqeqeq\n\t\t\t// No op\n\t\t} else if (typeof init === 'object' && !types.isBoxedPrimitive(init)) {\n\t\t\tconst method = init[Symbol.iterator];\n\t\t\t// eslint-disable-next-line no-eq-null, eqeqeq\n\t\t\tif (method == null) {\n\t\t\t\t// Record\n\t\t\t\tresult.push(...Object.entries(init));\n\t\t\t} else {\n\t\t\t\tif (typeof method !== 'function') {\n\t\t\t\t\tthrow new TypeError('Header pairs must be iterable');\n\t\t\t\t}\n\n\t\t\t\t// Sequence>\n\t\t\t\t// Note: per spec we have to first exhaust the lists then process them\n\t\t\t\tresult = [...init]\n\t\t\t\t\t.map(pair => {\n\t\t\t\t\t\tif (\n\t\t\t\t\t\t\ttypeof pair !== 'object' || types.isBoxedPrimitive(pair)\n\t\t\t\t\t\t) {\n\t\t\t\t\t\t\tthrow new TypeError('Each header pair must be an iterable object');\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\treturn [...pair];\n\t\t\t\t\t}).map(pair => {\n\t\t\t\t\t\tif (pair.length !== 2) {\n\t\t\t\t\t\t\tthrow new TypeError('Each header pair must be a name/value tuple');\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\treturn [...pair];\n\t\t\t\t\t});\n\t\t\t}\n\t\t} else {\n\t\t\tthrow new TypeError('Failed 
to construct \\'Headers\\': The provided value is not of type \\'(sequence> or record)');\n\t\t}\n\n\t\t// Validate and lowercase\n\t\tresult =\n\t\t\tresult.length > 0 ?\n\t\t\t\tresult.map(([name, value]) => {\n\t\t\t\t\tvalidateHeaderName(name);\n\t\t\t\t\tvalidateHeaderValue(name, String(value));\n\t\t\t\t\treturn [String(name).toLowerCase(), String(value)];\n\t\t\t\t}) :\n\t\t\t\tundefined;\n\n\t\tsuper(result);\n\n\t\t// Returning a Proxy that will lowercase key names, validate parameters and sort keys\n\t\t// eslint-disable-next-line no-constructor-return\n\t\treturn new Proxy(this, {\n\t\t\tget(target, p, receiver) {\n\t\t\t\tswitch (p) {\n\t\t\t\t\tcase 'append':\n\t\t\t\t\tcase 'set':\n\t\t\t\t\t\treturn (name, value) => {\n\t\t\t\t\t\t\tvalidateHeaderName(name);\n\t\t\t\t\t\t\tvalidateHeaderValue(name, String(value));\n\t\t\t\t\t\t\treturn URLSearchParams.prototype[p].call(\n\t\t\t\t\t\t\t\ttarget,\n\t\t\t\t\t\t\t\tString(name).toLowerCase(),\n\t\t\t\t\t\t\t\tString(value)\n\t\t\t\t\t\t\t);\n\t\t\t\t\t\t};\n\n\t\t\t\t\tcase 'delete':\n\t\t\t\t\tcase 'has':\n\t\t\t\t\tcase 'getAll':\n\t\t\t\t\t\treturn name => {\n\t\t\t\t\t\t\tvalidateHeaderName(name);\n\t\t\t\t\t\t\treturn URLSearchParams.prototype[p].call(\n\t\t\t\t\t\t\t\ttarget,\n\t\t\t\t\t\t\t\tString(name).toLowerCase()\n\t\t\t\t\t\t\t);\n\t\t\t\t\t\t};\n\n\t\t\t\t\tcase 'keys':\n\t\t\t\t\t\treturn () => {\n\t\t\t\t\t\t\ttarget.sort();\n\t\t\t\t\t\t\treturn new Set(URLSearchParams.prototype.keys.call(target)).keys();\n\t\t\t\t\t\t};\n\n\t\t\t\t\tdefault:\n\t\t\t\t\t\treturn Reflect.get(target, p, receiver);\n\t\t\t\t}\n\t\t\t}\n\t\t});\n\t\t/* c8 ignore next */\n\t}\n\n\tget [Symbol.toStringTag]() {\n\t\treturn this.constructor.name;\n\t}\n\n\ttoString() {\n\t\treturn Object.prototype.toString.call(this);\n\t}\n\n\tget(name) {\n\t\tconst values = this.getAll(name);\n\t\tif (values.length === 0) {\n\t\t\treturn null;\n\t\t}\n\n\t\tlet value = values.join(', ');\n\t\tif (/^content-encoding$/i.test(name)) {\n\t\t\tvalue = value.toLowerCase();\n\t\t}\n\n\t\treturn value;\n\t}\n\n\tforEach(callback, thisArg = undefined) {\n\t\tfor (const name of this.keys()) {\n\t\t\tReflect.apply(callback, thisArg, [this.get(name), name, this]);\n\t\t}\n\t}\n\n\t* values() {\n\t\tfor (const name of this.keys()) {\n\t\t\tyield this.get(name);\n\t\t}\n\t}\n\n\t/**\n\t * @type {() => IterableIterator<[string, string]>}\n\t */\n\t* entries() {\n\t\tfor (const name of this.keys()) {\n\t\t\tyield [name, this.get(name)];\n\t\t}\n\t}\n\n\t[Symbol.iterator]() {\n\t\treturn this.entries();\n\t}\n\n\t/**\n\t * Node-fetch non-spec method\n\t * returning all headers and their values as array\n\t * @returns {Record}\n\t */\n\traw() {\n\t\treturn [...this.keys()].reduce((result, key) => {\n\t\t\tresult[key] = this.getAll(key);\n\t\t\treturn result;\n\t\t}, {});\n\t}\n\n\t/**\n\t * For better console.log(headers) and also to convert Headers into Node.js Request compatible format\n\t */\n\t[Symbol.for('nodejs.util.inspect.custom')]() {\n\t\treturn [...this.keys()].reduce((result, key) => {\n\t\t\tconst values = this.getAll(key);\n\t\t\t// Http.request() only supports string as Host header.\n\t\t\t// This hack makes specifying custom Host header possible.\n\t\t\tif (key === 'host') {\n\t\t\t\tresult[key] = values[0];\n\t\t\t} else {\n\t\t\t\tresult[key] = values.length > 1 ? 
values : values[0];\n\t\t\t}\n\n\t\t\treturn result;\n\t\t}, {});\n\t}\n}\n\n/**\n * Re-shaping object for Web IDL tests\n * Only need to do it for overridden methods\n */\nObject.defineProperties(\n\tHeaders.prototype,\n\t['get', 'entries', 'forEach', 'values'].reduce((result, property) => {\n\t\tresult[property] = {enumerable: true};\n\t\treturn result;\n\t}, {})\n);\n\n/**\n * Create a Headers object from an http.IncomingMessage.rawHeaders, ignoring those that do\n * not conform to HTTP grammar productions.\n * @param {import('http').IncomingMessage['rawHeaders']} headers\n */\nexport function fromRawHeaders(headers = []) {\n\treturn new Headers(\n\t\theaders\n\t\t\t// Split into pairs\n\t\t\t.reduce((result, value, index, array) => {\n\t\t\t\tif (index % 2 === 0) {\n\t\t\t\t\tresult.push(array.slice(index, index + 2));\n\t\t\t\t}\n\n\t\t\t\treturn result;\n\t\t\t}, [])\n\t\t\t.filter(([name, value]) => {\n\t\t\t\ttry {\n\t\t\t\t\tvalidateHeaderName(name);\n\t\t\t\t\tvalidateHeaderValue(name, String(value));\n\t\t\t\t\treturn true;\n\t\t\t\t} catch {\n\t\t\t\t\treturn false;\n\t\t\t\t}\n\t\t\t})\n\n\t);\n}\n","const redirectStatus = new Set([301, 302, 303, 307, 308]);\n\n/**\n * Redirect code matching\n *\n * @param {number} code - Status code\n * @return {boolean}\n */\nexport const isRedirect = code => {\n\treturn redirectStatus.has(code);\n};\n","/**\n * Response.js\n *\n * Response class provides content decoding\n */\n\nimport Headers from './headers.js';\nimport Body, {clone, extractContentType} from './body.js';\nimport {isRedirect} from './utils/is-redirect.js';\n\nconst INTERNALS = Symbol('Response internals');\n\n/**\n * Response class\n *\n * Ref: https://fetch.spec.whatwg.org/#response-class\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nexport default class Response extends Body {\n\tconstructor(body = null, options = {}) {\n\t\tsuper(body, options);\n\n\t\t// eslint-disable-next-line no-eq-null, eqeqeq, no-negated-condition\n\t\tconst status = options.status != null ? 
options.status : 200;\n\n\t\tconst headers = new Headers(options.headers);\n\n\t\tif (body !== null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(body, this);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tthis[INTERNALS] = {\n\t\t\ttype: 'default',\n\t\t\turl: options.url,\n\t\t\tstatus,\n\t\t\tstatusText: options.statusText || '',\n\t\t\theaders,\n\t\t\tcounter: options.counter,\n\t\t\thighWaterMark: options.highWaterMark\n\t\t};\n\t}\n\n\tget type() {\n\t\treturn this[INTERNALS].type;\n\t}\n\n\tget url() {\n\t\treturn this[INTERNALS].url || '';\n\t}\n\n\tget status() {\n\t\treturn this[INTERNALS].status;\n\t}\n\n\t/**\n\t * Convenience property representing if the request ended normally\n\t */\n\tget ok() {\n\t\treturn this[INTERNALS].status >= 200 && this[INTERNALS].status < 300;\n\t}\n\n\tget redirected() {\n\t\treturn this[INTERNALS].counter > 0;\n\t}\n\n\tget statusText() {\n\t\treturn this[INTERNALS].statusText;\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS].headers;\n\t}\n\n\tget highWaterMark() {\n\t\treturn this[INTERNALS].highWaterMark;\n\t}\n\n\t/**\n\t * Clone this response\n\t *\n\t * @return Response\n\t */\n\tclone() {\n\t\treturn new Response(clone(this, this.highWaterMark), {\n\t\t\ttype: this.type,\n\t\t\turl: this.url,\n\t\t\tstatus: this.status,\n\t\t\tstatusText: this.statusText,\n\t\t\theaders: this.headers,\n\t\t\tok: this.ok,\n\t\t\tredirected: this.redirected,\n\t\t\tsize: this.size,\n\t\t\thighWaterMark: this.highWaterMark\n\t\t});\n\t}\n\n\t/**\n\t * @param {string} url The URL that the new response is to originate from.\n\t * @param {number} status An optional status code for the response (e.g., 302.)\n\t * @returns {Response} A Response object.\n\t */\n\tstatic redirect(url, status = 302) {\n\t\tif (!isRedirect(status)) {\n\t\t\tthrow new RangeError('Failed to execute \"redirect\" on \"response\": Invalid status code');\n\t\t}\n\n\t\treturn new Response(null, {\n\t\t\theaders: {\n\t\t\t\tlocation: new URL(url).toString()\n\t\t\t},\n\t\t\tstatus\n\t\t});\n\t}\n\n\tstatic error() {\n\t\tconst response = new Response(null, {status: 0, statusText: ''});\n\t\tresponse[INTERNALS].type = 'error';\n\t\treturn response;\n\t}\n\n\tget [Symbol.toStringTag]() {\n\t\treturn 'Response';\n\t}\n}\n\nObject.defineProperties(Response.prototype, {\n\ttype: {enumerable: true},\n\turl: {enumerable: true},\n\tstatus: {enumerable: true},\n\tok: {enumerable: true},\n\tredirected: {enumerable: true},\n\tstatusText: {enumerable: true},\n\theaders: {enumerable: true},\n\tclone: {enumerable: true}\n});\n","const __WEBPACK_NAMESPACE_OBJECT__ = require(\"node:url\");","export const getSearch = parsedURL => {\n\tif (parsedURL.search) {\n\t\treturn parsedURL.search;\n\t}\n\n\tconst lastOffset = parsedURL.href.length - 1;\n\tconst hash = parsedURL.hash || (parsedURL.href[lastOffset] === '#' ? '#' : '');\n\treturn parsedURL.href[lastOffset - hash.length] === '?' ? '?' : '';\n};\n","const __WEBPACK_NAMESPACE_OBJECT__ = require(\"node:net\");","import {isIP} from 'node:net';\n\n/**\n * @external URL\n * @see {@link https://developer.mozilla.org/en-US/docs/Web/API/URL|URL}\n */\n\n/**\n * @module utils/referrer\n * @private\n */\n\n/**\n * @see {@link https://w3c.github.io/webappsec-referrer-policy/#strip-url|Referrer Policy §8.4. 
Strip url for use as a referrer}\n * @param {string} URL\n * @param {boolean} [originOnly=false]\n */\nexport function stripURLForUseAsAReferrer(url, originOnly = false) {\n\t// 1. If url is null, return no referrer.\n\tif (url == null) { // eslint-disable-line no-eq-null, eqeqeq\n\t\treturn 'no-referrer';\n\t}\n\n\turl = new URL(url);\n\n\t// 2. If url's scheme is a local scheme, then return no referrer.\n\tif (/^(about|blob|data):$/.test(url.protocol)) {\n\t\treturn 'no-referrer';\n\t}\n\n\t// 3. Set url's username to the empty string.\n\turl.username = '';\n\n\t// 4. Set url's password to null.\n\t// Note: `null` appears to be a mistake as this actually results in the password being `\"null\"`.\n\turl.password = '';\n\n\t// 5. Set url's fragment to null.\n\t// Note: `null` appears to be a mistake as this actually results in the fragment being `\"#null\"`.\n\turl.hash = '';\n\n\t// 6. If the origin-only flag is true, then:\n\tif (originOnly) {\n\t\t// 6.1. Set url's path to null.\n\t\t// Note: `null` appears to be a mistake as this actually results in the path being `\"/null\"`.\n\t\turl.pathname = '';\n\n\t\t// 6.2. Set url's query to null.\n\t\t// Note: `null` appears to be a mistake as this actually results in the query being `\"?null\"`.\n\t\turl.search = '';\n\t}\n\n\t// 7. Return url.\n\treturn url;\n}\n\n/**\n * @see {@link https://w3c.github.io/webappsec-referrer-policy/#enumdef-referrerpolicy|enum ReferrerPolicy}\n */\nexport const ReferrerPolicy = new Set([\n\t'',\n\t'no-referrer',\n\t'no-referrer-when-downgrade',\n\t'same-origin',\n\t'origin',\n\t'strict-origin',\n\t'origin-when-cross-origin',\n\t'strict-origin-when-cross-origin',\n\t'unsafe-url'\n]);\n\n/**\n * @see {@link https://w3c.github.io/webappsec-referrer-policy/#default-referrer-policy|default referrer policy}\n */\nexport const DEFAULT_REFERRER_POLICY = 'strict-origin-when-cross-origin';\n\n/**\n * @see {@link https://w3c.github.io/webappsec-referrer-policy/#referrer-policies|Referrer Policy §3. Referrer Policies}\n * @param {string} referrerPolicy\n * @returns {string} referrerPolicy\n */\nexport function validateReferrerPolicy(referrerPolicy) {\n\tif (!ReferrerPolicy.has(referrerPolicy)) {\n\t\tthrow new TypeError(`Invalid referrerPolicy: ${referrerPolicy}`);\n\t}\n\n\treturn referrerPolicy;\n}\n\n/**\n * @see {@link https://w3c.github.io/webappsec-secure-contexts/#is-origin-trustworthy|Referrer Policy §3.2. Is origin potentially trustworthy?}\n * @param {external:URL} url\n * @returns `true`: \"Potentially Trustworthy\", `false`: \"Not Trustworthy\"\n */\nexport function isOriginPotentiallyTrustworthy(url) {\n\t// 1. If origin is an opaque origin, return \"Not Trustworthy\".\n\t// Not applicable\n\n\t// 2. Assert: origin is a tuple origin.\n\t// Not for implementations\n\n\t// 3. If origin's scheme is either \"https\" or \"wss\", return \"Potentially Trustworthy\".\n\tif (/^(http|ws)s:$/.test(url.protocol)) {\n\t\treturn true;\n\t}\n\n\t// 4. If origin's host component matches one of the CIDR notations 127.0.0.0/8 or ::1/128 [RFC4632], return \"Potentially Trustworthy\".\n\tconst hostIp = url.host.replace(/(^\\[)|(]$)/g, '');\n\tconst hostIPVersion = isIP(hostIp);\n\n\tif (hostIPVersion === 4 && /^127\\./.test(hostIp)) {\n\t\treturn true;\n\t}\n\n\tif (hostIPVersion === 6 && /^(((0+:){7})|(::(0+:){0,6}))0*1$/.test(hostIp)) {\n\t\treturn true;\n\t}\n\n\t// 5. 
If origin's host component is \"localhost\" or falls within \".localhost\", and the user agent conforms to the name resolution rules in [let-localhost-be-localhost], return \"Potentially Trustworthy\".\n\t// We are returning FALSE here because we cannot ensure conformance to\n\t// let-localhost-be-loalhost (https://tools.ietf.org/html/draft-west-let-localhost-be-localhost)\n\tif (url.host === 'localhost' || url.host.endsWith('.localhost')) {\n\t\treturn false;\n\t}\n\n\t// 6. If origin's scheme component is file, return \"Potentially Trustworthy\".\n\tif (url.protocol === 'file:') {\n\t\treturn true;\n\t}\n\n\t// 7. If origin's scheme component is one which the user agent considers to be authenticated, return \"Potentially Trustworthy\".\n\t// Not supported\n\n\t// 8. If origin has been configured as a trustworthy origin, return \"Potentially Trustworthy\".\n\t// Not supported\n\n\t// 9. Return \"Not Trustworthy\".\n\treturn false;\n}\n\n/**\n * @see {@link https://w3c.github.io/webappsec-secure-contexts/#is-url-trustworthy|Referrer Policy §3.3. Is url potentially trustworthy?}\n * @param {external:URL} url\n * @returns `true`: \"Potentially Trustworthy\", `false`: \"Not Trustworthy\"\n */\nexport function isUrlPotentiallyTrustworthy(url) {\n\t// 1. If url is \"about:blank\" or \"about:srcdoc\", return \"Potentially Trustworthy\".\n\tif (/^about:(blank|srcdoc)$/.test(url)) {\n\t\treturn true;\n\t}\n\n\t// 2. If url's scheme is \"data\", return \"Potentially Trustworthy\".\n\tif (url.protocol === 'data:') {\n\t\treturn true;\n\t}\n\n\t// Note: The origin of blob: and filesystem: URLs is the origin of the context in which they were\n\t// created. Therefore, blobs created in a trustworthy origin will themselves be potentially\n\t// trustworthy.\n\tif (/^(blob|filesystem):$/.test(url.protocol)) {\n\t\treturn true;\n\t}\n\n\t// 3. Return the result of executing §3.2 Is origin potentially trustworthy? on url's origin.\n\treturn isOriginPotentiallyTrustworthy(url);\n}\n\n/**\n * Modifies the referrerURL to enforce any extra security policy considerations.\n * @see {@link https://w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer|Referrer Policy §8.3. Determine request's Referrer}, step 7\n * @callback module:utils/referrer~referrerURLCallback\n * @param {external:URL} referrerURL\n * @returns {external:URL} modified referrerURL\n */\n\n/**\n * Modifies the referrerOrigin to enforce any extra security policy considerations.\n * @see {@link https://w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer|Referrer Policy §8.3. Determine request's Referrer}, step 7\n * @callback module:utils/referrer~referrerOriginCallback\n * @param {external:URL} referrerOrigin\n * @returns {external:URL} modified referrerOrigin\n */\n\n/**\n * @see {@link https://w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer|Referrer Policy §8.3. Determine request's Referrer}\n * @param {Request} request\n * @param {object} o\n * @param {module:utils/referrer~referrerURLCallback} o.referrerURLCallback\n * @param {module:utils/referrer~referrerOriginCallback} o.referrerOriginCallback\n * @returns {external:URL} Request's referrer\n */\nexport function determineRequestsReferrer(request, {referrerURLCallback, referrerOriginCallback} = {}) {\n\t// There are 2 notes in the specification about invalid pre-conditions. 
We return null, here, for\n\t// these cases:\n\t// > Note: If request's referrer is \"no-referrer\", Fetch will not call into this algorithm.\n\t// > Note: If request's referrer policy is the empty string, Fetch will not call into this\n\t// > algorithm.\n\tif (request.referrer === 'no-referrer' || request.referrerPolicy === '') {\n\t\treturn null;\n\t}\n\n\t// 1. Let policy be request's associated referrer policy.\n\tconst policy = request.referrerPolicy;\n\n\t// 2. Let environment be request's client.\n\t// not applicable to node.js\n\n\t// 3. Switch on request's referrer:\n\tif (request.referrer === 'about:client') {\n\t\treturn 'no-referrer';\n\t}\n\n\t// \"a URL\": Let referrerSource be request's referrer.\n\tconst referrerSource = request.referrer;\n\n\t// 4. Let request's referrerURL be the result of stripping referrerSource for use as a referrer.\n\tlet referrerURL = stripURLForUseAsAReferrer(referrerSource);\n\n\t// 5. Let referrerOrigin be the result of stripping referrerSource for use as a referrer, with the\n\t// origin-only flag set to true.\n\tlet referrerOrigin = stripURLForUseAsAReferrer(referrerSource, true);\n\n\t// 6. If the result of serializing referrerURL is a string whose length is greater than 4096, set\n\t// referrerURL to referrerOrigin.\n\tif (referrerURL.toString().length > 4096) {\n\t\treferrerURL = referrerOrigin;\n\t}\n\n\t// 7. The user agent MAY alter referrerURL or referrerOrigin at this point to enforce arbitrary\n\t// policy considerations in the interests of minimizing data leakage. For example, the user\n\t// agent could strip the URL down to an origin, modify its host, replace it with an empty\n\t// string, etc.\n\tif (referrerURLCallback) {\n\t\treferrerURL = referrerURLCallback(referrerURL);\n\t}\n\n\tif (referrerOriginCallback) {\n\t\treferrerOrigin = referrerOriginCallback(referrerOrigin);\n\t}\n\n\t// 8.Execute the statements corresponding to the value of policy:\n\tconst currentURL = new URL(request.url);\n\n\tswitch (policy) {\n\t\tcase 'no-referrer':\n\t\t\treturn 'no-referrer';\n\n\t\tcase 'origin':\n\t\t\treturn referrerOrigin;\n\n\t\tcase 'unsafe-url':\n\t\t\treturn referrerURL;\n\n\t\tcase 'strict-origin':\n\t\t\t// 1. If referrerURL is a potentially trustworthy URL and request's current URL is not a\n\t\t\t// potentially trustworthy URL, then return no referrer.\n\t\t\tif (isUrlPotentiallyTrustworthy(referrerURL) && !isUrlPotentiallyTrustworthy(currentURL)) {\n\t\t\t\treturn 'no-referrer';\n\t\t\t}\n\n\t\t\t// 2. Return referrerOrigin.\n\t\t\treturn referrerOrigin.toString();\n\n\t\tcase 'strict-origin-when-cross-origin':\n\t\t\t// 1. If the origin of referrerURL and the origin of request's current URL are the same, then\n\t\t\t// return referrerURL.\n\t\t\tif (referrerURL.origin === currentURL.origin) {\n\t\t\t\treturn referrerURL;\n\t\t\t}\n\n\t\t\t// 2. If referrerURL is a potentially trustworthy URL and request's current URL is not a\n\t\t\t// potentially trustworthy URL, then return no referrer.\n\t\t\tif (isUrlPotentiallyTrustworthy(referrerURL) && !isUrlPotentiallyTrustworthy(currentURL)) {\n\t\t\t\treturn 'no-referrer';\n\t\t\t}\n\n\t\t\t// 3. Return referrerOrigin.\n\t\t\treturn referrerOrigin;\n\n\t\tcase 'same-origin':\n\t\t\t// 1. If the origin of referrerURL and the origin of request's current URL are the same, then\n\t\t\t// return referrerURL.\n\t\t\tif (referrerURL.origin === currentURL.origin) {\n\t\t\t\treturn referrerURL;\n\t\t\t}\n\n\t\t\t// 2. 
Return no referrer.\n\t\t\treturn 'no-referrer';\n\n\t\tcase 'origin-when-cross-origin':\n\t\t\t// 1. If the origin of referrerURL and the origin of request's current URL are the same, then\n\t\t\t// return referrerURL.\n\t\t\tif (referrerURL.origin === currentURL.origin) {\n\t\t\t\treturn referrerURL;\n\t\t\t}\n\n\t\t\t// Return referrerOrigin.\n\t\t\treturn referrerOrigin;\n\n\t\tcase 'no-referrer-when-downgrade':\n\t\t\t// 1. If referrerURL is a potentially trustworthy URL and request's current URL is not a\n\t\t\t// potentially trustworthy URL, then return no referrer.\n\t\t\tif (isUrlPotentiallyTrustworthy(referrerURL) && !isUrlPotentiallyTrustworthy(currentURL)) {\n\t\t\t\treturn 'no-referrer';\n\t\t\t}\n\n\t\t\t// 2. Return referrerURL.\n\t\t\treturn referrerURL;\n\n\t\tdefault:\n\t\t\tthrow new TypeError(`Invalid referrerPolicy: ${policy}`);\n\t}\n}\n\n/**\n * @see {@link https://w3c.github.io/webappsec-referrer-policy/#parse-referrer-policy-from-header|Referrer Policy §8.1. Parse a referrer policy from a Referrer-Policy header}\n * @param {Headers} headers Response headers\n * @returns {string} policy\n */\nexport function parseReferrerPolicyFromHeader(headers) {\n\t// 1. Let policy-tokens be the result of extracting header list values given `Referrer-Policy`\n\t// and response’s header list.\n\tconst policyTokens = (headers.get('referrer-policy') || '').split(/[,\\s]+/);\n\n\t// 2. Let policy be the empty string.\n\tlet policy = '';\n\n\t// 3. For each token in policy-tokens, if token is a referrer policy and token is not the empty\n\t// string, then set policy to token.\n\t// Note: This algorithm loops over multiple policy values to allow deployment of new policy\n\t// values with fallbacks for older user agents, as described in § 11.1 Unknown Policy Values.\n\tfor (const token of policyTokens) {\n\t\tif (token && ReferrerPolicy.has(token)) {\n\t\t\tpolicy = token;\n\t\t}\n\t}\n\n\t// 4. 
Return policy.\n\treturn policy;\n}\n","/**\n * Request.js\n *\n * Request class contains server only options\n *\n * All spec algorithm step numbers are based on https://fetch.spec.whatwg.org/commit-snapshots/ae716822cb3a61843226cd090eefc6589446c1d2/.\n */\n\nimport {format as formatUrl} from 'node:url';\nimport {deprecate} from 'node:util';\nimport Headers from './headers.js';\nimport Body, {clone, extractContentType, getTotalBytes} from './body.js';\nimport {isAbortSignal} from './utils/is.js';\nimport {getSearch} from './utils/get-search.js';\nimport {\n\tvalidateReferrerPolicy, determineRequestsReferrer, DEFAULT_REFERRER_POLICY\n} from './utils/referrer.js';\n\nconst INTERNALS = Symbol('Request internals');\n\n/**\n * Check if `obj` is an instance of Request.\n *\n * @param {*} object\n * @return {boolean}\n */\nconst isRequest = object => {\n\treturn (\n\t\ttypeof object === 'object' &&\n\t\ttypeof object[INTERNALS] === 'object'\n\t);\n};\n\nconst doBadDataWarn = deprecate(() => {},\n\t'.data is not a valid RequestInit property, use .body instead',\n\t'https://github.com/node-fetch/node-fetch/issues/1000 (request)');\n\n/**\n * Request class\n *\n * Ref: https://fetch.spec.whatwg.org/#request-class\n *\n * @param Mixed input Url or Request instance\n * @param Object init Custom options\n * @return Void\n */\nexport default class Request extends Body {\n\tconstructor(input, init = {}) {\n\t\tlet parsedURL;\n\n\t\t// Normalize input and force URL to be encoded as UTF-8 (https://github.com/node-fetch/node-fetch/issues/245)\n\t\tif (isRequest(input)) {\n\t\t\tparsedURL = new URL(input.url);\n\t\t} else {\n\t\t\tparsedURL = new URL(input);\n\t\t\tinput = {};\n\t\t}\n\n\t\tif (parsedURL.username !== '' || parsedURL.password !== '') {\n\t\t\tthrow new TypeError(`${parsedURL} is an url with embedded credentials.`);\n\t\t}\n\n\t\tlet method = init.method || input.method || 'GET';\n\t\tif (/^(delete|get|head|options|post|put)$/i.test(method)) {\n\t\t\tmethod = method.toUpperCase();\n\t\t}\n\n\t\tif (!isRequest(init) && 'data' in init) {\n\t\t\tdoBadDataWarn();\n\t\t}\n\n\t\t// eslint-disable-next-line no-eq-null, eqeqeq\n\t\tif ((init.body != null || (isRequest(input) && input.body !== null)) &&\n\t\t\t(method === 'GET' || method === 'HEAD')) {\n\t\t\tthrow new TypeError('Request with GET/HEAD method cannot have body');\n\t\t}\n\n\t\tconst inputBody = init.body ?\n\t\t\tinit.body :\n\t\t\t(isRequest(input) && input.body !== null ?\n\t\t\t\tclone(input) :\n\t\t\t\tnull);\n\n\t\tsuper(inputBody, {\n\t\t\tsize: init.size || input.size || 0\n\t\t});\n\n\t\tconst headers = new Headers(init.headers || input.headers || {});\n\n\t\tif (inputBody !== null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(inputBody, this);\n\t\t\tif (contentType) {\n\t\t\t\theaders.set('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tlet signal = isRequest(input) ?\n\t\t\tinput.signal :\n\t\t\tnull;\n\t\tif ('signal' in init) {\n\t\t\tsignal = init.signal;\n\t\t}\n\n\t\t// eslint-disable-next-line no-eq-null, eqeqeq\n\t\tif (signal != null && !isAbortSignal(signal)) {\n\t\t\tthrow new TypeError('Expected signal to be an instanceof AbortSignal or EventTarget');\n\t\t}\n\n\t\t// §5.4, Request constructor steps, step 15.1\n\t\t// eslint-disable-next-line no-eq-null, eqeqeq\n\t\tlet referrer = init.referrer == null ? 
input.referrer : init.referrer;\n\t\tif (referrer === '') {\n\t\t\t// §5.4, Request constructor steps, step 15.2\n\t\t\treferrer = 'no-referrer';\n\t\t} else if (referrer) {\n\t\t\t// §5.4, Request constructor steps, step 15.3.1, 15.3.2\n\t\t\tconst parsedReferrer = new URL(referrer);\n\t\t\t// §5.4, Request constructor steps, step 15.3.3, 15.3.4\n\t\t\treferrer = /^about:(\\/\\/)?client$/.test(parsedReferrer) ? 'client' : parsedReferrer;\n\t\t} else {\n\t\t\treferrer = undefined;\n\t\t}\n\n\t\tthis[INTERNALS] = {\n\t\t\tmethod,\n\t\t\tredirect: init.redirect || input.redirect || 'follow',\n\t\t\theaders,\n\t\t\tparsedURL,\n\t\t\tsignal,\n\t\t\treferrer\n\t\t};\n\n\t\t// Node-fetch-only options\n\t\tthis.follow = init.follow === undefined ? (input.follow === undefined ? 20 : input.follow) : init.follow;\n\t\tthis.compress = init.compress === undefined ? (input.compress === undefined ? true : input.compress) : init.compress;\n\t\tthis.counter = init.counter || input.counter || 0;\n\t\tthis.agent = init.agent || input.agent;\n\t\tthis.highWaterMark = init.highWaterMark || input.highWaterMark || 16384;\n\t\tthis.insecureHTTPParser = init.insecureHTTPParser || input.insecureHTTPParser || false;\n\n\t\t// §5.4, Request constructor steps, step 16.\n\t\t// Default is empty string per https://fetch.spec.whatwg.org/#concept-request-referrer-policy\n\t\tthis.referrerPolicy = init.referrerPolicy || input.referrerPolicy || '';\n\t}\n\n\t/** @returns {string} */\n\tget method() {\n\t\treturn this[INTERNALS].method;\n\t}\n\n\t/** @returns {string} */\n\tget url() {\n\t\treturn formatUrl(this[INTERNALS].parsedURL);\n\t}\n\n\t/** @returns {Headers} */\n\tget headers() {\n\t\treturn this[INTERNALS].headers;\n\t}\n\n\tget redirect() {\n\t\treturn this[INTERNALS].redirect;\n\t}\n\n\t/** @returns {AbortSignal} */\n\tget signal() {\n\t\treturn this[INTERNALS].signal;\n\t}\n\n\t// https://fetch.spec.whatwg.org/#dom-request-referrer\n\tget referrer() {\n\t\tif (this[INTERNALS].referrer === 'no-referrer') {\n\t\t\treturn '';\n\t\t}\n\n\t\tif (this[INTERNALS].referrer === 'client') {\n\t\t\treturn 'about:client';\n\t\t}\n\n\t\tif (this[INTERNALS].referrer) {\n\t\t\treturn this[INTERNALS].referrer.toString();\n\t\t}\n\n\t\treturn undefined;\n\t}\n\n\tget referrerPolicy() {\n\t\treturn this[INTERNALS].referrerPolicy;\n\t}\n\n\tset referrerPolicy(referrerPolicy) {\n\t\tthis[INTERNALS].referrerPolicy = validateReferrerPolicy(referrerPolicy);\n\t}\n\n\t/**\n\t * Clone this request\n\t *\n\t * @return Request\n\t */\n\tclone() {\n\t\treturn new Request(this);\n\t}\n\n\tget [Symbol.toStringTag]() {\n\t\treturn 'Request';\n\t}\n}\n\nObject.defineProperties(Request.prototype, {\n\tmethod: {enumerable: true},\n\turl: {enumerable: true},\n\theaders: {enumerable: true},\n\tredirect: {enumerable: true},\n\tclone: {enumerable: true},\n\tsignal: {enumerable: true},\n\treferrer: {enumerable: true},\n\treferrerPolicy: {enumerable: true}\n});\n\n/**\n * Convert a Request to Node.js http request options.\n *\n * @param {Request} request - A Request instance\n * @return The options object to be passed to http.request\n */\nexport const getNodeRequestOptions = request => {\n\tconst {parsedURL} = request[INTERNALS];\n\tconst headers = new Headers(request[INTERNALS].headers);\n\n\t// Fetch step 1.3\n\tif (!headers.has('Accept')) {\n\t\theaders.set('Accept', '*/*');\n\t}\n\n\t// HTTP-network-or-cache fetch steps 2.4-2.7\n\tlet contentLengthValue = null;\n\tif (request.body === null && /^(post|put)$/i.test(request.method)) 
{\n\t\tcontentLengthValue = '0';\n\t}\n\n\tif (request.body !== null) {\n\t\tconst totalBytes = getTotalBytes(request);\n\t\t// Set Content-Length if totalBytes is a number (that is not NaN)\n\t\tif (typeof totalBytes === 'number' && !Number.isNaN(totalBytes)) {\n\t\t\tcontentLengthValue = String(totalBytes);\n\t\t}\n\t}\n\n\tif (contentLengthValue) {\n\t\theaders.set('Content-Length', contentLengthValue);\n\t}\n\n\t// 4.1. Main fetch, step 2.6\n\t// > If request's referrer policy is the empty string, then set request's referrer policy to the\n\t// > default referrer policy.\n\tif (request.referrerPolicy === '') {\n\t\trequest.referrerPolicy = DEFAULT_REFERRER_POLICY;\n\t}\n\n\t// 4.1. Main fetch, step 2.7\n\t// > If request's referrer is not \"no-referrer\", set request's referrer to the result of invoking\n\t// > determine request's referrer.\n\tif (request.referrer && request.referrer !== 'no-referrer') {\n\t\trequest[INTERNALS].referrer = determineRequestsReferrer(request);\n\t} else {\n\t\trequest[INTERNALS].referrer = 'no-referrer';\n\t}\n\n\t// 4.5. HTTP-network-or-cache fetch, step 6.9\n\t// > If httpRequest's referrer is a URL, then append `Referer`/httpRequest's referrer, serialized\n\t// > and isomorphic encoded, to httpRequest's header list.\n\tif (request[INTERNALS].referrer instanceof URL) {\n\t\theaders.set('Referer', request.referrer);\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.11\n\tif (!headers.has('User-Agent')) {\n\t\theaders.set('User-Agent', 'node-fetch');\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.15\n\tif (request.compress && !headers.has('Accept-Encoding')) {\n\t\theaders.set('Accept-Encoding', 'gzip, deflate, br');\n\t}\n\n\tlet {agent} = request;\n\tif (typeof agent === 'function') {\n\t\tagent = agent(parsedURL);\n\t}\n\n\tif (!headers.has('Connection') && !agent) {\n\t\theaders.set('Connection', 'close');\n\t}\n\n\t// HTTP-network fetch step 4.2\n\t// chunked encoding is handled by Node.js\n\n\tconst search = getSearch(parsedURL);\n\n\t// Pass the full URL directly to request(), but overwrite the following\n\t// options:\n\tconst options = {\n\t\t// Overwrite search to retain trailing ? 
(issue #776)\n\t\tpath: parsedURL.pathname + search,\n\t\t// The following options are not expressed in the URL\n\t\tmethod: request.method,\n\t\theaders: headers[Symbol.for('nodejs.util.inspect.custom')](),\n\t\tinsecureHTTPParser: request.insecureHTTPParser,\n\t\tagent\n\t};\n\n\treturn {\n\t\t/** @type {URL} */\n\t\tparsedURL,\n\t\toptions\n\t};\n};\n","import {FetchBaseError} from './base.js';\n\n/**\n * AbortError interface for cancelled requests\n */\nexport class AbortError extends FetchBaseError {\n\tconstructor(message, type = 'aborted') {\n\t\tsuper(message, type);\n\t}\n}\n","/**\n * Index.js\n *\n * a request API compatible with window.fetch\n *\n * All spec algorithm step numbers are based on https://fetch.spec.whatwg.org/commit-snapshots/ae716822cb3a61843226cd090eefc6589446c1d2/.\n */\n\nimport http from 'node:http';\nimport https from 'node:https';\nimport zlib from 'node:zlib';\nimport Stream, {PassThrough, pipeline as pump} from 'node:stream';\nimport {Buffer} from 'node:buffer';\n\nimport dataUriToBuffer from 'data-uri-to-buffer';\n\nimport {writeToStream, clone} from './body.js';\nimport Response from './response.js';\nimport Headers, {fromRawHeaders} from './headers.js';\nimport Request, {getNodeRequestOptions} from './request.js';\nimport {FetchError} from './errors/fetch-error.js';\nimport {AbortError} from './errors/abort-error.js';\nimport {isRedirect} from './utils/is-redirect.js';\nimport {FormData} from 'formdata-polyfill/esm.min.js';\nimport {isDomainOrSubdomain, isSameProtocol} from './utils/is.js';\nimport {parseReferrerPolicyFromHeader} from './utils/referrer.js';\nimport {\n\tBlob,\n\tFile,\n\tfileFromSync,\n\tfileFrom,\n\tblobFromSync,\n\tblobFrom\n} from 'fetch-blob/from.js';\n\nexport {FormData, Headers, Request, Response, FetchError, AbortError, isRedirect};\nexport {Blob, File, fileFromSync, fileFrom, blobFromSync, blobFrom};\n\nconst supportedSchemas = new Set(['data:', 'http:', 'https:']);\n\n/**\n * Fetch function\n *\n * @param {string | URL | import('./request').default} url - Absolute url or Request instance\n * @param {*} [options_] - Fetch options\n * @return {Promise}\n */\nexport default async function fetch(url, options_) {\n\treturn new Promise((resolve, reject) => {\n\t\t// Build request object\n\t\tconst request = new Request(url, options_);\n\t\tconst {parsedURL, options} = getNodeRequestOptions(request);\n\t\tif (!supportedSchemas.has(parsedURL.protocol)) {\n\t\t\tthrow new TypeError(`node-fetch cannot load ${url}. URL scheme \"${parsedURL.protocol.replace(/:$/, '')}\" is not supported.`);\n\t\t}\n\n\t\tif (parsedURL.protocol === 'data:') {\n\t\t\tconst data = dataUriToBuffer(request.url);\n\t\t\tconst response = new Response(data, {headers: {'Content-Type': data.typeFull}});\n\t\t\tresolve(response);\n\t\t\treturn;\n\t\t}\n\n\t\t// Wrap http.request into fetch\n\t\tconst send = (parsedURL.protocol === 'https:' ? 
https : http).request;\n\t\tconst {signal} = request;\n\t\tlet response = null;\n\n\t\tconst abort = () => {\n\t\t\tconst error = new AbortError('The operation was aborted.');\n\t\t\treject(error);\n\t\t\tif (request.body && request.body instanceof Stream.Readable) {\n\t\t\t\trequest.body.destroy(error);\n\t\t\t}\n\n\t\t\tif (!response || !response.body) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tresponse.body.emit('error', error);\n\t\t};\n\n\t\tif (signal && signal.aborted) {\n\t\t\tabort();\n\t\t\treturn;\n\t\t}\n\n\t\tconst abortAndFinalize = () => {\n\t\t\tabort();\n\t\t\tfinalize();\n\t\t};\n\n\t\t// Send request\n\t\tconst request_ = send(parsedURL.toString(), options);\n\n\t\tif (signal) {\n\t\t\tsignal.addEventListener('abort', abortAndFinalize);\n\t\t}\n\n\t\tconst finalize = () => {\n\t\t\trequest_.abort();\n\t\t\tif (signal) {\n\t\t\t\tsignal.removeEventListener('abort', abortAndFinalize);\n\t\t\t}\n\t\t};\n\n\t\trequest_.on('error', error => {\n\t\t\treject(new FetchError(`request to ${request.url} failed, reason: ${error.message}`, 'system', error));\n\t\t\tfinalize();\n\t\t});\n\n\t\tfixResponseChunkedTransferBadEnding(request_, error => {\n\t\t\tif (response && response.body) {\n\t\t\t\tresponse.body.destroy(error);\n\t\t\t}\n\t\t});\n\n\t\t/* c8 ignore next 18 */\n\t\tif (process.version < 'v14') {\n\t\t\t// Before Node.js 14, pipeline() does not fully support async iterators and does not always\n\t\t\t// properly handle when the socket close/end events are out of order.\n\t\t\trequest_.on('socket', s => {\n\t\t\t\tlet endedWithEventsCount;\n\t\t\t\ts.prependListener('end', () => {\n\t\t\t\t\tendedWithEventsCount = s._eventsCount;\n\t\t\t\t});\n\t\t\t\ts.prependListener('close', hadError => {\n\t\t\t\t\t// if end happened before close but the socket didn't emit an error, do it now\n\t\t\t\t\tif (response && endedWithEventsCount < s._eventsCount && !hadError) {\n\t\t\t\t\t\tconst error = new Error('Premature close');\n\t\t\t\t\t\terror.code = 'ERR_STREAM_PREMATURE_CLOSE';\n\t\t\t\t\t\tresponse.body.emit('error', error);\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t});\n\t\t}\n\n\t\trequest_.on('response', response_ => {\n\t\t\trequest_.setTimeout(0);\n\t\t\tconst headers = fromRawHeaders(response_.rawHeaders);\n\n\t\t\t// HTTP fetch step 5\n\t\t\tif (isRedirect(response_.statusCode)) {\n\t\t\t\t// HTTP fetch step 5.2\n\t\t\t\tconst location = headers.get('Location');\n\n\t\t\t\t// HTTP fetch step 5.3\n\t\t\t\tlet locationURL = null;\n\t\t\t\ttry {\n\t\t\t\t\tlocationURL = location === null ? 
null : new URL(location, request.url);\n\t\t\t\t} catch {\n\t\t\t\t\t// error here can only be invalid URL in Location: header\n\t\t\t\t\t// do not throw when options.redirect == manual\n\t\t\t\t\t// let the user extract the errorneous redirect URL\n\t\t\t\t\tif (request.redirect !== 'manual') {\n\t\t\t\t\t\treject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect'));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// HTTP fetch step 5.5\n\t\t\t\tswitch (request.redirect) {\n\t\t\t\t\tcase 'error':\n\t\t\t\t\t\treject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t\tcase 'manual':\n\t\t\t\t\t\t// Nothing to do\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase 'follow': {\n\t\t\t\t\t\t// HTTP-redirect fetch step 2\n\t\t\t\t\t\tif (locationURL === null) {\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 5\n\t\t\t\t\t\tif (request.counter >= request.follow) {\n\t\t\t\t\t\t\treject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 6 (counter increment)\n\t\t\t\t\t\t// Create a new Request object.\n\t\t\t\t\t\tconst requestOptions = {\n\t\t\t\t\t\t\theaders: new Headers(request.headers),\n\t\t\t\t\t\t\tfollow: request.follow,\n\t\t\t\t\t\t\tcounter: request.counter + 1,\n\t\t\t\t\t\t\tagent: request.agent,\n\t\t\t\t\t\t\tcompress: request.compress,\n\t\t\t\t\t\t\tmethod: request.method,\n\t\t\t\t\t\t\tbody: clone(request),\n\t\t\t\t\t\t\tsignal: request.signal,\n\t\t\t\t\t\t\tsize: request.size,\n\t\t\t\t\t\t\treferrer: request.referrer,\n\t\t\t\t\t\t\treferrerPolicy: request.referrerPolicy\n\t\t\t\t\t\t};\n\n\t\t\t\t\t\t// when forwarding sensitive headers like \"Authorization\",\n\t\t\t\t\t\t// \"WWW-Authenticate\", and \"Cookie\" to untrusted targets,\n\t\t\t\t\t\t// headers will be ignored when following a redirect to a domain\n\t\t\t\t\t\t// that is not a subdomain match or exact match of the initial domain.\n\t\t\t\t\t\t// For example, a redirect from \"foo.com\" to either \"foo.com\" or \"sub.foo.com\"\n\t\t\t\t\t\t// will forward the sensitive headers, but a redirect to \"bar.com\" will not.\n\t\t\t\t\t\t// headers will also be ignored when following a redirect to a domain using\n\t\t\t\t\t\t// a different protocol. 
For example, a redirect from \"https://foo.com\" to \"http://foo.com\"\n\t\t\t\t\t\t// will not forward the sensitive headers\n\t\t\t\t\t\tif (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) {\n\t\t\t\t\t\t\tfor (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) {\n\t\t\t\t\t\t\t\trequestOptions.headers.delete(name);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 9\n\t\t\t\t\t\tif (response_.statusCode !== 303 && request.body && options_.body instanceof Stream.Readable) {\n\t\t\t\t\t\t\treject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 11\n\t\t\t\t\t\tif (response_.statusCode === 303 || ((response_.statusCode === 301 || response_.statusCode === 302) && request.method === 'POST')) {\n\t\t\t\t\t\t\trequestOptions.method = 'GET';\n\t\t\t\t\t\t\trequestOptions.body = undefined;\n\t\t\t\t\t\t\trequestOptions.headers.delete('content-length');\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 14\n\t\t\t\t\t\tconst responseReferrerPolicy = parseReferrerPolicyFromHeader(headers);\n\t\t\t\t\t\tif (responseReferrerPolicy) {\n\t\t\t\t\t\t\trequestOptions.referrerPolicy = responseReferrerPolicy;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 15\n\t\t\t\t\t\tresolve(fetch(new Request(locationURL, requestOptions)));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t\t}\n\n\t\t\t\t\tdefault:\n\t\t\t\t\t\treturn reject(new TypeError(`Redirect option '${request.redirect}' is not a valid value of RequestRedirect`));\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// Prepare response\n\t\t\tif (signal) {\n\t\t\t\tresponse_.once('end', () => {\n\t\t\t\t\tsignal.removeEventListener('abort', abortAndFinalize);\n\t\t\t\t});\n\t\t\t}\n\n\t\t\tlet body = pump(response_, new PassThrough(), error => {\n\t\t\t\tif (error) {\n\t\t\t\t\treject(error);\n\t\t\t\t}\n\t\t\t});\n\t\t\t// see https://github.com/nodejs/node/pull/29376\n\t\t\t/* c8 ignore next 3 */\n\t\t\tif (process.version < 'v12.10') {\n\t\t\t\tresponse_.on('aborted', abortAndFinalize);\n\t\t\t}\n\n\t\t\tconst responseOptions = {\n\t\t\t\turl: request.url,\n\t\t\t\tstatus: response_.statusCode,\n\t\t\t\tstatusText: response_.statusMessage,\n\t\t\t\theaders,\n\t\t\t\tsize: request.size,\n\t\t\t\tcounter: request.counter,\n\t\t\t\thighWaterMark: request.highWaterMark\n\t\t\t};\n\n\t\t\t// HTTP-network fetch step 12.1.1.3\n\t\t\tconst codings = headers.get('Content-Encoding');\n\n\t\t\t// HTTP-network fetch step 12.1.1.4: handle content codings\n\n\t\t\t// in following scenarios we ignore compression support\n\t\t\t// 1. compression support is disabled\n\t\t\t// 2. HEAD request\n\t\t\t// 3. no Content-Encoding header\n\t\t\t// 4. no content response (204)\n\t\t\t// 5. 
content not modified response (304)\n\t\t\tif (!request.compress || request.method === 'HEAD' || codings === null || response_.statusCode === 204 || response_.statusCode === 304) {\n\t\t\t\tresponse = new Response(body, responseOptions);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// For Node v6+\n\t\t\t// Be less strict when decoding compressed responses, since sometimes\n\t\t\t// servers send slightly invalid responses that are still accepted\n\t\t\t// by common browsers.\n\t\t\t// Always using Z_SYNC_FLUSH is what cURL does.\n\t\t\tconst zlibOptions = {\n\t\t\t\tflush: zlib.Z_SYNC_FLUSH,\n\t\t\t\tfinishFlush: zlib.Z_SYNC_FLUSH\n\t\t\t};\n\n\t\t\t// For gzip\n\t\t\tif (codings === 'gzip' || codings === 'x-gzip') {\n\t\t\t\tbody = pump(body, zlib.createGunzip(zlibOptions), error => {\n\t\t\t\t\tif (error) {\n\t\t\t\t\t\treject(error);\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\tresponse = new Response(body, responseOptions);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// For deflate\n\t\t\tif (codings === 'deflate' || codings === 'x-deflate') {\n\t\t\t\t// Handle the infamous raw deflate response from old servers\n\t\t\t\t// a hack for old IIS and Apache servers\n\t\t\t\tconst raw = pump(response_, new PassThrough(), error => {\n\t\t\t\t\tif (error) {\n\t\t\t\t\t\treject(error);\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\traw.once('data', chunk => {\n\t\t\t\t\t// See http://stackoverflow.com/questions/37519828\n\t\t\t\t\tif ((chunk[0] & 0x0F) === 0x08) {\n\t\t\t\t\t\tbody = pump(body, zlib.createInflate(), error => {\n\t\t\t\t\t\t\tif (error) {\n\t\t\t\t\t\t\t\treject(error);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t});\n\t\t\t\t\t} else {\n\t\t\t\t\t\tbody = pump(body, zlib.createInflateRaw(), error => {\n\t\t\t\t\t\t\tif (error) {\n\t\t\t\t\t\t\t\treject(error);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t});\n\t\t\t\t\t}\n\n\t\t\t\t\tresponse = new Response(body, responseOptions);\n\t\t\t\t\tresolve(response);\n\t\t\t\t});\n\t\t\t\traw.once('end', () => {\n\t\t\t\t\t// Some old IIS servers return zero-length OK deflate responses, so\n\t\t\t\t\t// 'data' is never emitted. 
See https://github.com/node-fetch/node-fetch/pull/903\n\t\t\t\t\tif (!response) {\n\t\t\t\t\t\tresponse = new Response(body, responseOptions);\n\t\t\t\t\t\tresolve(response);\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// For br\n\t\t\tif (codings === 'br') {\n\t\t\t\tbody = pump(body, zlib.createBrotliDecompress(), error => {\n\t\t\t\t\tif (error) {\n\t\t\t\t\t\treject(error);\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\tresponse = new Response(body, responseOptions);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// Otherwise, use response as-is\n\t\t\tresponse = new Response(body, responseOptions);\n\t\t\tresolve(response);\n\t\t});\n\n\t\t// eslint-disable-next-line promise/prefer-await-to-then\n\t\twriteToStream(request_, request).catch(reject);\n\t});\n}\n\nfunction fixResponseChunkedTransferBadEnding(request, errorCallback) {\n\tconst LAST_CHUNK = Buffer.from('0\\r\\n\\r\\n');\n\n\tlet isChunkedTransfer = false;\n\tlet properLastChunkReceived = false;\n\tlet previousChunk;\n\n\trequest.on('response', response => {\n\t\tconst {headers} = response;\n\t\tisChunkedTransfer = headers['transfer-encoding'] === 'chunked' && !headers['content-length'];\n\t});\n\n\trequest.on('socket', socket => {\n\t\tconst onSocketClose = () => {\n\t\t\tif (isChunkedTransfer && !properLastChunkReceived) {\n\t\t\t\tconst error = new Error('Premature close');\n\t\t\t\terror.code = 'ERR_STREAM_PREMATURE_CLOSE';\n\t\t\t\terrorCallback(error);\n\t\t\t}\n\t\t};\n\n\t\tconst onData = buf => {\n\t\t\tproperLastChunkReceived = Buffer.compare(buf.slice(-5), LAST_CHUNK) === 0;\n\n\t\t\t// Sometimes final 0-length chunk and end of message code are in separate packets\n\t\t\tif (!properLastChunkReceived && previousChunk) {\n\t\t\t\tproperLastChunkReceived = (\n\t\t\t\t\tBuffer.compare(previousChunk.slice(-3), LAST_CHUNK.slice(0, 3)) === 0 &&\n\t\t\t\t\tBuffer.compare(buf.slice(-2), LAST_CHUNK.slice(3)) === 0\n\t\t\t\t);\n\t\t\t}\n\n\t\t\tpreviousChunk = buf;\n\t\t};\n\n\t\tsocket.prependListener('close', onSocketClose);\n\t\tsocket.on('data', onData);\n\n\t\trequest.on('close', () => {\n\t\t\tsocket.removeListener('close', onSocketClose);\n\t\t\tsocket.removeListener('data', onData);\n\t\t});\n\t});\n}\n","// The module cache\nvar __webpack_module_cache__ = {};\n\n// The require function\nfunction __webpack_require__(moduleId) {\n\t// Check if module is in cache\n\tvar cachedModule = __webpack_module_cache__[moduleId];\n\tif (cachedModule !== undefined) {\n\t\treturn cachedModule.exports;\n\t}\n\t// Create a new module (and put it into the cache)\n\tvar module = __webpack_module_cache__[moduleId] = {\n\t\t// no module.id needed\n\t\t// no module.loaded needed\n\t\texports: {}\n\t};\n\n\t// Execute the module function\n\tvar threw = true;\n\ttry {\n\t\t__webpack_modules__[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\t\tthrew = false;\n\t} finally {\n\t\tif(threw) delete __webpack_module_cache__[moduleId];\n\t}\n\n\t// Return the exports of the module\n\treturn module.exports;\n}\n\n// expose the modules object (__webpack_modules__)\n__webpack_require__.m = __webpack_modules__;\n\n","// define getter functions for harmony exports\n__webpack_require__.d = (exports, definition) => {\n\tfor(var key in definition) {\n\t\tif(__webpack_require__.o(definition, key) && !__webpack_require__.o(exports, key)) {\n\t\t\tObject.defineProperty(exports, key, { enumerable: true, get: definition[key] });\n\t\t}\n\t}\n};","__webpack_require__.f = {};\n// This file contains only the 
entry chunk.\n// The chunk loading function for additional chunks\n__webpack_require__.e = (chunkId) => {\n\treturn Promise.all(Object.keys(__webpack_require__.f).reduce((promises, key) => {\n\t\t__webpack_require__.f[key](chunkId, promises);\n\t\treturn promises;\n\t}, []));\n};","// This function allow to reference async chunks\n__webpack_require__.u = (chunkId) => {\n\t// return url for filenames based on template\n\treturn \"\" + chunkId + \".index.js\";\n};","__webpack_require__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop))","// define __esModule on exports\n__webpack_require__.r = (exports) => {\n\tif(typeof Symbol !== 'undefined' && Symbol.toStringTag) {\n\t\tObject.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });\n\t}\n\tObject.defineProperty(exports, '__esModule', { value: true });\n};","\nif (typeof __webpack_require__ !== 'undefined') __webpack_require__.ab = __dirname + \"/\";","// no baseURI\n\n// object to store loaded chunks\n// \"1\" means \"loaded\", otherwise not loaded yet\nvar installedChunks = {\n\t179: 1\n};\n\n// no on chunks loaded\n\nvar installChunk = (chunk) => {\n\tvar moreModules = chunk.modules, chunkIds = chunk.ids, runtime = chunk.runtime;\n\tfor(var moduleId in moreModules) {\n\t\tif(__webpack_require__.o(moreModules, moduleId)) {\n\t\t\t__webpack_require__.m[moduleId] = moreModules[moduleId];\n\t\t}\n\t}\n\tif(runtime) runtime(__webpack_require__);\n\tfor(var i = 0; i < chunkIds.length; i++)\n\t\tinstalledChunks[chunkIds[i]] = 1;\n\n};\n\n// require() chunk loading for javascript\n__webpack_require__.f.require = (chunkId, promises) => {\n\t// \"1\" is the signal for \"already loaded\"\n\tif(!installedChunks[chunkId]) {\n\t\tif(true) { // all chunks have JS\n\t\t\tinstallChunk(require(\"./\" + __webpack_require__.u(chunkId)));\n\t\t} else installedChunks[chunkId] = 1;\n\t}\n};\n\n// no external install chunk\n\n// no HMR\n\n// no HMR manifest","","// startup\n// Load entry module and return exports\n// This entry module is referenced by other modules so it can't be inlined\nvar __webpack_exports__ = __webpack_require__(3109);\n",""],"names":[],"sourceRoot":""} \ No newline at end of file 
+{"version":3,"file":"index.js","mappings":";;;;;;;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC3BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC3BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACTA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC1DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACxDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACpBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC9CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChVA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1RA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5lBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1JA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACxrBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9IA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1HA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACXA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1JA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjLA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AClBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AClCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7fA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3HA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACxmBA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACNA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvBA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACLA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACVA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACXA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACXA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACZA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AClIA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACTA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACRA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3FA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACLA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACxBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC7CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5FA;;;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvQA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACl6CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpBA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;;;;;;;;;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AC7BA;AACA;;;;AEDA;AACA;AACA;AACA","sources":["../webpack://deploying-marker-with-issue/./lib/actions/attach-marker.js","../webpack://deploying-marker-with-issue/./lib/actions/check-marker-attached.js","../webpack://deploying-marker-with-issue/./lib/actions/check-marker-detached-or-assigned-actor.js","../webpack://deploying-marker-with-issue/./lib/actions/check-marker-detached.js","../webpack://deploying-marker-with-issue/./lib/actions/detach-marker.js","../webpack://deploying-marker-with-issue/./lib/common/env.js","../webpack://deploying-marker-with-issue/./lib/common/error.js","../webpack://deploying-marker-with-issue/./lib/common/input.js","../webpack://deploying-marker-with-issue/./lib/common/label.js","../webpack://deploying-marker-with-issue/./lib/common/messages.js","../webpack://deploying-marker-with-issue/./lib/common/validater.js","../webpack://deploying-marker-with-issue/./lib/github/common.js","../webpack://deploying-marker-with-issue/./lib/github/issue-get.js","../webpack://deploying-marker-with-issue/./lib/github/issue-update.js","../webpack://deploying-marker-with-issue/./lib/github/label-create.js","../webpack://deploying-marker-with-issue/./lib/github/label-get.js","../webpack://deploying-marker-with-issue/./lib/github/user-get.js","../webpack://deploying-marker-with-issue/./lib/main.js","../webpack://deploying-marker-with-issue/./node_modules/@actions/core/lib/command.js","../webpack://deploying-marker-with-issue/./node_modules/@actions/core/lib/core.js","../webpack://deploying-marker-with-issue/./node_modules/@actions/core/lib/file-command.js","../webpack://dep
loying-marker-with-issue/./node_modules/@actions/core/lib/oidc-utils.js","../webpack://deploying-marker-with-issue/./node_modules/@actions/core/lib/path-utils.js","../webpack://deploying-marker-with-issue/./node_modules/@actions/core/lib/summary.js","../webpack://deploying-marker-with-issue/./node_modules/@actions/core/lib/utils.js","../webpack://deploying-marker-with-issue/./node_modules/@actions/http-client/lib/auth.js","../webpack://deploying-marker-with-issue/./node_modules/@actions/http-client/lib/index.js","../webpack://deploying-marker-with-issue/./node_modules/@actions/http-client/lib/proxy.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/ajv.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/compile/codegen/code.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/compile/codegen/index.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/compile/codegen/scope.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/compile/errors.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/compile/index.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/compile/names.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/compile/ref_error.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/compile/resolve.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/compile/rules.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/compile/util.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/compile/validate/applicability.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/compile/validate/boolSchema.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/compile/validate/dataType.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/compile/validate/defaults.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/compile/validate/index.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/compile/validate/keyword.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/compile/validate/subschema.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/core.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/runtime/equal.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/runtime/ucs2length.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/runtime/uri.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/runtime/validation_error.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/applicator/additionalItems.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/applicator/additionalProperties.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/applicator/allOf.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/applicator/anyOf.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/applicator/contains.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/applicator/dependencies.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/applicator/if.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/applicator/index.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/voc
abularies/applicator/items.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/applicator/items2020.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/applicator/not.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/applicator/oneOf.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/applicator/patternProperties.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/applicator/prefixItems.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/applicator/properties.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/applicator/propertyNames.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/applicator/thenElse.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/code.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/core/id.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/core/index.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/core/ref.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/discriminator/index.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/discriminator/types.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/draft7.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/format/format.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/format/index.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/metadata.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/validation/const.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/validation/enum.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/validation/index.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/validation/limitItems.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/validation/limitLength.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/validation/limitNumber.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/validation/limitProperties.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/validation/multipleOf.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/validation/pattern.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/validation/required.js","../webpack://deploying-marker-with-issue/./node_modules/ajv/dist/vocabularies/validation/uniqueItems.js","../webpack://deploying-marker-with-issue/./node_modules/fast-deep-equal/index.js","../webpack://deploying-marker-with-issue/./node_modules/json-schema-traverse/index.js","../webpack://deploying-marker-with-issue/./node_modules/tunnel/index.js","../webpack://deploying-marker-with-issue/./node_modules/tunnel/lib/tunnel.js","../webpack://deploying-marker-with-issue/./node_modules/uri-js/dist/es5/uri.all.js","../webpack://deploying-marker-with-issue/./node_modules/uuid/dist/index.js","../webpack://deploying-marker-with-issue/./node_modules/uuid/dist/md5.js","../webpack://deploying-marker-with-iss
ue/./node_modules/uuid/dist/nil.js","../webpack://deploying-marker-with-issue/./node_modules/uuid/dist/parse.js","../webpack://deploying-marker-with-issue/./node_modules/uuid/dist/regex.js","../webpack://deploying-marker-with-issue/./node_modules/uuid/dist/rng.js","../webpack://deploying-marker-with-issue/./node_modules/uuid/dist/sha1.js","../webpack://deploying-marker-with-issue/./node_modules/uuid/dist/stringify.js","../webpack://deploying-marker-with-issue/./node_modules/uuid/dist/v1.js","../webpack://deploying-marker-with-issue/./node_modules/uuid/dist/v3.js","../webpack://deploying-marker-with-issue/./node_modules/uuid/dist/v35.js","../webpack://deploying-marker-with-issue/./node_modules/uuid/dist/v4.js","../webpack://deploying-marker-with-issue/./node_modules/uuid/dist/v5.js","../webpack://deploying-marker-with-issue/./node_modules/uuid/dist/validate.js","../webpack://deploying-marker-with-issue/./node_modules/uuid/dist/version.js","../webpack://deploying-marker-with-issue/external node-commonjs \"assert\"","../webpack://deploying-marker-with-issue/external node-commonjs \"crypto\"","../webpack://deploying-marker-with-issue/external node-commonjs \"events\"","../webpack://deploying-marker-with-issue/external node-commonjs \"fs\"","../webpack://deploying-marker-with-issue/external node-commonjs \"http\"","../webpack://deploying-marker-with-issue/external node-commonjs \"https\"","../webpack://deploying-marker-with-issue/external node-commonjs \"net\"","../webpack://deploying-marker-with-issue/external node-commonjs \"os\"","../webpack://deploying-marker-with-issue/external node-commonjs \"path\"","../webpack://deploying-marker-with-issue/external node-commonjs \"tls\"","../webpack://deploying-marker-with-issue/external node-commonjs \"util\"","../webpack://deploying-marker-with-issue/webpack/bootstrap","../webpack://deploying-marker-with-issue/webpack/runtime/compat","../webpack://deploying-marker-with-issue/webpack/before-startup","../webpack://deploying-marker-with-issue/webpack/startup","../webpack://deploying-marker-with-issue/webpack/after-startup"],"sourcesContent":["\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.attachMarker = void 0;\nconst issue_get_1 = require(\"../github/issue-get\");\nconst issue_update_1 = require(\"../github/issue-update\");\nconst label_get_1 = require(\"../github/label-get\");\nconst label_1 = require(\"../common/label\");\nconst label_create_1 = require(\"../github/label-create\");\nconst error_1 = require(\"../common/error\");\nconst messages_1 = require(\"../common/messages\");\nconst attachMarker = (input) => __awaiter(void 0, void 0, void 0, function* () {\n const { repoOwner, repoName, issueNumber, exitWithError, actorId } = input;\n const attached = yield (0, label_1.attachedMarkerOnIssue)(repoOwner, repoName, issueNumber);\n if (attached) {\n if (exitWithError) {\n (0, error_1.onError)((0, messages_1.getMessage)('error:label_already_attached'));\n }\n return;\n }\n const beforeIssueData = yield (0, issue_get_1.getIssue)({ repoOwner, repoName, issueNumber });\n const { issue } = beforeIssueData.data.organization.repository;\n const labels = (yield (0, label_get_1.getLabels)({ repoOwner, repoName, labelName: label_1.LabelName }))\n .data.organization.repository.labels.nodes;\n const label = labels.find(({ name }) => name === label_1.LabelName);\n let labelId = label === null || label === void 0 ? void 0 : label.id;\n if (labelId == null) {\n const createLabelResult = yield (0, label_create_1.createLabel)({\n repoOwner,\n repoName,\n labelName: label_1.LabelName\n });\n labelId = createLabelResult.node_id;\n }\n yield (0, issue_update_1.updateIssue)({\n issueId: issue.id,\n body: issue.body,\n assigneeIds: [actorId],\n labelIds: [labelId]\n });\n});\nexports.attachMarker = attachMarker;\n","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.checkMarkerAttached = void 0;\nconst error_1 = require(\"../common/error\");\nconst label_1 = require(\"../common/label\");\nconst messages_1 = require(\"../common/messages\");\nconst checkMarkerAttached = (input) => __awaiter(void 0, void 0, void 0, function* () {\n const { repoOwner, repoName, issueNumber, exitWithError } = input;\n const attached = yield (0, label_1.attachedMarkerOnIssue)(repoOwner, repoName, issueNumber);\n if (!attached) {\n if (exitWithError) {\n (0, error_1.onError)((0, messages_1.getMessage)('error:label_already_detached'));\n }\n else {\n (0, error_1.onWarning)((0, messages_1.getMessage)('warning:label_already_detached'));\n }\n }\n});\nexports.checkMarkerAttached = checkMarkerAttached;\n","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.checkMarkerDetachedOrAssignedActor = void 0;\nconst label_1 = require(\"../common/label\");\nconst error_1 = require(\"../common/error\");\nconst messages_1 = require(\"../common/messages\");\nconst issue_get_1 = require(\"../github/issue-get\");\nconst checkMarkerDetachedOrAssignedActor = (input) => __awaiter(void 0, void 0, void 0, function* () {\n const { repoOwner, repoName, issueNumber, exitWithError, actorId } = input;\n const attached = yield (0, label_1.attachedMarkerOnIssue)(repoOwner, repoName, issueNumber);\n const { assignees } = (yield (0, issue_get_1.getIssue)({ repoOwner, repoName, issueNumber })).data\n .organization.repository.issue;\n const assigned = assignees.nodes.some(({ id }) => id === actorId);\n if (!attached || assigned) {\n return;\n }\n else if (exitWithError) {\n (0, error_1.onError)((0, messages_1.getMessage)('error:label_already_attached_and_not_assigned_actor'));\n }\n else {\n (0, error_1.onWarning)((0, messages_1.getMessage)('warning:label_already_attached_and_not_assigned_actor'));\n }\n});\nexports.checkMarkerDetachedOrAssignedActor = checkMarkerDetachedOrAssignedActor;\n","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.checkMarkerDetached = void 0;\nconst label_1 = require(\"../common/label\");\nconst error_1 = require(\"../common/error\");\nconst messages_1 = require(\"../common/messages\");\nconst checkMarkerDetached = (input) => __awaiter(void 0, void 0, void 0, function* () {\n const { repoOwner, repoName, issueNumber, exitWithError } = input;\n const attached = yield (0, label_1.attachedMarkerOnIssue)(repoOwner, repoName, issueNumber);\n if (attached) {\n if (exitWithError) {\n (0, error_1.onError)((0, messages_1.getMessage)('error:label_already_attached'));\n }\n else {\n (0, error_1.onWarning)((0, messages_1.getMessage)('warning:label_already_attached'));\n }\n }\n});\nexports.checkMarkerDetached = checkMarkerDetached;\n","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.detachMarker = void 0;\nconst issue_get_1 = require(\"../github/issue-get\");\nconst issue_update_1 = require(\"../github/issue-update\");\nconst label_1 = require(\"../common/label\");\nconst error_1 = require(\"../common/error\");\nconst messages_1 = require(\"../common/messages\");\nconst detachMarker = (input) => __awaiter(void 0, void 0, void 0, function* () {\n const { repoOwner, repoName, issueNumber, exitWithError } = input;\n const attached = yield (0, label_1.attachedMarkerOnIssue)(repoOwner, repoName, issueNumber);\n if (!attached) {\n if (exitWithError) {\n (0, error_1.onError)((0, messages_1.getMessage)('error:label_already_detached'));\n }\n return;\n }\n const issue = yield (0, issue_get_1.getIssue)({ repoOwner, repoName, issueNumber });\n const { id: issueId, body, labels } = issue.data.organization.repository.issue;\n const labelIds = labels.nodes\n .filter(({ name }) => name !== label_1.LabelName)\n .map(({ id }) => id);\n yield (0, issue_update_1.updateIssue)({ issueId, body, assigneeIds: [], labelIds });\n});\nexports.detachMarker = detachMarker;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getEnvVar = void 0;\nconst getEnvVar = (name) => {\n const value = process.env[name];\n if (!value)\n throw new Error(`Missing environment variable ${name}`);\n return value;\n};\nexports.getEnvVar = getEnvVar;\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.onWarning = exports.onError = void 0;\nconst core = __importStar(require(\"@actions/core\"));\nconst onError = (message) => {\n core.setFailed(message);\n};\nexports.onError = onError;\nconst onWarning = (message) => {\n core.warning(message);\n};\nexports.onWarning = onWarning;\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getInput = void 0;\nconst core = __importStar(require(\"@actions/core\"));\nconst user_get_1 = require(\"../github/user-get\");\nconst env_1 = require(\"./env\");\nconst getInput = () => __awaiter(void 0, void 0, void 0, function* () {\n const action = core.getInput('action', { required: true });\n const issueNumber = parseInt(core.getInput('issue-number', { required: true }), 10);\n const exitWithError = core.getBooleanInput('exit-with-error', {\n required: false\n });\n // https://docs.github.com/en/actions/learn-github-actions/environment-variables\n const [repoOwner, repoName] = (0, env_1.getEnvVar)('GITHUB_REPOSITORY').split('/');\n const actor = (0, env_1.getEnvVar)('GITHUB_ACTOR');\n const { id: actorId } = (yield (0, user_get_1.getUser)({ login: actor })).data.user;\n return {\n action,\n issueNumber,\n exitWithError,\n repoOwner,\n repoName,\n actor,\n actorId\n };\n});\nexports.getInput = getInput;\n","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.attachedMarkerOnIssue = exports.DefaultLabelDescription = exports.DefaultLabelColor = exports.LabelName = void 0;\nconst issue_get_1 = require(\"../github/issue-get\");\nexports.LabelName = 'Deploying';\nexports.DefaultLabelColor = 'FF0000';\nexports.DefaultLabelDescription = 'This label indicates that deployment is in progress.';\nconst attachedMarkerOnIssue = (repoOwner, repoName, issueNumber) => __awaiter(void 0, void 0, void 0, function* () {\n const issueData = yield (0, issue_get_1.getIssue)({ repoOwner, repoName, issueNumber });\n const labels = issueData.data.organization.repository.issue.labels.nodes;\n const label = labels.find(({ name }) => name === exports.LabelName);\n return label != null;\n});\nexports.attachedMarkerOnIssue = attachedMarkerOnIssue;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getMessage = void 0;\nconst label_1 = require(\"./label\");\nconst getMessage = (key) => {\n const message = Messages[key];\n if (message != null) {\n return message;\n }\n throw new Error(`Unknown message key: ${key}`);\n};\nexports.getMessage = getMessage;\nconst Messages = {\n 'error:label_already_attached': [\n `ERROR: \"${label_1.LabelName}\" label already attached.`,\n '- Please check latest issue comments',\n `- If to detach \"${label_1.LabelName}\" label is no problem, please detach the label manually.`\n ].join('\\n'),\n 'error:label_already_detached': [\n `ERROR: \"${label_1.LabelName}\" label already detached.`,\n '- Please check latest issue comments',\n `- If to attach \"${label_1.LabelName}\" label is no problem, please attach the label manually.`\n ].join('\\n'),\n 'error:label_already_attached_and_not_assigned_actor': [\n `ERROR: \"${label_1.LabelName}\" label already attached and not assigned actor.`,\n '- Please check latest issue comments',\n `- If to detach \"${label_1.LabelName}\" label is no problem, please attach the label manually.`,\n `- If to remove actor in assignees is no problem, please remove assign manually.`\n ].join('\\n'),\n 'warning:label_already_attached': `WARNING: \"${label_1.LabelName}\" label already attached, but not errored because exit-with-error is false.`,\n 'warning:label_already_detached': `WARNING: \"${label_1.LabelName}\" label already detached, but not errored because exit-with-error is false.`,\n 'warning:label_already_attached_and_not_assigned_actor': `WARNING: \"${label_1.LabelName}\" label already attached and not assigned actor, but not errored because exit-with-error is false.`\n};\n","\"use strict\";\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? 
mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.buildValidator = void 0;\nconst ajv_1 = __importDefault(require(\"ajv\"));\nconst ajv = new ajv_1.default({ allErrors: true });\nconst buildValidator = (schema) => {\n const validate = ajv.compile(schema);\n return (params) => {\n if (validate(params)) {\n return params;\n }\n throw new Error(`Schema Error: ${JSON.stringify(validate.errors)}`);\n };\n};\nexports.buildValidator = buildValidator;\n","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.fetchGitHubApiV3 = exports.fetchGitHubGraphQL = void 0;\nconst https_1 = __importDefault(require(\"https\"));\nconst env_1 = require(\"../common/env\");\nconst request = (method, url, body) => __awaiter(void 0, void 0, void 0, function* () {\n return new Promise((resolve, reject) => {\n const req = https_1.default.request(url, {\n method,\n headers: {\n Authorization: `token ${(0, env_1.getEnvVar)('GITHUB_TOKEN')}`,\n Accept: 'application/json',\n 'User-Agent': 'Connehito/deploying-marker-with-issue'\n },\n timeout: 10000\n });\n req.on('response', res => {\n if (res.statusCode !== 200) {\n throw new Error(`Invalid status code: ${res.statusCode}`);\n }\n try {\n const chunks = [];\n res.on('data', chunk => {\n chunks.push(chunk);\n });\n res.on('end', () => {\n resolve(JSON.parse(Buffer.concat(chunks).toString('utf-8')));\n });\n res.on('error', reject);\n }\n catch (err) {\n reject(err);\n }\n });\n req.on('error', reject);\n req.write(body);\n req.end();\n });\n});\nconst fetchGitHubGraphQL = (query, variables = {}) => __awaiter(void 0, void 0, void 0, function* () {\n return request('POST', 'https://api.github.com/graphql', JSON.stringify({ query, variables }));\n});\nexports.fetchGitHubGraphQL = fetchGitHubGraphQL;\nconst fetchGitHubApiV3 = (method, path, body = '') => __awaiter(void 0, void 0, void 0, function* () { return request(method, `https://api.github.com${path}`, body); });\nexports.fetchGitHubApiV3 = fetchGitHubApiV3;\n","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getIssue = void 0;\nconst validater_1 = require(\"../common/validater\");\nconst common_1 = require(\"./common\");\nconst IssueSchema = {\n type: 'object',\n additionalProperties: false,\n required: ['data'],\n properties: {\n data: {\n type: 'object',\n additionalProperties: false,\n required: ['organization'],\n properties: {\n organization: {\n type: 'object',\n additionalProperties: false,\n required: ['repository'],\n properties: {\n repository: {\n type: 'object',\n additionalProperties: false,\n required: ['issue'],\n properties: {\n issue: {\n type: 'object',\n additionalProperties: false,\n required: ['id', 'body', 'assignees', 'labels'],\n properties: {\n id: { type: 'string' },\n body: { type: 'string' },\n assignees: {\n type: 'object',\n additionalProperties: false,\n required: ['nodes'],\n properties: {\n nodes: {\n type: 'array',\n items: {\n type: 'object',\n additionalProperties: false,\n required: ['id', 'login'],\n properties: {\n id: { type: 'string' },\n login: { type: 'string' }\n }\n }\n }\n }\n },\n labels: {\n type: 'object',\n additionalProperties: false,\n required: ['nodes'],\n properties: {\n nodes: {\n type: 'array',\n items: {\n type: 'object',\n additionalProperties: false,\n required: ['id', 'name'],\n properties: {\n id: { type: 'string' },\n name: { type: 'string' }\n }\n }\n }\n }\n }\n }\n }\n }\n }\n }\n }\n }\n }\n }\n};\nconst getIssue = (args) => __awaiter(void 0, void 0, void 0, function* () {\n const result = yield (0, common_1.fetchGitHubGraphQL)(`query ($repoOwner: String!, $repoName: String!, $issueNumber: Int!) {\n organization(login: $repoOwner) {\n repository(name: $repoName) {\n issue(number: $issueNumber) {\n id\n body\n assignees(first: 100) {\n nodes {\n id\n login\n }\n }\n labels(first: 100) {\n nodes {\n id\n name\n }\n }\n }\n }\n }\n }`, Object.assign({}, args));\n return (0, validater_1.buildValidator)(IssueSchema)(result);\n});\nexports.getIssue = getIssue;\n","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.updateIssue = void 0;\nconst common_1 = require(\"./common\");\nconst updateIssue = (args) => __awaiter(void 0, void 0, void 0, function* () {\n yield (0, common_1.fetchGitHubGraphQL)(`mutation ($issueId: ID!, $body: String!, $assigneeIds: [ID!] 
$labelIds: [ID!]) {\n updateIssue(input: {id: $issueId, body: $body, assigneeIds: $assigneeIds, labelIds: $labelIds}) {\n clientMutationId\n }\n }`, Object.assign({}, args));\n});\nexports.updateIssue = updateIssue;\n","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.createLabel = void 0;\nconst common_1 = require(\"./common\");\nconst label_1 = require(\"../common/label\");\nconst validater_1 = require(\"../common/validater\");\n// https://docs.github.com/ja/rest/issues/labels#create-a-label\nconst Schema = {\n title: 'Label',\n description: 'Color-coded labels help you categorize and filter your issues (just like labels in Gmail).',\n type: 'object',\n properties: {\n id: {\n type: 'integer',\n examples: [208045946]\n },\n node_id: {\n type: 'string',\n examples: ['MDU6TGFiZWwyMDgwNDU5NDY=']\n },\n url: {\n description: 'URL for the label',\n type: 'string',\n examples: ['https://api.github.com/repositories/42/labels/bug']\n },\n name: {\n description: 'The name of the label.',\n type: 'string',\n examples: ['bug']\n },\n description: {\n type: ['string', 'null'],\n examples: [\"Something isn't working\"]\n },\n color: {\n description: '6-character hex code, without the leading #, identifying the color',\n type: 'string',\n examples: ['FFFFFF']\n },\n default: {\n type: 'boolean',\n examples: [true]\n }\n },\n required: ['id', 'node_id', 'url', 'name', 'description', 'color', 'default']\n};\nconst createLabel = (args) => __awaiter(void 0, void 0, void 0, function* () {\n const { repoOwner, repoName, labelName, labelColor, labelDescription } = args;\n const data = yield (0, common_1.fetchGitHubApiV3)('POST', `/repos/${repoOwner}/${repoName}/labels`, JSON.stringify({\n name: labelName,\n color: labelColor !== null && labelColor !== void 0 ? labelColor : label_1.DefaultLabelColor,\n description: labelDescription !== null && labelDescription !== void 0 ? labelDescription : label_1.DefaultLabelDescription\n }));\n return (0, validater_1.buildValidator)(Schema)(data);\n});\nexports.createLabel = createLabel;\n","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getLabels = exports.Schema = void 0;\nconst common_1 = require(\"./common\");\nconst validater_1 = require(\"../common/validater\");\nexports.Schema = {\n type: 'object',\n additionalProperties: false,\n required: ['data'],\n properties: {\n data: {\n type: 'object',\n additionalProperties: false,\n required: ['organization'],\n properties: {\n organization: {\n type: 'object',\n additionalProperties: false,\n required: ['repository'],\n properties: {\n repository: {\n type: 'object',\n additionalProperties: false,\n required: ['labels'],\n properties: {\n labels: {\n type: 'object',\n additionalProperties: false,\n required: ['nodes'],\n properties: {\n nodes: {\n type: 'array',\n items: {\n type: 'object',\n additionalProperties: false,\n required: ['id', 'name'],\n properties: {\n id: { type: 'string' },\n name: { type: 'string' }\n }\n }\n }\n }\n }\n }\n }\n }\n }\n }\n }\n }\n};\nconst getLabels = (args) => __awaiter(void 0, void 0, void 0, function* () {\n const result = yield (0, common_1.fetchGitHubGraphQL)(`query ($repoOwner: String!, $repoName: String!, $labelName: String!) {\n organization(login: $repoOwner) {\n repository(name: $repoName) {\n labels(first: 100, query: $labelName) {\n nodes {\n id\n name\n }\n }\n }\n }\n }`, Object.assign({}, args));\n return (0, validater_1.buildValidator)(exports.Schema)(result);\n});\nexports.getLabels = getLabels;\n","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getUser = void 0;\nconst validater_1 = require(\"../common/validater\");\nconst common_1 = require(\"./common\");\nconst UserSchema = {\n type: 'object',\n additionalProperties: false,\n required: ['data'],\n properties: {\n data: {\n type: 'object',\n additionalProperties: false,\n required: ['user'],\n properties: {\n user: {\n type: 'object',\n additionalProperties: false,\n required: ['id'],\n properties: {\n id: {\n type: 'string'\n }\n }\n }\n }\n }\n }\n};\nconst getUser = (args) => __awaiter(void 0, void 0, void 0, function* () {\n const result = yield (0, common_1.fetchGitHubGraphQL)(`query ($login: String!) {\n user(login: $login) {\n id\n }\n }`, Object.assign({}, args));\n return (0, validater_1.buildValidator)(UserSchema)(result);\n});\nexports.getUser = getUser;\n","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst input_1 = require(\"./common/input\");\nconst attach_marker_1 = require(\"./actions/attach-marker\");\nconst check_marker_attached_1 = require(\"./actions/check-marker-attached\");\nconst check_marker_detached_1 = require(\"./actions/check-marker-detached\");\nconst detach_marker_1 = require(\"./actions/detach-marker\");\nconst error_1 = require(\"./common/error\");\nconst check_marker_detached_or_assigned_actor_1 = require(\"./actions/check-marker-detached-or-assigned-actor\");\nconst run = () => __awaiter(void 0, void 0, void 0, function* () {\n try {\n const input = yield (0, input_1.getInput)();\n switch (input.action) {\n case 'check-marker-attached':\n yield (0, check_marker_attached_1.checkMarkerAttached)(input);\n break;\n case 'check-marker-detached':\n yield (0, check_marker_detached_1.checkMarkerDetached)(input);\n break;\n case 'attach-marker':\n yield (0, attach_marker_1.attachMarker)(input);\n break;\n case 'detach-marker':\n yield (0, detach_marker_1.detachMarker)(input);\n break;\n case 'check-marker-detached-or-assigned-actor':\n yield (0, check_marker_detached_or_assigned_actor_1.checkMarkerDetachedOrAssignedActor)(input);\n break;\n default:\n (0, error_1.onError)(`Undefined action: ${input.action}`);\n }\n }\n catch (error) {\n if (error instanceof Error) {\n (0, error_1.onError)(error.message);\n }\n else {\n (0, error_1.onError)(`ERROR: ${JSON.stringify(error)}`);\n }\n }\n});\nrun();\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.issue = exports.issueCommand = void 0;\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\n/**\n * Commands\n *\n * Command Format:\n * ::name key=value,key=value::message\n *\n * Examples:\n * ::warning::This is the message\n * ::set-env name=MY_VAR::some value\n */\nfunction issueCommand(command, properties, message) {\n const cmd = new Command(command, properties, message);\n process.stdout.write(cmd.toString() + os.EOL);\n}\nexports.issueCommand = issueCommand;\nfunction issue(name, message = '') {\n issueCommand(name, {}, message);\n}\nexports.issue = issue;\nconst CMD_STRING = '::';\nclass Command {\n constructor(command, properties, message) {\n if (!command) {\n command = 'missing.command';\n }\n this.command = command;\n this.properties = properties;\n this.message = message;\n }\n toString() {\n let cmdStr = CMD_STRING + this.command;\n if (this.properties && Object.keys(this.properties).length > 0) {\n cmdStr += ' ';\n let first = true;\n for (const key in this.properties) {\n if (this.properties.hasOwnProperty(key)) {\n const val = this.properties[key];\n if (val) {\n if (first) {\n first = false;\n }\n else {\n cmdStr += ',';\n }\n cmdStr += `${key}=${escapeProperty(val)}`;\n }\n }\n }\n }\n cmdStr += `${CMD_STRING}${escapeData(this.message)}`;\n return cmdStr;\n }\n}\nfunction escapeData(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A');\n}\nfunction escapeProperty(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A')\n .replace(/:/g, '%3A')\n .replace(/,/g, '%2C');\n}\n//# sourceMappingURL=command.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0;\nconst command_1 = require(\"./command\");\nconst file_command_1 = require(\"./file-command\");\nconst utils_1 = require(\"./utils\");\nconst os = __importStar(require(\"os\"));\nconst path = __importStar(require(\"path\"));\nconst uuid_1 = require(\"uuid\");\nconst oidc_utils_1 = require(\"./oidc-utils\");\n/**\n * The code to exit an action\n */\nvar ExitCode;\n(function (ExitCode) {\n /**\n * A code indicating that the action was successful\n */\n ExitCode[ExitCode[\"Success\"] = 0] = \"Success\";\n /**\n * A code indicating that the action was a failure\n */\n ExitCode[ExitCode[\"Failure\"] = 1] = \"Failure\";\n})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));\n//-----------------------------------------------------------------------\n// Variables\n//-----------------------------------------------------------------------\n/**\n * Sets env variable for this action and future actions in the job\n * @param name the name of the variable to set\n * @param val the value of the variable. 
Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction exportVariable(name, val) {\n const convertedVal = utils_1.toCommandValue(val);\n process.env[name] = convertedVal;\n const filePath = process.env['GITHUB_ENV'] || '';\n if (filePath) {\n const delimiter = `ghadelimiter_${uuid_1.v4()}`;\n // These should realistically never happen, but just in case someone finds a way to exploit uuid generation let's not allow keys or values that contain the delimiter.\n if (name.includes(delimiter)) {\n throw new Error(`Unexpected input: name should not contain the delimiter \"${delimiter}\"`);\n }\n if (convertedVal.includes(delimiter)) {\n throw new Error(`Unexpected input: value should not contain the delimiter \"${delimiter}\"`);\n }\n const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`;\n file_command_1.issueCommand('ENV', commandValue);\n }\n else {\n command_1.issueCommand('set-env', { name }, convertedVal);\n }\n}\nexports.exportVariable = exportVariable;\n/**\n * Registers a secret which will get masked from logs\n * @param secret value of the secret\n */\nfunction setSecret(secret) {\n command_1.issueCommand('add-mask', {}, secret);\n}\nexports.setSecret = setSecret;\n/**\n * Prepends inputPath to the PATH (for this action and future actions)\n * @param inputPath\n */\nfunction addPath(inputPath) {\n const filePath = process.env['GITHUB_PATH'] || '';\n if (filePath) {\n file_command_1.issueCommand('PATH', inputPath);\n }\n else {\n command_1.issueCommand('add-path', {}, inputPath);\n }\n process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;\n}\nexports.addPath = addPath;\n/**\n * Gets the value of an input.\n * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed.\n * Returns an empty string if the value is not defined.\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns string\n */\nfunction getInput(name, options) {\n const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';\n if (options && options.required && !val) {\n throw new Error(`Input required and not supplied: ${name}`);\n }\n if (options && options.trimWhitespace === false) {\n return val;\n }\n return val.trim();\n}\nexports.getInput = getInput;\n/**\n * Gets the values of an multiline input. Each value is also trimmed.\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns string[]\n *\n */\nfunction getMultilineInput(name, options) {\n const inputs = getInput(name, options)\n .split('\\n')\n .filter(x => x !== '');\n return inputs;\n}\nexports.getMultilineInput = getMultilineInput;\n/**\n * Gets the input value of the boolean type in the YAML 1.2 \"core schema\" specification.\n * Support boolean input list: `true | True | TRUE | false | False | FALSE` .\n * The return value is also in boolean type.\n * ref: https://yaml.org/spec/1.2/spec.html#id2804923\n *\n * @param name name of the input to get\n * @param options optional. 
See InputOptions.\n * @returns boolean\n */\nfunction getBooleanInput(name, options) {\n const trueValue = ['true', 'True', 'TRUE'];\n const falseValue = ['false', 'False', 'FALSE'];\n const val = getInput(name, options);\n if (trueValue.includes(val))\n return true;\n if (falseValue.includes(val))\n return false;\n throw new TypeError(`Input does not meet YAML 1.2 \"Core Schema\" specification: ${name}\\n` +\n `Support boolean input list: \\`true | True | TRUE | false | False | FALSE\\``);\n}\nexports.getBooleanInput = getBooleanInput;\n/**\n * Sets the value of an output.\n *\n * @param name name of the output to set\n * @param value value to store. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction setOutput(name, value) {\n process.stdout.write(os.EOL);\n command_1.issueCommand('set-output', { name }, value);\n}\nexports.setOutput = setOutput;\n/**\n * Enables or disables the echoing of commands into stdout for the rest of the step.\n * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.\n *\n */\nfunction setCommandEcho(enabled) {\n command_1.issue('echo', enabled ? 'on' : 'off');\n}\nexports.setCommandEcho = setCommandEcho;\n//-----------------------------------------------------------------------\n// Results\n//-----------------------------------------------------------------------\n/**\n * Sets the action status to failed.\n * When the action exits it will be with an exit code of 1\n * @param message add error issue message\n */\nfunction setFailed(message) {\n process.exitCode = ExitCode.Failure;\n error(message);\n}\nexports.setFailed = setFailed;\n//-----------------------------------------------------------------------\n// Logging Commands\n//-----------------------------------------------------------------------\n/**\n * Gets whether Actions Step Debug is on or not\n */\nfunction isDebug() {\n return process.env['RUNNER_DEBUG'] === '1';\n}\nexports.isDebug = isDebug;\n/**\n * Writes debug message to user log\n * @param message debug message\n */\nfunction debug(message) {\n command_1.issueCommand('debug', {}, message);\n}\nexports.debug = debug;\n/**\n * Adds an error issue\n * @param message error issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction error(message, properties = {}) {\n command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);\n}\nexports.error = error;\n/**\n * Adds a warning issue\n * @param message warning issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction warning(message, properties = {}) {\n command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);\n}\nexports.warning = warning;\n/**\n * Adds a notice issue\n * @param message notice issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction notice(message, properties = {}) {\n command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? 
message.toString() : message);\n}\nexports.notice = notice;\n/**\n * Writes info to log with console.log.\n * @param message info message\n */\nfunction info(message) {\n process.stdout.write(message + os.EOL);\n}\nexports.info = info;\n/**\n * Begin an output group.\n *\n * Output until the next `groupEnd` will be foldable in this group\n *\n * @param name The name of the output group\n */\nfunction startGroup(name) {\n command_1.issue('group', name);\n}\nexports.startGroup = startGroup;\n/**\n * End an output group.\n */\nfunction endGroup() {\n command_1.issue('endgroup');\n}\nexports.endGroup = endGroup;\n/**\n * Wrap an asynchronous function call in a group.\n *\n * Returns the same type as the function itself.\n *\n * @param name The name of the group\n * @param fn The function to wrap in the group\n */\nfunction group(name, fn) {\n return __awaiter(this, void 0, void 0, function* () {\n startGroup(name);\n let result;\n try {\n result = yield fn();\n }\n finally {\n endGroup();\n }\n return result;\n });\n}\nexports.group = group;\n//-----------------------------------------------------------------------\n// Wrapper action state\n//-----------------------------------------------------------------------\n/**\n * Saves state for current action, the state can only be retrieved by this action's post job execution.\n *\n * @param name name of the state to store\n * @param value value to store. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction saveState(name, value) {\n command_1.issueCommand('save-state', { name }, value);\n}\nexports.saveState = saveState;\n/**\n * Gets the value of an state set by this action's main execution.\n *\n * @param name name of the state to get\n * @returns string\n */\nfunction getState(name) {\n return process.env[`STATE_${name}`] || '';\n}\nexports.getState = getState;\nfunction getIDToken(aud) {\n return __awaiter(this, void 0, void 0, function* () {\n return yield oidc_utils_1.OidcClient.getIDToken(aud);\n });\n}\nexports.getIDToken = getIDToken;\n/**\n * Summary exports\n */\nvar summary_1 = require(\"./summary\");\nObject.defineProperty(exports, \"summary\", { enumerable: true, get: function () { return summary_1.summary; } });\n/**\n * @deprecated use core.summary\n */\nvar summary_2 = require(\"./summary\");\nObject.defineProperty(exports, \"markdownSummary\", { enumerable: true, get: function () { return summary_2.markdownSummary; } });\n/**\n * Path exports\n */\nvar path_utils_1 = require(\"./path-utils\");\nObject.defineProperty(exports, \"toPosixPath\", { enumerable: true, get: function () { return path_utils_1.toPosixPath; } });\nObject.defineProperty(exports, \"toWin32Path\", { enumerable: true, get: function () { return path_utils_1.toWin32Path; } });\nObject.defineProperty(exports, \"toPlatformPath\", { enumerable: true, get: function () { return path_utils_1.toPlatformPath; } });\n//# sourceMappingURL=core.js.map","\"use strict\";\n// For internal use, subject to change.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.issueCommand = void 0;\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nconst fs = __importStar(require(\"fs\"));\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\nfunction issueCommand(command, message) {\n const filePath = process.env[`GITHUB_${command}`];\n if (!filePath) {\n throw new Error(`Unable to find environment variable for file command ${command}`);\n }\n if (!fs.existsSync(filePath)) {\n throw new Error(`Missing file at path: ${filePath}`);\n }\n fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {\n encoding: 'utf8'\n });\n}\nexports.issueCommand = issueCommand;\n//# sourceMappingURL=file-command.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.OidcClient = void 0;\nconst http_client_1 = require(\"@actions/http-client\");\nconst auth_1 = require(\"@actions/http-client/lib/auth\");\nconst core_1 = require(\"./core\");\nclass OidcClient {\n static createHttpClient(allowRetry = true, maxRetry = 10) {\n const requestOptions = {\n allowRetries: allowRetry,\n maxRetries: maxRetry\n };\n return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions);\n }\n static getRequestToken() {\n const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN'];\n if (!token) {\n throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable');\n }\n return token;\n }\n static getIDTokenUrl() {\n const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL'];\n if (!runtimeUrl) {\n throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable');\n }\n return runtimeUrl;\n }\n static getCall(id_token_url) {\n var _a;\n return __awaiter(this, void 0, void 0, function* () {\n const httpclient = OidcClient.createHttpClient();\n const res = yield httpclient\n .getJson(id_token_url)\n .catch(error => {\n throw new Error(`Failed to get ID Token. \\n \n Error Code : ${error.statusCode}\\n \n Error Message: ${error.result.message}`);\n });\n const id_token = (_a = res.result) === null || _a === void 0 ? 
void 0 : _a.value;\n if (!id_token) {\n throw new Error('Response json body do not have ID Token field');\n }\n return id_token;\n });\n }\n static getIDToken(audience) {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n // New ID Token is requested from action service\n let id_token_url = OidcClient.getIDTokenUrl();\n if (audience) {\n const encodedAudience = encodeURIComponent(audience);\n id_token_url = `${id_token_url}&audience=${encodedAudience}`;\n }\n core_1.debug(`ID token url is ${id_token_url}`);\n const id_token = yield OidcClient.getCall(id_token_url);\n core_1.setSecret(id_token);\n return id_token;\n }\n catch (error) {\n throw new Error(`Error message: ${error.message}`);\n }\n });\n }\n}\nexports.OidcClient = OidcClient;\n//# sourceMappingURL=oidc-utils.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0;\nconst path = __importStar(require(\"path\"));\n/**\n * toPosixPath converts the given path to the posix form. On Windows, \\\\ will be\n * replaced with /.\n *\n * @param pth. Path to transform.\n * @return string Posix path.\n */\nfunction toPosixPath(pth) {\n return pth.replace(/[\\\\]/g, '/');\n}\nexports.toPosixPath = toPosixPath;\n/**\n * toWin32Path converts the given path to the win32 form. On Linux, / will be\n * replaced with \\\\.\n *\n * @param pth. Path to transform.\n * @return string Win32 path.\n */\nfunction toWin32Path(pth) {\n return pth.replace(/[/]/g, '\\\\');\n}\nexports.toWin32Path = toWin32Path;\n/**\n * toPlatformPath converts the given path to a platform-specific path. It does\n * this by replacing instances of / and \\ with the platform-specific path\n * separator.\n *\n * @param pth The path to platformize.\n * @return string The platform-specific path.\n */\nfunction toPlatformPath(pth) {\n return pth.replace(/[/\\\\]/g, path.sep);\n}\nexports.toPlatformPath = toPlatformPath;\n//# sourceMappingURL=path-utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0;\nconst os_1 = require(\"os\");\nconst fs_1 = require(\"fs\");\nconst { access, appendFile, writeFile } = fs_1.promises;\nexports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY';\nexports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary';\nclass Summary {\n constructor() {\n this._buffer = '';\n }\n /**\n * Finds the summary file path from the environment, rejects if env var is not found or file does not exist\n * Also checks r/w permissions.\n *\n * @returns step summary file path\n */\n filePath() {\n return __awaiter(this, void 0, void 0, function* () {\n if (this._filePath) {\n return this._filePath;\n }\n const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR];\n if (!pathFromEnv) {\n throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);\n }\n try {\n yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);\n }\n catch (_a) {\n throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`);\n }\n this._filePath = pathFromEnv;\n return this._filePath;\n });\n }\n /**\n * Wraps content in an HTML tag, adding any HTML attributes\n *\n * @param {string} tag HTML tag to wrap\n * @param {string | null} content content within the tag\n * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add\n *\n * @returns {string} content wrapped in HTML element\n */\n wrap(tag, content, attrs = {}) {\n const htmlAttrs = Object.entries(attrs)\n .map(([key, value]) => ` ${key}=\"${value}\"`)\n .join('');\n if (!content) {\n return `<${tag}${htmlAttrs}>`;\n }\n return `<${tag}${htmlAttrs}>${content}`;\n }\n /**\n * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default.\n *\n * @param {SummaryWriteOptions} [options] (optional) options for write operation\n *\n * @returns {Promise} summary instance\n */\n write(options) {\n return __awaiter(this, void 0, void 0, function* () {\n const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite);\n const filePath = yield this.filePath();\n const writeFunc = overwrite ? 
writeFile : appendFile;\n yield writeFunc(filePath, this._buffer, { encoding: 'utf8' });\n return this.emptyBuffer();\n });\n }\n /**\n * Clears the summary buffer and wipes the summary file\n *\n * @returns {Summary} summary instance\n */\n clear() {\n return __awaiter(this, void 0, void 0, function* () {\n return this.emptyBuffer().write({ overwrite: true });\n });\n }\n /**\n * Returns the current summary buffer as a string\n *\n * @returns {string} string of summary buffer\n */\n stringify() {\n return this._buffer;\n }\n /**\n * If the summary buffer is empty\n *\n * @returns {boolen} true if the buffer is empty\n */\n isEmptyBuffer() {\n return this._buffer.length === 0;\n }\n /**\n * Resets the summary buffer without writing to summary file\n *\n * @returns {Summary} summary instance\n */\n emptyBuffer() {\n this._buffer = '';\n return this;\n }\n /**\n * Adds raw text to the summary buffer\n *\n * @param {string} text content to add\n * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false)\n *\n * @returns {Summary} summary instance\n */\n addRaw(text, addEOL = false) {\n this._buffer += text;\n return addEOL ? this.addEOL() : this;\n }\n /**\n * Adds the operating system-specific end-of-line marker to the buffer\n *\n * @returns {Summary} summary instance\n */\n addEOL() {\n return this.addRaw(os_1.EOL);\n }\n /**\n * Adds an HTML codeblock to the summary buffer\n *\n * @param {string} code content to render within fenced code block\n * @param {string} lang (optional) language to syntax highlight code\n *\n * @returns {Summary} summary instance\n */\n addCodeBlock(code, lang) {\n const attrs = Object.assign({}, (lang && { lang }));\n const element = this.wrap('pre', this.wrap('code', code), attrs);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML list to the summary buffer\n *\n * @param {string[]} items list of items to render\n * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false)\n *\n * @returns {Summary} summary instance\n */\n addList(items, ordered = false) {\n const tag = ordered ? 'ol' : 'ul';\n const listItems = items.map(item => this.wrap('li', item)).join('');\n const element = this.wrap(tag, listItems);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML table to the summary buffer\n *\n * @param {SummaryTableCell[]} rows table rows\n *\n * @returns {Summary} summary instance\n */\n addTable(rows) {\n const tableBody = rows\n .map(row => {\n const cells = row\n .map(cell => {\n if (typeof cell === 'string') {\n return this.wrap('td', cell);\n }\n const { header, data, colspan, rowspan } = cell;\n const tag = header ? 
'th' : 'td';\n const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan }));\n return this.wrap(tag, data, attrs);\n })\n .join('');\n return this.wrap('tr', cells);\n })\n .join('');\n const element = this.wrap('table', tableBody);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds a collapsable HTML details element to the summary buffer\n *\n * @param {string} label text for the closed state\n * @param {string} content collapsable content\n *\n * @returns {Summary} summary instance\n */\n addDetails(label, content) {\n const element = this.wrap('details', this.wrap('summary', label) + content);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML image tag to the summary buffer\n *\n * @param {string} src path to the image you to embed\n * @param {string} alt text description of the image\n * @param {SummaryImageOptions} options (optional) addition image attributes\n *\n * @returns {Summary} summary instance\n */\n addImage(src, alt, options) {\n const { width, height } = options || {};\n const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height }));\n const element = this.wrap('img', null, Object.assign({ src, alt }, attrs));\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML section heading element\n *\n * @param {string} text heading text\n * @param {number | string} [level=1] (optional) the heading level, default: 1\n *\n * @returns {Summary} summary instance\n */\n addHeading(text, level) {\n const tag = `h${level}`;\n const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag)\n ? tag\n : 'h1';\n const element = this.wrap(allowedTag, text);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML thematic break (
<hr>) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addSeparator() {\n const element = this.wrap('hr', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML line break (<br>
) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addBreak() {\n const element = this.wrap('br', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML blockquote to the summary buffer\n *\n * @param {string} text quote text\n * @param {string} cite (optional) citation url\n *\n * @returns {Summary} summary instance\n */\n addQuote(text, cite) {\n const attrs = Object.assign({}, (cite && { cite }));\n const element = this.wrap('blockquote', text, attrs);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML anchor tag to the summary buffer\n *\n * @param {string} text link text/content\n * @param {string} href hyperlink\n *\n * @returns {Summary} summary instance\n */\n addLink(text, href) {\n const element = this.wrap('a', text, { href });\n return this.addRaw(element).addEOL();\n }\n}\nconst _summary = new Summary();\n/**\n * @deprecated use `core.summary`\n */\nexports.markdownSummary = _summary;\nexports.summary = _summary;\n//# sourceMappingURL=summary.js.map","\"use strict\";\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.toCommandProperties = exports.toCommandValue = void 0;\n/**\n * Sanitizes an input into a string so it can be passed into issueCommand safely\n * @param input input to sanitize into a string\n */\nfunction toCommandValue(input) {\n if (input === null || input === undefined) {\n return '';\n }\n else if (typeof input === 'string' || input instanceof String) {\n return input;\n }\n return JSON.stringify(input);\n}\nexports.toCommandValue = toCommandValue;\n/**\n *\n * @param annotationProperties\n * @returns The command properties to send with the actual annotation command\n * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646\n */\nfunction toCommandProperties(annotationProperties) {\n if (!Object.keys(annotationProperties).length) {\n return {};\n }\n return {\n title: annotationProperties.title,\n file: annotationProperties.file,\n line: annotationProperties.startLine,\n endLine: annotationProperties.endLine,\n col: annotationProperties.startColumn,\n endColumn: annotationProperties.endColumn\n };\n}\nexports.toCommandProperties = toCommandProperties;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0;\nclass BasicCredentialHandler {\n constructor(username, password) {\n this.username = username;\n this.password = password;\n }\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.BasicCredentialHandler = BasicCredentialHandler;\nclass BearerCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Bearer ${this.token}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.BearerCredentialHandler = BearerCredentialHandler;\nclass PersonalAccessTokenCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;\n//# sourceMappingURL=auth.js.map","\"use strict\";\n/* eslint-disable @typescript-eslint/no-explicit-any */\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0;\nconst http = __importStar(require(\"http\"));\nconst https = __importStar(require(\"https\"));\nconst pm = __importStar(require(\"./proxy\"));\nconst tunnel = __importStar(require(\"tunnel\"));\nvar HttpCodes;\n(function (HttpCodes) {\n HttpCodes[HttpCodes[\"OK\"] = 200] = \"OK\";\n HttpCodes[HttpCodes[\"MultipleChoices\"] = 300] = \"MultipleChoices\";\n HttpCodes[HttpCodes[\"MovedPermanently\"] = 301] = \"MovedPermanently\";\n HttpCodes[HttpCodes[\"ResourceMoved\"] = 302] = \"ResourceMoved\";\n HttpCodes[HttpCodes[\"SeeOther\"] = 303] = \"SeeOther\";\n HttpCodes[HttpCodes[\"NotModified\"] = 304] = \"NotModified\";\n HttpCodes[HttpCodes[\"UseProxy\"] = 305] = \"UseProxy\";\n HttpCodes[HttpCodes[\"SwitchProxy\"] = 306] = \"SwitchProxy\";\n HttpCodes[HttpCodes[\"TemporaryRedirect\"] = 307] = \"TemporaryRedirect\";\n HttpCodes[HttpCodes[\"PermanentRedirect\"] = 308] = \"PermanentRedirect\";\n HttpCodes[HttpCodes[\"BadRequest\"] = 400] = \"BadRequest\";\n HttpCodes[HttpCodes[\"Unauthorized\"] = 401] = \"Unauthorized\";\n HttpCodes[HttpCodes[\"PaymentRequired\"] = 402] = \"PaymentRequired\";\n HttpCodes[HttpCodes[\"Forbidden\"] = 403] = \"Forbidden\";\n HttpCodes[HttpCodes[\"NotFound\"] = 404] = \"NotFound\";\n HttpCodes[HttpCodes[\"MethodNotAllowed\"] = 405] = \"MethodNotAllowed\";\n HttpCodes[HttpCodes[\"NotAcceptable\"] = 406] = \"NotAcceptable\";\n HttpCodes[HttpCodes[\"ProxyAuthenticationRequired\"] = 407] = \"ProxyAuthenticationRequired\";\n HttpCodes[HttpCodes[\"RequestTimeout\"] = 408] = \"RequestTimeout\";\n HttpCodes[HttpCodes[\"Conflict\"] = 409] = \"Conflict\";\n HttpCodes[HttpCodes[\"Gone\"] = 410] = \"Gone\";\n HttpCodes[HttpCodes[\"TooManyRequests\"] = 429] = \"TooManyRequests\";\n HttpCodes[HttpCodes[\"InternalServerError\"] = 500] = \"InternalServerError\";\n HttpCodes[HttpCodes[\"NotImplemented\"] = 501] = \"NotImplemented\";\n HttpCodes[HttpCodes[\"BadGateway\"] = 502] = \"BadGateway\";\n HttpCodes[HttpCodes[\"ServiceUnavailable\"] = 503] = \"ServiceUnavailable\";\n HttpCodes[HttpCodes[\"GatewayTimeout\"] = 504] = \"GatewayTimeout\";\n})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));\nvar Headers;\n(function (Headers) {\n Headers[\"Accept\"] = \"accept\";\n Headers[\"ContentType\"] = \"content-type\";\n})(Headers = exports.Headers || (exports.Headers = {}));\nvar MediaTypes;\n(function (MediaTypes) {\n MediaTypes[\"ApplicationJson\"] = \"application/json\";\n})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));\n/**\n * Returns the proxy URL, depending upon the supplied url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. 
For example, https://api.github.com\n */\nfunction getProxyUrl(serverUrl) {\n const proxyUrl = pm.getProxyUrl(new URL(serverUrl));\n return proxyUrl ? proxyUrl.href : '';\n}\nexports.getProxyUrl = getProxyUrl;\nconst HttpRedirectCodes = [\n HttpCodes.MovedPermanently,\n HttpCodes.ResourceMoved,\n HttpCodes.SeeOther,\n HttpCodes.TemporaryRedirect,\n HttpCodes.PermanentRedirect\n];\nconst HttpResponseRetryCodes = [\n HttpCodes.BadGateway,\n HttpCodes.ServiceUnavailable,\n HttpCodes.GatewayTimeout\n];\nconst RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];\nconst ExponentialBackoffCeiling = 10;\nconst ExponentialBackoffTimeSlice = 5;\nclass HttpClientError extends Error {\n constructor(message, statusCode) {\n super(message);\n this.name = 'HttpClientError';\n this.statusCode = statusCode;\n Object.setPrototypeOf(this, HttpClientError.prototype);\n }\n}\nexports.HttpClientError = HttpClientError;\nclass HttpClientResponse {\n constructor(message) {\n this.message = message;\n }\n readBody() {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {\n let output = Buffer.alloc(0);\n this.message.on('data', (chunk) => {\n output = Buffer.concat([output, chunk]);\n });\n this.message.on('end', () => {\n resolve(output.toString());\n });\n }));\n });\n }\n}\nexports.HttpClientResponse = HttpClientResponse;\nfunction isHttps(requestUrl) {\n const parsedUrl = new URL(requestUrl);\n return parsedUrl.protocol === 'https:';\n}\nexports.isHttps = isHttps;\nclass HttpClient {\n constructor(userAgent, handlers, requestOptions) {\n this._ignoreSslError = false;\n this._allowRedirects = true;\n this._allowRedirectDowngrade = false;\n this._maxRedirects = 50;\n this._allowRetries = false;\n this._maxRetries = 1;\n this._keepAlive = false;\n this._disposed = false;\n this.userAgent = userAgent;\n this.handlers = handlers || [];\n this.requestOptions = requestOptions;\n if (requestOptions) {\n if (requestOptions.ignoreSslError != null) {\n this._ignoreSslError = requestOptions.ignoreSslError;\n }\n this._socketTimeout = requestOptions.socketTimeout;\n if (requestOptions.allowRedirects != null) {\n this._allowRedirects = requestOptions.allowRedirects;\n }\n if (requestOptions.allowRedirectDowngrade != null) {\n this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;\n }\n if (requestOptions.maxRedirects != null) {\n this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);\n }\n if (requestOptions.keepAlive != null) {\n this._keepAlive = requestOptions.keepAlive;\n }\n if (requestOptions.allowRetries != null) {\n this._allowRetries = requestOptions.allowRetries;\n }\n if (requestOptions.maxRetries != null) {\n this._maxRetries = requestOptions.maxRetries;\n }\n }\n }\n options(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});\n });\n }\n get(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('GET', requestUrl, null, additionalHeaders || {});\n });\n }\n del(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('DELETE', requestUrl, null, additionalHeaders || {});\n });\n }\n post(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('POST', requestUrl, data, additionalHeaders || {});\n });\n }\n patch(requestUrl, 
data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('PATCH', requestUrl, data, additionalHeaders || {});\n });\n }\n put(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('PUT', requestUrl, data, additionalHeaders || {});\n });\n }\n head(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('HEAD', requestUrl, null, additionalHeaders || {});\n });\n }\n sendStream(verb, requestUrl, stream, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request(verb, requestUrl, stream, additionalHeaders);\n });\n }\n /**\n * Gets a typed object from an endpoint\n * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise\n */\n getJson(requestUrl, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n const res = yield this.get(requestUrl, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n postJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.post(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n putJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.put(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n patchJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.patch(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n /**\n * Makes a raw http request.\n * All other methods such as get, post, patch, and request ultimately call this.\n * Prefer get, del, post and patch\n */\n request(verb, requestUrl, data, headers) {\n return __awaiter(this, void 0, void 0, function* () {\n if (this._disposed) {\n throw new Error('Client has already been disposed.');\n }\n const parsedUrl = new URL(requestUrl);\n let info = this._prepareRequest(verb, parsedUrl, headers);\n // Only perform retries on reads since writes may not be idempotent.\n const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb)\n ? 
this._maxRetries + 1\n : 1;\n let numTries = 0;\n let response;\n do {\n response = yield this.requestRaw(info, data);\n // Check if it's an authentication challenge\n if (response &&\n response.message &&\n response.message.statusCode === HttpCodes.Unauthorized) {\n let authenticationHandler;\n for (const handler of this.handlers) {\n if (handler.canHandleAuthentication(response)) {\n authenticationHandler = handler;\n break;\n }\n }\n if (authenticationHandler) {\n return authenticationHandler.handleAuthentication(this, info, data);\n }\n else {\n // We have received an unauthorized response but have no handlers to handle it.\n // Let the response return to the caller.\n return response;\n }\n }\n let redirectsRemaining = this._maxRedirects;\n while (response.message.statusCode &&\n HttpRedirectCodes.includes(response.message.statusCode) &&\n this._allowRedirects &&\n redirectsRemaining > 0) {\n const redirectUrl = response.message.headers['location'];\n if (!redirectUrl) {\n // if there's no location to redirect to, we won't\n break;\n }\n const parsedRedirectUrl = new URL(redirectUrl);\n if (parsedUrl.protocol === 'https:' &&\n parsedUrl.protocol !== parsedRedirectUrl.protocol &&\n !this._allowRedirectDowngrade) {\n throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');\n }\n // we need to finish reading the response before reassigning response\n // which will leak the open socket.\n yield response.readBody();\n // strip authorization header if redirected to a different hostname\n if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {\n for (const header in headers) {\n // header names are case insensitive\n if (header.toLowerCase() === 'authorization') {\n delete headers[header];\n }\n }\n }\n // let's make the request with the new redirectUrl\n info = this._prepareRequest(verb, parsedRedirectUrl, headers);\n response = yield this.requestRaw(info, data);\n redirectsRemaining--;\n }\n if (!response.message.statusCode ||\n !HttpResponseRetryCodes.includes(response.message.statusCode)) {\n // If not a retry code, return immediately instead of retrying\n return response;\n }\n numTries += 1;\n if (numTries < maxTries) {\n yield response.readBody();\n yield this._performExponentialBackoff(numTries);\n }\n } while (numTries < maxTries);\n return response;\n });\n }\n /**\n * Needs to be called if keepAlive is set to true in request options.\n */\n dispose() {\n if (this._agent) {\n this._agent.destroy();\n }\n this._disposed = true;\n }\n /**\n * Raw request.\n * @param info\n * @param data\n */\n requestRaw(info, data) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve, reject) => {\n function callbackForResult(err, res) {\n if (err) {\n reject(err);\n }\n else if (!res) {\n // If `err` is not passed, then `res` must be passed.\n reject(new Error('Unknown error'));\n }\n else {\n resolve(res);\n }\n }\n this.requestRawWithCallback(info, data, callbackForResult);\n });\n });\n }\n /**\n * Raw request with callback.\n * @param info\n * @param data\n * @param onResult\n */\n requestRawWithCallback(info, data, onResult) {\n if (typeof data === 'string') {\n if (!info.options.headers) {\n info.options.headers = {};\n }\n info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');\n }\n let callbackCalled = false;\n function handleResult(err, res) {\n if (!callbackCalled) {\n callbackCalled = true;\n 
onResult(err, res);\n }\n }\n const req = info.httpModule.request(info.options, (msg) => {\n const res = new HttpClientResponse(msg);\n handleResult(undefined, res);\n });\n let socket;\n req.on('socket', sock => {\n socket = sock;\n });\n // If we ever get disconnected, we want the socket to timeout eventually\n req.setTimeout(this._socketTimeout || 3 * 60000, () => {\n if (socket) {\n socket.end();\n }\n handleResult(new Error(`Request timeout: ${info.options.path}`));\n });\n req.on('error', function (err) {\n // err has statusCode property\n // res should have headers\n handleResult(err);\n });\n if (data && typeof data === 'string') {\n req.write(data, 'utf8');\n }\n if (data && typeof data !== 'string') {\n data.on('close', function () {\n req.end();\n });\n data.pipe(req);\n }\n else {\n req.end();\n }\n }\n /**\n * Gets an http agent. This function is useful when you need an http agent that handles\n * routing through a proxy server - depending upon the url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com\n */\n getAgent(serverUrl) {\n const parsedUrl = new URL(serverUrl);\n return this._getAgent(parsedUrl);\n }\n _prepareRequest(method, requestUrl, headers) {\n const info = {};\n info.parsedUrl = requestUrl;\n const usingSsl = info.parsedUrl.protocol === 'https:';\n info.httpModule = usingSsl ? https : http;\n const defaultPort = usingSsl ? 443 : 80;\n info.options = {};\n info.options.host = info.parsedUrl.hostname;\n info.options.port = info.parsedUrl.port\n ? parseInt(info.parsedUrl.port)\n : defaultPort;\n info.options.path =\n (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');\n info.options.method = method;\n info.options.headers = this._mergeHeaders(headers);\n if (this.userAgent != null) {\n info.options.headers['user-agent'] = this.userAgent;\n }\n info.options.agent = this._getAgent(info.parsedUrl);\n // gives handlers an opportunity to participate\n if (this.handlers) {\n for (const handler of this.handlers) {\n handler.prepareRequest(info.options);\n }\n }\n return info;\n }\n _mergeHeaders(headers) {\n if (this.requestOptions && this.requestOptions.headers) {\n return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {}));\n }\n return lowercaseKeys(headers || {});\n }\n _getExistingOrDefaultHeader(additionalHeaders, header, _default) {\n let clientHeader;\n if (this.requestOptions && this.requestOptions.headers) {\n clientHeader = lowercaseKeys(this.requestOptions.headers)[header];\n }\n return additionalHeaders[header] || clientHeader || _default;\n }\n _getAgent(parsedUrl) {\n let agent;\n const proxyUrl = pm.getProxyUrl(parsedUrl);\n const useProxy = proxyUrl && proxyUrl.hostname;\n if (this._keepAlive && useProxy) {\n agent = this._proxyAgent;\n }\n if (this._keepAlive && !useProxy) {\n agent = this._agent;\n }\n // if agent is already assigned use that agent.\n if (agent) {\n return agent;\n }\n const usingSsl = parsedUrl.protocol === 'https:';\n let maxSockets = 100;\n if (this.requestOptions) {\n maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;\n }\n // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis.\n if (proxyUrl && proxyUrl.hostname) {\n const agentOptions = {\n maxSockets,\n keepAlive: this._keepAlive,\n proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && {\n proxyAuth: 
`${proxyUrl.username}:${proxyUrl.password}`\n })), { host: proxyUrl.hostname, port: proxyUrl.port })\n };\n let tunnelAgent;\n const overHttps = proxyUrl.protocol === 'https:';\n if (usingSsl) {\n tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;\n }\n else {\n tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;\n }\n agent = tunnelAgent(agentOptions);\n this._proxyAgent = agent;\n }\n // if reusing agent across request and tunneling agent isn't assigned create a new agent\n if (this._keepAlive && !agent) {\n const options = { keepAlive: this._keepAlive, maxSockets };\n agent = usingSsl ? new https.Agent(options) : new http.Agent(options);\n this._agent = agent;\n }\n // if not using private agent and tunnel agent isn't setup then use global agent\n if (!agent) {\n agent = usingSsl ? https.globalAgent : http.globalAgent;\n }\n if (usingSsl && this._ignoreSslError) {\n // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process\n // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options\n // we have to cast it to any and change it directly\n agent.options = Object.assign(agent.options || {}, {\n rejectUnauthorized: false\n });\n }\n return agent;\n }\n _performExponentialBackoff(retryNumber) {\n return __awaiter(this, void 0, void 0, function* () {\n retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);\n const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);\n return new Promise(resolve => setTimeout(() => resolve(), ms));\n });\n }\n _processResponse(res, options) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {\n const statusCode = res.message.statusCode || 0;\n const response = {\n statusCode,\n result: null,\n headers: {}\n };\n // not found leads to null obj returned\n if (statusCode === HttpCodes.NotFound) {\n resolve(response);\n }\n // get the result from the body\n function dateTimeDeserializer(key, value) {\n if (typeof value === 'string') {\n const a = new Date(value);\n if (!isNaN(a.valueOf())) {\n return a;\n }\n }\n return value;\n }\n let obj;\n let contents;\n try {\n contents = yield res.readBody();\n if (contents && contents.length > 0) {\n if (options && options.deserializeDates) {\n obj = JSON.parse(contents, dateTimeDeserializer);\n }\n else {\n obj = JSON.parse(contents);\n }\n response.result = obj;\n }\n response.headers = res.message.headers;\n }\n catch (err) {\n // Invalid resource (contents not json); leaving result obj null\n }\n // note that 3xx redirects are handled by the http layer.\n if (statusCode > 299) {\n let msg;\n // if exception/error in body, attempt to get better error\n if (obj && obj.message) {\n msg = obj.message;\n }\n else if (contents && contents.length > 0) {\n // it may be the case that the exception is in the body message as string\n msg = contents;\n }\n else {\n msg = `Failed request: (${statusCode})`;\n }\n const err = new HttpClientError(msg, statusCode);\n err.result = response.result;\n reject(err);\n }\n else {\n resolve(response);\n }\n }));\n });\n }\n}\nexports.HttpClient = HttpClient;\nconst lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.checkBypass = exports.getProxyUrl = void 0;\nfunction getProxyUrl(reqUrl) {\n const 
usingSsl = reqUrl.protocol === 'https:';\n if (checkBypass(reqUrl)) {\n return undefined;\n }\n const proxyVar = (() => {\n if (usingSsl) {\n return process.env['https_proxy'] || process.env['HTTPS_PROXY'];\n }\n else {\n return process.env['http_proxy'] || process.env['HTTP_PROXY'];\n }\n })();\n if (proxyVar) {\n return new URL(proxyVar);\n }\n else {\n return undefined;\n }\n}\nexports.getProxyUrl = getProxyUrl;\nfunction checkBypass(reqUrl) {\n if (!reqUrl.hostname) {\n return false;\n }\n const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';\n if (!noProxy) {\n return false;\n }\n // Determine the request port\n let reqPort;\n if (reqUrl.port) {\n reqPort = Number(reqUrl.port);\n }\n else if (reqUrl.protocol === 'http:') {\n reqPort = 80;\n }\n else if (reqUrl.protocol === 'https:') {\n reqPort = 443;\n }\n // Format the request hostname and hostname with port\n const upperReqHosts = [reqUrl.hostname.toUpperCase()];\n if (typeof reqPort === 'number') {\n upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);\n }\n // Compare request host against noproxy\n for (const upperNoProxyItem of noProxy\n .split(',')\n .map(x => x.trim().toUpperCase())\n .filter(x => x)) {\n if (upperReqHosts.some(x => x === upperNoProxyItem)) {\n return true;\n }\n }\n return false;\n}\nexports.checkBypass = checkBypass;\n//# sourceMappingURL=proxy.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.CodeGen = exports.Name = exports.nil = exports.stringify = exports.str = exports._ = exports.KeywordCxt = void 0;\nconst core_1 = require(\"./core\");\nconst draft7_1 = require(\"./vocabularies/draft7\");\nconst discriminator_1 = require(\"./vocabularies/discriminator\");\nconst draft7MetaSchema = require(\"./refs/json-schema-draft-07.json\");\nconst META_SUPPORT_DATA = [\"/properties\"];\nconst META_SCHEMA_ID = \"http://json-schema.org/draft-07/schema\";\nclass Ajv extends core_1.default {\n _addVocabularies() {\n super._addVocabularies();\n draft7_1.default.forEach((v) => this.addVocabulary(v));\n if (this.opts.discriminator)\n this.addKeyword(discriminator_1.default);\n }\n _addDefaultMetaSchema() {\n super._addDefaultMetaSchema();\n if (!this.opts.meta)\n return;\n const metaSchema = this.opts.$data\n ? this.$dataMetaSchema(draft7MetaSchema, META_SUPPORT_DATA)\n : draft7MetaSchema;\n this.addMetaSchema(metaSchema, META_SCHEMA_ID, false);\n this.refs[\"http://json-schema.org/schema\"] = META_SCHEMA_ID;\n }\n defaultMeta() {\n return (this.opts.defaultMeta =\n super.defaultMeta() || (this.getSchema(META_SCHEMA_ID) ? 
META_SCHEMA_ID : undefined));\n }\n}\nmodule.exports = exports = Ajv;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.default = Ajv;\nvar validate_1 = require(\"./compile/validate\");\nObject.defineProperty(exports, \"KeywordCxt\", { enumerable: true, get: function () { return validate_1.KeywordCxt; } });\nvar codegen_1 = require(\"./compile/codegen\");\nObject.defineProperty(exports, \"_\", { enumerable: true, get: function () { return codegen_1._; } });\nObject.defineProperty(exports, \"str\", { enumerable: true, get: function () { return codegen_1.str; } });\nObject.defineProperty(exports, \"stringify\", { enumerable: true, get: function () { return codegen_1.stringify; } });\nObject.defineProperty(exports, \"nil\", { enumerable: true, get: function () { return codegen_1.nil; } });\nObject.defineProperty(exports, \"Name\", { enumerable: true, get: function () { return codegen_1.Name; } });\nObject.defineProperty(exports, \"CodeGen\", { enumerable: true, get: function () { return codegen_1.CodeGen; } });\n//# sourceMappingURL=ajv.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.regexpCode = exports.getEsmExportName = exports.getProperty = exports.safeStringify = exports.stringify = exports.strConcat = exports.addCodeArg = exports.str = exports._ = exports.nil = exports._Code = exports.Name = exports.IDENTIFIER = exports._CodeOrName = void 0;\nclass _CodeOrName {\n}\nexports._CodeOrName = _CodeOrName;\nexports.IDENTIFIER = /^[a-z$_][a-z$_0-9]*$/i;\nclass Name extends _CodeOrName {\n constructor(s) {\n super();\n if (!exports.IDENTIFIER.test(s))\n throw new Error(\"CodeGen: name must be a valid identifier\");\n this.str = s;\n }\n toString() {\n return this.str;\n }\n emptyStr() {\n return false;\n }\n get names() {\n return { [this.str]: 1 };\n }\n}\nexports.Name = Name;\nclass _Code extends _CodeOrName {\n constructor(code) {\n super();\n this._items = typeof code === \"string\" ? [code] : code;\n }\n toString() {\n return this.str;\n }\n emptyStr() {\n if (this._items.length > 1)\n return false;\n const item = this._items[0];\n return item === \"\" || item === '\"\"';\n }\n get str() {\n var _a;\n return ((_a = this._str) !== null && _a !== void 0 ? _a : (this._str = this._items.reduce((s, c) => `${s}${c}`, \"\")));\n }\n get names() {\n var _a;\n return ((_a = this._names) !== null && _a !== void 0 ? 
_a : (this._names = this._items.reduce((names, c) => {\n if (c instanceof Name)\n names[c.str] = (names[c.str] || 0) + 1;\n return names;\n }, {})));\n }\n}\nexports._Code = _Code;\nexports.nil = new _Code(\"\");\nfunction _(strs, ...args) {\n const code = [strs[0]];\n let i = 0;\n while (i < args.length) {\n addCodeArg(code, args[i]);\n code.push(strs[++i]);\n }\n return new _Code(code);\n}\nexports._ = _;\nconst plus = new _Code(\"+\");\nfunction str(strs, ...args) {\n const expr = [safeStringify(strs[0])];\n let i = 0;\n while (i < args.length) {\n expr.push(plus);\n addCodeArg(expr, args[i]);\n expr.push(plus, safeStringify(strs[++i]));\n }\n optimize(expr);\n return new _Code(expr);\n}\nexports.str = str;\nfunction addCodeArg(code, arg) {\n if (arg instanceof _Code)\n code.push(...arg._items);\n else if (arg instanceof Name)\n code.push(arg);\n else\n code.push(interpolate(arg));\n}\nexports.addCodeArg = addCodeArg;\nfunction optimize(expr) {\n let i = 1;\n while (i < expr.length - 1) {\n if (expr[i] === plus) {\n const res = mergeExprItems(expr[i - 1], expr[i + 1]);\n if (res !== undefined) {\n expr.splice(i - 1, 3, res);\n continue;\n }\n expr[i++] = \"+\";\n }\n i++;\n }\n}\nfunction mergeExprItems(a, b) {\n if (b === '\"\"')\n return a;\n if (a === '\"\"')\n return b;\n if (typeof a == \"string\") {\n if (b instanceof Name || a[a.length - 1] !== '\"')\n return;\n if (typeof b != \"string\")\n return `${a.slice(0, -1)}${b}\"`;\n if (b[0] === '\"')\n return a.slice(0, -1) + b.slice(1);\n return;\n }\n if (typeof b == \"string\" && b[0] === '\"' && !(a instanceof Name))\n return `\"${a}${b.slice(1)}`;\n return;\n}\nfunction strConcat(c1, c2) {\n return c2.emptyStr() ? c1 : c1.emptyStr() ? c2 : str `${c1}${c2}`;\n}\nexports.strConcat = strConcat;\n// TODO do not allow arrays here\nfunction interpolate(x) {\n return typeof x == \"number\" || typeof x == \"boolean\" || x === null\n ? x\n : safeStringify(Array.isArray(x) ? x.join(\",\") : x);\n}\nfunction stringify(x) {\n return new _Code(safeStringify(x));\n}\nexports.stringify = stringify;\nfunction safeStringify(x) {\n return JSON.stringify(x)\n .replace(/\\u2028/g, \"\\\\u2028\")\n .replace(/\\u2029/g, \"\\\\u2029\");\n}\nexports.safeStringify = safeStringify;\nfunction getProperty(key) {\n return typeof key == \"string\" && exports.IDENTIFIER.test(key) ? 
new _Code(`.${key}`) : _ `[${key}]`;\n}\nexports.getProperty = getProperty;\n//Does best effort to format the name properly\nfunction getEsmExportName(key) {\n if (typeof key == \"string\" && exports.IDENTIFIER.test(key)) {\n return new _Code(`${key}`);\n }\n throw new Error(`CodeGen: invalid export name: ${key}, use explicit $id name mapping`);\n}\nexports.getEsmExportName = getEsmExportName;\nfunction regexpCode(rx) {\n return new _Code(rx.toString());\n}\nexports.regexpCode = regexpCode;\n//# sourceMappingURL=code.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.or = exports.and = exports.not = exports.CodeGen = exports.operators = exports.varKinds = exports.ValueScopeName = exports.ValueScope = exports.Scope = exports.Name = exports.regexpCode = exports.stringify = exports.getProperty = exports.nil = exports.strConcat = exports.str = exports._ = void 0;\nconst code_1 = require(\"./code\");\nconst scope_1 = require(\"./scope\");\nvar code_2 = require(\"./code\");\nObject.defineProperty(exports, \"_\", { enumerable: true, get: function () { return code_2._; } });\nObject.defineProperty(exports, \"str\", { enumerable: true, get: function () { return code_2.str; } });\nObject.defineProperty(exports, \"strConcat\", { enumerable: true, get: function () { return code_2.strConcat; } });\nObject.defineProperty(exports, \"nil\", { enumerable: true, get: function () { return code_2.nil; } });\nObject.defineProperty(exports, \"getProperty\", { enumerable: true, get: function () { return code_2.getProperty; } });\nObject.defineProperty(exports, \"stringify\", { enumerable: true, get: function () { return code_2.stringify; } });\nObject.defineProperty(exports, \"regexpCode\", { enumerable: true, get: function () { return code_2.regexpCode; } });\nObject.defineProperty(exports, \"Name\", { enumerable: true, get: function () { return code_2.Name; } });\nvar scope_2 = require(\"./scope\");\nObject.defineProperty(exports, \"Scope\", { enumerable: true, get: function () { return scope_2.Scope; } });\nObject.defineProperty(exports, \"ValueScope\", { enumerable: true, get: function () { return scope_2.ValueScope; } });\nObject.defineProperty(exports, \"ValueScopeName\", { enumerable: true, get: function () { return scope_2.ValueScopeName; } });\nObject.defineProperty(exports, \"varKinds\", { enumerable: true, get: function () { return scope_2.varKinds; } });\nexports.operators = {\n GT: new code_1._Code(\">\"),\n GTE: new code_1._Code(\">=\"),\n LT: new code_1._Code(\"<\"),\n LTE: new code_1._Code(\"<=\"),\n EQ: new code_1._Code(\"===\"),\n NEQ: new code_1._Code(\"!==\"),\n NOT: new code_1._Code(\"!\"),\n OR: new code_1._Code(\"||\"),\n AND: new code_1._Code(\"&&\"),\n ADD: new code_1._Code(\"+\"),\n};\nclass Node {\n optimizeNodes() {\n return this;\n }\n optimizeNames(_names, _constants) {\n return this;\n }\n}\nclass Def extends Node {\n constructor(varKind, name, rhs) {\n super();\n this.varKind = varKind;\n this.name = name;\n this.rhs = rhs;\n }\n render({ es5, _n }) {\n const varKind = es5 ? scope_1.varKinds.var : this.varKind;\n const rhs = this.rhs === undefined ? \"\" : ` = ${this.rhs}`;\n return `${varKind} ${this.name}${rhs};` + _n;\n }\n optimizeNames(names, constants) {\n if (!names[this.name.str])\n return;\n if (this.rhs)\n this.rhs = optimizeExpr(this.rhs, names, constants);\n return this;\n }\n get names() {\n return this.rhs instanceof code_1._CodeOrName ? 
this.rhs.names : {};\n }\n}\nclass Assign extends Node {\n constructor(lhs, rhs, sideEffects) {\n super();\n this.lhs = lhs;\n this.rhs = rhs;\n this.sideEffects = sideEffects;\n }\n render({ _n }) {\n return `${this.lhs} = ${this.rhs};` + _n;\n }\n optimizeNames(names, constants) {\n if (this.lhs instanceof code_1.Name && !names[this.lhs.str] && !this.sideEffects)\n return;\n this.rhs = optimizeExpr(this.rhs, names, constants);\n return this;\n }\n get names() {\n const names = this.lhs instanceof code_1.Name ? {} : { ...this.lhs.names };\n return addExprNames(names, this.rhs);\n }\n}\nclass AssignOp extends Assign {\n constructor(lhs, op, rhs, sideEffects) {\n super(lhs, rhs, sideEffects);\n this.op = op;\n }\n render({ _n }) {\n return `${this.lhs} ${this.op}= ${this.rhs};` + _n;\n }\n}\nclass Label extends Node {\n constructor(label) {\n super();\n this.label = label;\n this.names = {};\n }\n render({ _n }) {\n return `${this.label}:` + _n;\n }\n}\nclass Break extends Node {\n constructor(label) {\n super();\n this.label = label;\n this.names = {};\n }\n render({ _n }) {\n const label = this.label ? ` ${this.label}` : \"\";\n return `break${label};` + _n;\n }\n}\nclass Throw extends Node {\n constructor(error) {\n super();\n this.error = error;\n }\n render({ _n }) {\n return `throw ${this.error};` + _n;\n }\n get names() {\n return this.error.names;\n }\n}\nclass AnyCode extends Node {\n constructor(code) {\n super();\n this.code = code;\n }\n render({ _n }) {\n return `${this.code};` + _n;\n }\n optimizeNodes() {\n return `${this.code}` ? this : undefined;\n }\n optimizeNames(names, constants) {\n this.code = optimizeExpr(this.code, names, constants);\n return this;\n }\n get names() {\n return this.code instanceof code_1._CodeOrName ? this.code.names : {};\n }\n}\nclass ParentNode extends Node {\n constructor(nodes = []) {\n super();\n this.nodes = nodes;\n }\n render(opts) {\n return this.nodes.reduce((code, n) => code + n.render(opts), \"\");\n }\n optimizeNodes() {\n const { nodes } = this;\n let i = nodes.length;\n while (i--) {\n const n = nodes[i].optimizeNodes();\n if (Array.isArray(n))\n nodes.splice(i, 1, ...n);\n else if (n)\n nodes[i] = n;\n else\n nodes.splice(i, 1);\n }\n return nodes.length > 0 ? this : undefined;\n }\n optimizeNames(names, constants) {\n const { nodes } = this;\n let i = nodes.length;\n while (i--) {\n // iterating backwards improves 1-pass optimization\n const n = nodes[i];\n if (n.optimizeNames(names, constants))\n continue;\n subtractNames(names, n.names);\n nodes.splice(i, 1);\n }\n return nodes.length > 0 ? this : undefined;\n }\n get names() {\n return this.nodes.reduce((names, n) => addNames(names, n.names), {});\n }\n}\nclass BlockNode extends ParentNode {\n render(opts) {\n return \"{\" + opts._n + super.render(opts) + \"}\" + opts._n;\n }\n}\nclass Root extends ParentNode {\n}\nclass Else extends BlockNode {\n}\nElse.kind = \"else\";\nclass If extends BlockNode {\n constructor(condition, nodes) {\n super(nodes);\n this.condition = condition;\n }\n render(opts) {\n let code = `if(${this.condition})` + super.render(opts);\n if (this.else)\n code += \"else \" + this.else.render(opts);\n return code;\n }\n optimizeNodes() {\n super.optimizeNodes();\n const cond = this.condition;\n if (cond === true)\n return this.nodes; // else is ignored here\n let e = this.else;\n if (e) {\n const ns = e.optimizeNodes();\n e = this.else = Array.isArray(ns) ? new Else(ns) : ns;\n }\n if (e) {\n if (cond === false)\n return e instanceof If ? 
e : e.nodes;\n if (this.nodes.length)\n return this;\n return new If(not(cond), e instanceof If ? [e] : e.nodes);\n }\n if (cond === false || !this.nodes.length)\n return undefined;\n return this;\n }\n optimizeNames(names, constants) {\n var _a;\n this.else = (_a = this.else) === null || _a === void 0 ? void 0 : _a.optimizeNames(names, constants);\n if (!(super.optimizeNames(names, constants) || this.else))\n return;\n this.condition = optimizeExpr(this.condition, names, constants);\n return this;\n }\n get names() {\n const names = super.names;\n addExprNames(names, this.condition);\n if (this.else)\n addNames(names, this.else.names);\n return names;\n }\n}\nIf.kind = \"if\";\nclass For extends BlockNode {\n}\nFor.kind = \"for\";\nclass ForLoop extends For {\n constructor(iteration) {\n super();\n this.iteration = iteration;\n }\n render(opts) {\n return `for(${this.iteration})` + super.render(opts);\n }\n optimizeNames(names, constants) {\n if (!super.optimizeNames(names, constants))\n return;\n this.iteration = optimizeExpr(this.iteration, names, constants);\n return this;\n }\n get names() {\n return addNames(super.names, this.iteration.names);\n }\n}\nclass ForRange extends For {\n constructor(varKind, name, from, to) {\n super();\n this.varKind = varKind;\n this.name = name;\n this.from = from;\n this.to = to;\n }\n render(opts) {\n const varKind = opts.es5 ? scope_1.varKinds.var : this.varKind;\n const { name, from, to } = this;\n return `for(${varKind} ${name}=${from}; ${name}<${to}; ${name}++)` + super.render(opts);\n }\n get names() {\n const names = addExprNames(super.names, this.from);\n return addExprNames(names, this.to);\n }\n}\nclass ForIter extends For {\n constructor(loop, varKind, name, iterable) {\n super();\n this.loop = loop;\n this.varKind = varKind;\n this.name = name;\n this.iterable = iterable;\n }\n render(opts) {\n return `for(${this.varKind} ${this.name} ${this.loop} ${this.iterable})` + super.render(opts);\n }\n optimizeNames(names, constants) {\n if (!super.optimizeNames(names, constants))\n return;\n this.iterable = optimizeExpr(this.iterable, names, constants);\n return this;\n }\n get names() {\n return addNames(super.names, this.iterable.names);\n }\n}\nclass Func extends BlockNode {\n constructor(name, args, async) {\n super();\n this.name = name;\n this.args = args;\n this.async = async;\n }\n render(opts) {\n const _async = this.async ? \"async \" : \"\";\n return `${_async}function ${this.name}(${this.args})` + super.render(opts);\n }\n}\nFunc.kind = \"func\";\nclass Return extends ParentNode {\n render(opts) {\n return \"return \" + super.render(opts);\n }\n}\nReturn.kind = \"return\";\nclass Try extends BlockNode {\n render(opts) {\n let code = \"try\" + super.render(opts);\n if (this.catch)\n code += this.catch.render(opts);\n if (this.finally)\n code += this.finally.render(opts);\n return code;\n }\n optimizeNodes() {\n var _a, _b;\n super.optimizeNodes();\n (_a = this.catch) === null || _a === void 0 ? void 0 : _a.optimizeNodes();\n (_b = this.finally) === null || _b === void 0 ? void 0 : _b.optimizeNodes();\n return this;\n }\n optimizeNames(names, constants) {\n var _a, _b;\n super.optimizeNames(names, constants);\n (_a = this.catch) === null || _a === void 0 ? void 0 : _a.optimizeNames(names, constants);\n (_b = this.finally) === null || _b === void 0 ? 
void 0 : _b.optimizeNames(names, constants);\n return this;\n }\n get names() {\n const names = super.names;\n if (this.catch)\n addNames(names, this.catch.names);\n if (this.finally)\n addNames(names, this.finally.names);\n return names;\n }\n}\nclass Catch extends BlockNode {\n constructor(error) {\n super();\n this.error = error;\n }\n render(opts) {\n return `catch(${this.error})` + super.render(opts);\n }\n}\nCatch.kind = \"catch\";\nclass Finally extends BlockNode {\n render(opts) {\n return \"finally\" + super.render(opts);\n }\n}\nFinally.kind = \"finally\";\nclass CodeGen {\n constructor(extScope, opts = {}) {\n this._values = {};\n this._blockStarts = [];\n this._constants = {};\n this.opts = { ...opts, _n: opts.lines ? \"\\n\" : \"\" };\n this._extScope = extScope;\n this._scope = new scope_1.Scope({ parent: extScope });\n this._nodes = [new Root()];\n }\n toString() {\n return this._root.render(this.opts);\n }\n // returns unique name in the internal scope\n name(prefix) {\n return this._scope.name(prefix);\n }\n // reserves unique name in the external scope\n scopeName(prefix) {\n return this._extScope.name(prefix);\n }\n // reserves unique name in the external scope and assigns value to it\n scopeValue(prefixOrName, value) {\n const name = this._extScope.value(prefixOrName, value);\n const vs = this._values[name.prefix] || (this._values[name.prefix] = new Set());\n vs.add(name);\n return name;\n }\n getScopeValue(prefix, keyOrRef) {\n return this._extScope.getValue(prefix, keyOrRef);\n }\n // return code that assigns values in the external scope to the names that are used internally\n // (same names that were returned by gen.scopeName or gen.scopeValue)\n scopeRefs(scopeName) {\n return this._extScope.scopeRefs(scopeName, this._values);\n }\n scopeCode() {\n return this._extScope.scopeCode(this._values);\n }\n _def(varKind, nameOrPrefix, rhs, constant) {\n const name = this._scope.toName(nameOrPrefix);\n if (rhs !== undefined && constant)\n this._constants[name.str] = rhs;\n this._leafNode(new Def(varKind, name, rhs));\n return name;\n }\n // `const` declaration (`var` in es5 mode)\n const(nameOrPrefix, rhs, _constant) {\n return this._def(scope_1.varKinds.const, nameOrPrefix, rhs, _constant);\n }\n // `let` declaration with optional assignment (`var` in es5 mode)\n let(nameOrPrefix, rhs, _constant) {\n return this._def(scope_1.varKinds.let, nameOrPrefix, rhs, _constant);\n }\n // `var` declaration with optional assignment\n var(nameOrPrefix, rhs, _constant) {\n return this._def(scope_1.varKinds.var, nameOrPrefix, rhs, _constant);\n }\n // assignment code\n assign(lhs, rhs, sideEffects) {\n return this._leafNode(new Assign(lhs, rhs, sideEffects));\n }\n // `+=` code\n add(lhs, rhs) {\n return this._leafNode(new AssignOp(lhs, exports.operators.ADD, rhs));\n }\n // appends passed SafeExpr to code or executes Block\n code(c) {\n if (typeof c == \"function\")\n c();\n else if (c !== code_1.nil)\n this._leafNode(new AnyCode(c));\n return this;\n }\n // returns code for object literal for the passed argument list of key-value pairs\n object(...keyValues) {\n const code = [\"{\"];\n for (const [key, value] of keyValues) {\n if (code.length > 1)\n code.push(\",\");\n code.push(key);\n if (key !== value || this.opts.es5) {\n code.push(\":\");\n (0, code_1.addCodeArg)(code, value);\n }\n }\n code.push(\"}\");\n return new code_1._Code(code);\n }\n // `if` clause (or statement if `thenBody` and, optionally, `elseBody` are passed)\n if(condition, thenBody, elseBody) {\n 
this._blockNode(new If(condition));\n if (thenBody && elseBody) {\n this.code(thenBody).else().code(elseBody).endIf();\n }\n else if (thenBody) {\n this.code(thenBody).endIf();\n }\n else if (elseBody) {\n throw new Error('CodeGen: \"else\" body without \"then\" body');\n }\n return this;\n }\n // `else if` clause - invalid without `if` or after `else` clauses\n elseIf(condition) {\n return this._elseNode(new If(condition));\n }\n // `else` clause - only valid after `if` or `else if` clauses\n else() {\n return this._elseNode(new Else());\n }\n // end `if` statement (needed if gen.if was used only with condition)\n endIf() {\n return this._endBlockNode(If, Else);\n }\n _for(node, forBody) {\n this._blockNode(node);\n if (forBody)\n this.code(forBody).endFor();\n return this;\n }\n // a generic `for` clause (or statement if `forBody` is passed)\n for(iteration, forBody) {\n return this._for(new ForLoop(iteration), forBody);\n }\n // `for` statement for a range of values\n forRange(nameOrPrefix, from, to, forBody, varKind = this.opts.es5 ? scope_1.varKinds.var : scope_1.varKinds.let) {\n const name = this._scope.toName(nameOrPrefix);\n return this._for(new ForRange(varKind, name, from, to), () => forBody(name));\n }\n // `for-of` statement (in es5 mode replace with a normal for loop)\n forOf(nameOrPrefix, iterable, forBody, varKind = scope_1.varKinds.const) {\n const name = this._scope.toName(nameOrPrefix);\n if (this.opts.es5) {\n const arr = iterable instanceof code_1.Name ? iterable : this.var(\"_arr\", iterable);\n return this.forRange(\"_i\", 0, (0, code_1._) `${arr}.length`, (i) => {\n this.var(name, (0, code_1._) `${arr}[${i}]`);\n forBody(name);\n });\n }\n return this._for(new ForIter(\"of\", varKind, name, iterable), () => forBody(name));\n }\n // `for-in` statement.\n // With option `ownProperties` replaced with a `for-of` loop for object keys\n forIn(nameOrPrefix, obj, forBody, varKind = this.opts.es5 ? 
scope_1.varKinds.var : scope_1.varKinds.const) {\n if (this.opts.ownProperties) {\n return this.forOf(nameOrPrefix, (0, code_1._) `Object.keys(${obj})`, forBody);\n }\n const name = this._scope.toName(nameOrPrefix);\n return this._for(new ForIter(\"in\", varKind, name, obj), () => forBody(name));\n }\n // end `for` loop\n endFor() {\n return this._endBlockNode(For);\n }\n // `label` statement\n label(label) {\n return this._leafNode(new Label(label));\n }\n // `break` statement\n break(label) {\n return this._leafNode(new Break(label));\n }\n // `return` statement\n return(value) {\n const node = new Return();\n this._blockNode(node);\n this.code(value);\n if (node.nodes.length !== 1)\n throw new Error('CodeGen: \"return\" should have one node');\n return this._endBlockNode(Return);\n }\n // `try` statement\n try(tryBody, catchCode, finallyCode) {\n if (!catchCode && !finallyCode)\n throw new Error('CodeGen: \"try\" without \"catch\" and \"finally\"');\n const node = new Try();\n this._blockNode(node);\n this.code(tryBody);\n if (catchCode) {\n const error = this.name(\"e\");\n this._currNode = node.catch = new Catch(error);\n catchCode(error);\n }\n if (finallyCode) {\n this._currNode = node.finally = new Finally();\n this.code(finallyCode);\n }\n return this._endBlockNode(Catch, Finally);\n }\n // `throw` statement\n throw(error) {\n return this._leafNode(new Throw(error));\n }\n // start self-balancing block\n block(body, nodeCount) {\n this._blockStarts.push(this._nodes.length);\n if (body)\n this.code(body).endBlock(nodeCount);\n return this;\n }\n // end the current self-balancing block\n endBlock(nodeCount) {\n const len = this._blockStarts.pop();\n if (len === undefined)\n throw new Error(\"CodeGen: not in self-balancing block\");\n const toClose = this._nodes.length - len;\n if (toClose < 0 || (nodeCount !== undefined && toClose !== nodeCount)) {\n throw new Error(`CodeGen: wrong number of nodes: ${toClose} vs ${nodeCount} expected`);\n }\n this._nodes.length = len;\n return this;\n }\n // `function` heading (or definition if funcBody is passed)\n func(name, args = code_1.nil, async, funcBody) {\n this._blockNode(new Func(name, args, async));\n if (funcBody)\n this.code(funcBody).endFunc();\n return this;\n }\n // end function definition\n endFunc() {\n return this._endBlockNode(Func);\n }\n optimize(n = 1) {\n while (n-- > 0) {\n this._root.optimizeNodes();\n this._root.optimizeNames(this._root.names, this._constants);\n }\n }\n _leafNode(node) {\n this._currNode.nodes.push(node);\n return this;\n }\n _blockNode(node) {\n this._currNode.nodes.push(node);\n this._nodes.push(node);\n }\n _endBlockNode(N1, N2) {\n const n = this._currNode;\n if (n instanceof N1 || (N2 && n instanceof N2)) {\n this._nodes.pop();\n return this;\n }\n throw new Error(`CodeGen: not in block \"${N2 ? `${N1.kind}/${N2.kind}` : N1.kind}\"`);\n }\n _elseNode(node) {\n const n = this._currNode;\n if (!(n instanceof If)) {\n throw new Error('CodeGen: \"else\" without \"if\"');\n }\n this._currNode = n.else = node;\n return this;\n }\n get _root() {\n return this._nodes[0];\n }\n get _currNode() {\n const ns = this._nodes;\n return ns[ns.length - 1];\n }\n set _currNode(node) {\n const ns = this._nodes;\n ns[ns.length - 1] = node;\n }\n}\nexports.CodeGen = CodeGen;\nfunction addNames(names, from) {\n for (const n in from)\n names[n] = (names[n] || 0) + (from[n] || 0);\n return names;\n}\nfunction addExprNames(names, from) {\n return from instanceof code_1._CodeOrName ? 
addNames(names, from.names) : names;\n}\nfunction optimizeExpr(expr, names, constants) {\n if (expr instanceof code_1.Name)\n return replaceName(expr);\n if (!canOptimize(expr))\n return expr;\n return new code_1._Code(expr._items.reduce((items, c) => {\n if (c instanceof code_1.Name)\n c = replaceName(c);\n if (c instanceof code_1._Code)\n items.push(...c._items);\n else\n items.push(c);\n return items;\n }, []));\n function replaceName(n) {\n const c = constants[n.str];\n if (c === undefined || names[n.str] !== 1)\n return n;\n delete names[n.str];\n return c;\n }\n function canOptimize(e) {\n return (e instanceof code_1._Code &&\n e._items.some((c) => c instanceof code_1.Name && names[c.str] === 1 && constants[c.str] !== undefined));\n }\n}\nfunction subtractNames(names, from) {\n for (const n in from)\n names[n] = (names[n] || 0) - (from[n] || 0);\n}\nfunction not(x) {\n return typeof x == \"boolean\" || typeof x == \"number\" || x === null ? !x : (0, code_1._) `!${par(x)}`;\n}\nexports.not = not;\nconst andCode = mappend(exports.operators.AND);\n// boolean AND (&&) expression with the passed arguments\nfunction and(...args) {\n return args.reduce(andCode);\n}\nexports.and = and;\nconst orCode = mappend(exports.operators.OR);\n// boolean OR (||) expression with the passed arguments\nfunction or(...args) {\n return args.reduce(orCode);\n}\nexports.or = or;\nfunction mappend(op) {\n return (x, y) => (x === code_1.nil ? y : y === code_1.nil ? x : (0, code_1._) `${par(x)} ${op} ${par(y)}`);\n}\nfunction par(x) {\n return x instanceof code_1.Name ? x : (0, code_1._) `(${x})`;\n}\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ValueScope = exports.ValueScopeName = exports.Scope = exports.varKinds = exports.UsedValueState = void 0;\nconst code_1 = require(\"./code\");\nclass ValueError extends Error {\n constructor(name) {\n super(`CodeGen: \"code\" for ${name} not defined`);\n this.value = name.value;\n }\n}\nvar UsedValueState;\n(function (UsedValueState) {\n UsedValueState[UsedValueState[\"Started\"] = 0] = \"Started\";\n UsedValueState[UsedValueState[\"Completed\"] = 1] = \"Completed\";\n})(UsedValueState = exports.UsedValueState || (exports.UsedValueState = {}));\nexports.varKinds = {\n const: new code_1.Name(\"const\"),\n let: new code_1.Name(\"let\"),\n var: new code_1.Name(\"var\"),\n};\nclass Scope {\n constructor({ prefixes, parent } = {}) {\n this._names = {};\n this._prefixes = prefixes;\n this._parent = parent;\n }\n toName(nameOrPrefix) {\n return nameOrPrefix instanceof code_1.Name ? nameOrPrefix : this.name(nameOrPrefix);\n }\n name(prefix) {\n return new code_1.Name(this._newName(prefix));\n }\n _newName(prefix) {\n const ng = this._names[prefix] || this._nameGroup(prefix);\n return `${prefix}${ng.index++}`;\n }\n _nameGroup(prefix) {\n var _a, _b;\n if (((_b = (_a = this._parent) === null || _a === void 0 ? void 0 : _a._prefixes) === null || _b === void 0 ? 
void 0 : _b.has(prefix)) || (this._prefixes && !this._prefixes.has(prefix))) {\n throw new Error(`CodeGen: prefix \"${prefix}\" is not allowed in this scope`);\n }\n return (this._names[prefix] = { prefix, index: 0 });\n }\n}\nexports.Scope = Scope;\nclass ValueScopeName extends code_1.Name {\n constructor(prefix, nameStr) {\n super(nameStr);\n this.prefix = prefix;\n }\n setValue(value, { property, itemIndex }) {\n this.value = value;\n this.scopePath = (0, code_1._) `.${new code_1.Name(property)}[${itemIndex}]`;\n }\n}\nexports.ValueScopeName = ValueScopeName;\nconst line = (0, code_1._) `\\n`;\nclass ValueScope extends Scope {\n constructor(opts) {\n super(opts);\n this._values = {};\n this._scope = opts.scope;\n this.opts = { ...opts, _n: opts.lines ? line : code_1.nil };\n }\n get() {\n return this._scope;\n }\n name(prefix) {\n return new ValueScopeName(prefix, this._newName(prefix));\n }\n value(nameOrPrefix, value) {\n var _a;\n if (value.ref === undefined)\n throw new Error(\"CodeGen: ref must be passed in value\");\n const name = this.toName(nameOrPrefix);\n const { prefix } = name;\n const valueKey = (_a = value.key) !== null && _a !== void 0 ? _a : value.ref;\n let vs = this._values[prefix];\n if (vs) {\n const _name = vs.get(valueKey);\n if (_name)\n return _name;\n }\n else {\n vs = this._values[prefix] = new Map();\n }\n vs.set(valueKey, name);\n const s = this._scope[prefix] || (this._scope[prefix] = []);\n const itemIndex = s.length;\n s[itemIndex] = value.ref;\n name.setValue(value, { property: prefix, itemIndex });\n return name;\n }\n getValue(prefix, keyOrRef) {\n const vs = this._values[prefix];\n if (!vs)\n return;\n return vs.get(keyOrRef);\n }\n scopeRefs(scopeName, values = this._values) {\n return this._reduceValues(values, (name) => {\n if (name.scopePath === undefined)\n throw new Error(`CodeGen: name \"${name}\" has no value`);\n return (0, code_1._) `${scopeName}${name.scopePath}`;\n });\n }\n scopeCode(values = this._values, usedValues, getCode) {\n return this._reduceValues(values, (name) => {\n if (name.value === undefined)\n throw new Error(`CodeGen: name \"${name}\" has no value`);\n return name.value.code;\n }, usedValues, getCode);\n }\n _reduceValues(values, valueCode, usedValues = {}, getCode) {\n let code = code_1.nil;\n for (const prefix in values) {\n const vs = values[prefix];\n if (!vs)\n continue;\n const nameSet = (usedValues[prefix] = usedValues[prefix] || new Map());\n vs.forEach((name) => {\n if (nameSet.has(name))\n return;\n nameSet.set(name, UsedValueState.Started);\n let c = valueCode(name);\n if (c) {\n const def = this.opts.es5 ? exports.varKinds.var : exports.varKinds.const;\n code = (0, code_1._) `${code}${def} ${name} = ${c};${this.opts._n}`;\n }\n else if ((c = getCode === null || getCode === void 0 ? 
void 0 : getCode(name))) {\n code = (0, code_1._) `${code}${c}${this.opts._n}`;\n }\n else {\n throw new ValueError(name);\n }\n nameSet.set(name, UsedValueState.Completed);\n });\n }\n return code;\n }\n}\nexports.ValueScope = ValueScope;\n//# sourceMappingURL=scope.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.extendErrors = exports.resetErrorsCount = exports.reportExtraError = exports.reportError = exports.keyword$DataError = exports.keywordError = void 0;\nconst codegen_1 = require(\"./codegen\");\nconst util_1 = require(\"./util\");\nconst names_1 = require(\"./names\");\nexports.keywordError = {\n message: ({ keyword }) => (0, codegen_1.str) `must pass \"${keyword}\" keyword validation`,\n};\nexports.keyword$DataError = {\n message: ({ keyword, schemaType }) => schemaType\n ? (0, codegen_1.str) `\"${keyword}\" keyword must be ${schemaType} ($data)`\n : (0, codegen_1.str) `\"${keyword}\" keyword is invalid ($data)`,\n};\nfunction reportError(cxt, error = exports.keywordError, errorPaths, overrideAllErrors) {\n const { it } = cxt;\n const { gen, compositeRule, allErrors } = it;\n const errObj = errorObjectCode(cxt, error, errorPaths);\n if (overrideAllErrors !== null && overrideAllErrors !== void 0 ? overrideAllErrors : (compositeRule || allErrors)) {\n addError(gen, errObj);\n }\n else {\n returnErrors(it, (0, codegen_1._) `[${errObj}]`);\n }\n}\nexports.reportError = reportError;\nfunction reportExtraError(cxt, error = exports.keywordError, errorPaths) {\n const { it } = cxt;\n const { gen, compositeRule, allErrors } = it;\n const errObj = errorObjectCode(cxt, error, errorPaths);\n addError(gen, errObj);\n if (!(compositeRule || allErrors)) {\n returnErrors(it, names_1.default.vErrors);\n }\n}\nexports.reportExtraError = reportExtraError;\nfunction resetErrorsCount(gen, errsCount) {\n gen.assign(names_1.default.errors, errsCount);\n gen.if((0, codegen_1._) `${names_1.default.vErrors} !== null`, () => gen.if(errsCount, () => gen.assign((0, codegen_1._) `${names_1.default.vErrors}.length`, errsCount), () => gen.assign(names_1.default.vErrors, null)));\n}\nexports.resetErrorsCount = resetErrorsCount;\nfunction extendErrors({ gen, keyword, schemaValue, data, errsCount, it, }) {\n /* istanbul ignore if */\n if (errsCount === undefined)\n throw new Error(\"ajv implementation error\");\n const err = gen.name(\"err\");\n gen.forRange(\"i\", errsCount, names_1.default.errors, (i) => {\n gen.const(err, (0, codegen_1._) `${names_1.default.vErrors}[${i}]`);\n gen.if((0, codegen_1._) `${err}.instancePath === undefined`, () => gen.assign((0, codegen_1._) `${err}.instancePath`, (0, codegen_1.strConcat)(names_1.default.instancePath, it.errorPath)));\n gen.assign((0, codegen_1._) `${err}.schemaPath`, (0, codegen_1.str) `${it.errSchemaPath}/${keyword}`);\n if (it.opts.verbose) {\n gen.assign((0, codegen_1._) `${err}.schema`, schemaValue);\n gen.assign((0, codegen_1._) `${err}.data`, data);\n }\n });\n}\nexports.extendErrors = extendErrors;\nfunction addError(gen, errObj) {\n const err = gen.const(\"err\", errObj);\n gen.if((0, codegen_1._) `${names_1.default.vErrors} === null`, () => gen.assign(names_1.default.vErrors, (0, codegen_1._) `[${err}]`), (0, codegen_1._) `${names_1.default.vErrors}.push(${err})`);\n gen.code((0, codegen_1._) `${names_1.default.errors}++`);\n}\nfunction returnErrors(it, errs) {\n const { gen, validateName, schemaEnv } = it;\n if (schemaEnv.$async) {\n gen.throw((0, codegen_1._) `new ${it.ValidationError}(${errs})`);\n }\n 
else {\n gen.assign((0, codegen_1._) `${validateName}.errors`, errs);\n gen.return(false);\n }\n}\nconst E = {\n keyword: new codegen_1.Name(\"keyword\"),\n schemaPath: new codegen_1.Name(\"schemaPath\"),\n params: new codegen_1.Name(\"params\"),\n propertyName: new codegen_1.Name(\"propertyName\"),\n message: new codegen_1.Name(\"message\"),\n schema: new codegen_1.Name(\"schema\"),\n parentSchema: new codegen_1.Name(\"parentSchema\"),\n};\nfunction errorObjectCode(cxt, error, errorPaths) {\n const { createErrors } = cxt.it;\n if (createErrors === false)\n return (0, codegen_1._) `{}`;\n return errorObject(cxt, error, errorPaths);\n}\nfunction errorObject(cxt, error, errorPaths = {}) {\n const { gen, it } = cxt;\n const keyValues = [\n errorInstancePath(it, errorPaths),\n errorSchemaPath(cxt, errorPaths),\n ];\n extraErrorProps(cxt, error, keyValues);\n return gen.object(...keyValues);\n}\nfunction errorInstancePath({ errorPath }, { instancePath }) {\n const instPath = instancePath\n ? (0, codegen_1.str) `${errorPath}${(0, util_1.getErrorPath)(instancePath, util_1.Type.Str)}`\n : errorPath;\n return [names_1.default.instancePath, (0, codegen_1.strConcat)(names_1.default.instancePath, instPath)];\n}\nfunction errorSchemaPath({ keyword, it: { errSchemaPath } }, { schemaPath, parentSchema }) {\n let schPath = parentSchema ? errSchemaPath : (0, codegen_1.str) `${errSchemaPath}/${keyword}`;\n if (schemaPath) {\n schPath = (0, codegen_1.str) `${schPath}${(0, util_1.getErrorPath)(schemaPath, util_1.Type.Str)}`;\n }\n return [E.schemaPath, schPath];\n}\nfunction extraErrorProps(cxt, { params, message }, keyValues) {\n const { keyword, data, schemaValue, it } = cxt;\n const { opts, propertyName, topSchemaRef, schemaPath } = it;\n keyValues.push([E.keyword, keyword], [E.params, typeof params == \"function\" ? params(cxt) : params || (0, codegen_1._) `{}`]);\n if (opts.messages) {\n keyValues.push([E.message, typeof message == \"function\" ? message(cxt) : message]);\n }\n if (opts.verbose) {\n keyValues.push([E.schema, schemaValue], [E.parentSchema, (0, codegen_1._) `${topSchemaRef}${schemaPath}`], [names_1.default.data, data]);\n }\n if (propertyName)\n keyValues.push([E.propertyName, propertyName]);\n}\n//# sourceMappingURL=errors.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.resolveSchema = exports.getCompilingSchema = exports.resolveRef = exports.compileSchema = exports.SchemaEnv = void 0;\nconst codegen_1 = require(\"./codegen\");\nconst validation_error_1 = require(\"../runtime/validation_error\");\nconst names_1 = require(\"./names\");\nconst resolve_1 = require(\"./resolve\");\nconst util_1 = require(\"./util\");\nconst validate_1 = require(\"./validate\");\nclass SchemaEnv {\n constructor(env) {\n var _a;\n this.refs = {};\n this.dynamicAnchors = {};\n let schema;\n if (typeof env.schema == \"object\")\n schema = env.schema;\n this.schema = env.schema;\n this.schemaId = env.schemaId;\n this.root = env.root || this;\n this.baseId = (_a = env.baseId) !== null && _a !== void 0 ? _a : (0, resolve_1.normalizeId)(schema === null || schema === void 0 ? void 0 : schema[env.schemaId || \"$id\"]);\n this.schemaPath = env.schemaPath;\n this.localRefs = env.localRefs;\n this.meta = env.meta;\n this.$async = schema === null || schema === void 0 ? 
void 0 : schema.$async;\n this.refs = {};\n }\n}\nexports.SchemaEnv = SchemaEnv;\n// let codeSize = 0\n// let nodeCount = 0\n// Compiles schema in SchemaEnv\nfunction compileSchema(sch) {\n // TODO refactor - remove compilations\n const _sch = getCompilingSchema.call(this, sch);\n if (_sch)\n return _sch;\n const rootId = (0, resolve_1.getFullPath)(this.opts.uriResolver, sch.root.baseId); // TODO if getFullPath removed 1 tests fails\n const { es5, lines } = this.opts.code;\n const { ownProperties } = this.opts;\n const gen = new codegen_1.CodeGen(this.scope, { es5, lines, ownProperties });\n let _ValidationError;\n if (sch.$async) {\n _ValidationError = gen.scopeValue(\"Error\", {\n ref: validation_error_1.default,\n code: (0, codegen_1._) `require(\"ajv/dist/runtime/validation_error\").default`,\n });\n }\n const validateName = gen.scopeName(\"validate\");\n sch.validateName = validateName;\n const schemaCxt = {\n gen,\n allErrors: this.opts.allErrors,\n data: names_1.default.data,\n parentData: names_1.default.parentData,\n parentDataProperty: names_1.default.parentDataProperty,\n dataNames: [names_1.default.data],\n dataPathArr: [codegen_1.nil],\n dataLevel: 0,\n dataTypes: [],\n definedProperties: new Set(),\n topSchemaRef: gen.scopeValue(\"schema\", this.opts.code.source === true\n ? { ref: sch.schema, code: (0, codegen_1.stringify)(sch.schema) }\n : { ref: sch.schema }),\n validateName,\n ValidationError: _ValidationError,\n schema: sch.schema,\n schemaEnv: sch,\n rootId,\n baseId: sch.baseId || rootId,\n schemaPath: codegen_1.nil,\n errSchemaPath: sch.schemaPath || (this.opts.jtd ? \"\" : \"#\"),\n errorPath: (0, codegen_1._) `\"\"`,\n opts: this.opts,\n self: this,\n };\n let sourceCode;\n try {\n this._compilations.add(sch);\n (0, validate_1.validateFunctionCode)(schemaCxt);\n gen.optimize(this.opts.code.optimize);\n // gen.optimize(1)\n const validateCode = gen.toString();\n sourceCode = `${gen.scopeRefs(names_1.default.scope)}return ${validateCode}`;\n // console.log((codeSize += sourceCode.length), (nodeCount += gen.nodeCount))\n if (this.opts.code.process)\n sourceCode = this.opts.code.process(sourceCode, sch);\n // console.log(\"\\n\\n\\n *** \\n\", sourceCode)\n const makeValidate = new Function(`${names_1.default.self}`, `${names_1.default.scope}`, sourceCode);\n const validate = makeValidate(this, this.scope.get());\n this.scope.value(validateName, { ref: validate });\n validate.errors = null;\n validate.schema = sch.schema;\n validate.schemaEnv = sch;\n if (sch.$async)\n validate.$async = true;\n if (this.opts.code.source === true) {\n validate.source = { validateName, validateCode, scopeValues: gen._values };\n }\n if (this.opts.unevaluated) {\n const { props, items } = schemaCxt;\n validate.evaluated = {\n props: props instanceof codegen_1.Name ? undefined : props,\n items: items instanceof codegen_1.Name ? 
undefined : items,\n dynamicProps: props instanceof codegen_1.Name,\n dynamicItems: items instanceof codegen_1.Name,\n };\n if (validate.source)\n validate.source.evaluated = (0, codegen_1.stringify)(validate.evaluated);\n }\n sch.validate = validate;\n return sch;\n }\n catch (e) {\n delete sch.validate;\n delete sch.validateName;\n if (sourceCode)\n this.logger.error(\"Error compiling schema, function code:\", sourceCode);\n // console.log(\"\\n\\n\\n *** \\n\", sourceCode, this.opts)\n throw e;\n }\n finally {\n this._compilations.delete(sch);\n }\n}\nexports.compileSchema = compileSchema;\nfunction resolveRef(root, baseId, ref) {\n var _a;\n ref = (0, resolve_1.resolveUrl)(this.opts.uriResolver, baseId, ref);\n const schOrFunc = root.refs[ref];\n if (schOrFunc)\n return schOrFunc;\n let _sch = resolve.call(this, root, ref);\n if (_sch === undefined) {\n const schema = (_a = root.localRefs) === null || _a === void 0 ? void 0 : _a[ref]; // TODO maybe localRefs should hold SchemaEnv\n const { schemaId } = this.opts;\n if (schema)\n _sch = new SchemaEnv({ schema, schemaId, root, baseId });\n }\n if (_sch === undefined)\n return;\n return (root.refs[ref] = inlineOrCompile.call(this, _sch));\n}\nexports.resolveRef = resolveRef;\nfunction inlineOrCompile(sch) {\n if ((0, resolve_1.inlineRef)(sch.schema, this.opts.inlineRefs))\n return sch.schema;\n return sch.validate ? sch : compileSchema.call(this, sch);\n}\n// Index of schema compilation in the currently compiled list\nfunction getCompilingSchema(schEnv) {\n for (const sch of this._compilations) {\n if (sameSchemaEnv(sch, schEnv))\n return sch;\n }\n}\nexports.getCompilingSchema = getCompilingSchema;\nfunction sameSchemaEnv(s1, s2) {\n return s1.schema === s2.schema && s1.root === s2.root && s1.baseId === s2.baseId;\n}\n// resolve and compile the references ($ref)\n// TODO returns AnySchemaObject (if the schema can be inlined) or validation function\nfunction resolve(root, // information about the root schema for the current schema\nref // reference to resolve\n) {\n let sch;\n while (typeof (sch = this.refs[ref]) == \"string\")\n ref = sch;\n return sch || this.schemas[ref] || resolveSchema.call(this, root, ref);\n}\n// Resolve schema, its root and baseId\nfunction resolveSchema(root, // root object with properties schema, refs TODO below SchemaEnv is assigned to it\nref // reference to resolve\n) {\n const p = this.opts.uriResolver.parse(ref);\n const refPath = (0, resolve_1._getFullPath)(this.opts.uriResolver, p);\n let baseId = (0, resolve_1.getFullPath)(this.opts.uriResolver, root.baseId, undefined);\n // TODO `Object.keys(root.schema).length > 0` should not be needed - but removing breaks 2 tests\n if (Object.keys(root.schema).length > 0 && refPath === baseId) {\n return getJsonPointer.call(this, p, root);\n }\n const id = (0, resolve_1.normalizeId)(refPath);\n const schOrRef = this.refs[id] || this.schemas[id];\n if (typeof schOrRef == \"string\") {\n const sch = resolveSchema.call(this, root, schOrRef);\n if (typeof (sch === null || sch === void 0 ? void 0 : sch.schema) !== \"object\")\n return;\n return getJsonPointer.call(this, p, sch);\n }\n if (typeof (schOrRef === null || schOrRef === void 0 ? 
void 0 : schOrRef.schema) !== \"object\")\n return;\n if (!schOrRef.validate)\n compileSchema.call(this, schOrRef);\n if (id === (0, resolve_1.normalizeId)(ref)) {\n const { schema } = schOrRef;\n const { schemaId } = this.opts;\n const schId = schema[schemaId];\n if (schId)\n baseId = (0, resolve_1.resolveUrl)(this.opts.uriResolver, baseId, schId);\n return new SchemaEnv({ schema, schemaId, root, baseId });\n }\n return getJsonPointer.call(this, p, schOrRef);\n}\nexports.resolveSchema = resolveSchema;\nconst PREVENT_SCOPE_CHANGE = new Set([\n \"properties\",\n \"patternProperties\",\n \"enum\",\n \"dependencies\",\n \"definitions\",\n]);\nfunction getJsonPointer(parsedRef, { baseId, schema, root }) {\n var _a;\n if (((_a = parsedRef.fragment) === null || _a === void 0 ? void 0 : _a[0]) !== \"/\")\n return;\n for (const part of parsedRef.fragment.slice(1).split(\"/\")) {\n if (typeof schema === \"boolean\")\n return;\n const partSchema = schema[(0, util_1.unescapeFragment)(part)];\n if (partSchema === undefined)\n return;\n schema = partSchema;\n // TODO PREVENT_SCOPE_CHANGE could be defined in keyword def?\n const schId = typeof schema === \"object\" && schema[this.opts.schemaId];\n if (!PREVENT_SCOPE_CHANGE.has(part) && schId) {\n baseId = (0, resolve_1.resolveUrl)(this.opts.uriResolver, baseId, schId);\n }\n }\n let env;\n if (typeof schema != \"boolean\" && schema.$ref && !(0, util_1.schemaHasRulesButRef)(schema, this.RULES)) {\n const $ref = (0, resolve_1.resolveUrl)(this.opts.uriResolver, baseId, schema.$ref);\n env = resolveSchema.call(this, root, $ref);\n }\n // even though resolution failed we need to return SchemaEnv to throw exception\n // so that compileAsync loads missing schema.\n const { schemaId } = this.opts;\n env = env || new SchemaEnv({ schema, schemaId, root, baseId });\n if (env.schema !== env.root.schema)\n return env;\n return undefined;\n}\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = require(\"./codegen\");\nconst names = {\n // validation function arguments\n data: new codegen_1.Name(\"data\"),\n // args passed from referencing schema\n valCxt: new codegen_1.Name(\"valCxt\"),\n instancePath: new codegen_1.Name(\"instancePath\"),\n parentData: new codegen_1.Name(\"parentData\"),\n parentDataProperty: new codegen_1.Name(\"parentDataProperty\"),\n rootData: new codegen_1.Name(\"rootData\"),\n dynamicAnchors: new codegen_1.Name(\"dynamicAnchors\"),\n // function scoped variables\n vErrors: new codegen_1.Name(\"vErrors\"),\n errors: new codegen_1.Name(\"errors\"),\n this: new codegen_1.Name(\"this\"),\n // \"globals\"\n self: new codegen_1.Name(\"self\"),\n scope: new codegen_1.Name(\"scope\"),\n // JTD serialize/parse name for JSON string and position\n json: new codegen_1.Name(\"json\"),\n jsonPos: new codegen_1.Name(\"jsonPos\"),\n jsonLen: new codegen_1.Name(\"jsonLen\"),\n jsonPart: new codegen_1.Name(\"jsonPart\"),\n};\nexports.default = names;\n//# sourceMappingURL=names.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst resolve_1 = require(\"./resolve\");\nclass MissingRefError extends Error {\n constructor(resolver, baseId, ref, msg) {\n super(msg || `can't resolve reference ${ref} from id ${baseId}`);\n this.missingRef = (0, resolve_1.resolveUrl)(resolver, baseId, ref);\n this.missingSchema = (0, resolve_1.normalizeId)((0, resolve_1.getFullPath)(resolver, this.missingRef));\n }\n}\nexports.default = MissingRefError;\n//# 
sourceMappingURL=ref_error.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getSchemaRefs = exports.resolveUrl = exports.normalizeId = exports._getFullPath = exports.getFullPath = exports.inlineRef = void 0;\nconst util_1 = require(\"./util\");\nconst equal = require(\"fast-deep-equal\");\nconst traverse = require(\"json-schema-traverse\");\n// TODO refactor to use keyword definitions\nconst SIMPLE_INLINED = new Set([\n \"type\",\n \"format\",\n \"pattern\",\n \"maxLength\",\n \"minLength\",\n \"maxProperties\",\n \"minProperties\",\n \"maxItems\",\n \"minItems\",\n \"maximum\",\n \"minimum\",\n \"uniqueItems\",\n \"multipleOf\",\n \"required\",\n \"enum\",\n \"const\",\n]);\nfunction inlineRef(schema, limit = true) {\n if (typeof schema == \"boolean\")\n return true;\n if (limit === true)\n return !hasRef(schema);\n if (!limit)\n return false;\n return countKeys(schema) <= limit;\n}\nexports.inlineRef = inlineRef;\nconst REF_KEYWORDS = new Set([\n \"$ref\",\n \"$recursiveRef\",\n \"$recursiveAnchor\",\n \"$dynamicRef\",\n \"$dynamicAnchor\",\n]);\nfunction hasRef(schema) {\n for (const key in schema) {\n if (REF_KEYWORDS.has(key))\n return true;\n const sch = schema[key];\n if (Array.isArray(sch) && sch.some(hasRef))\n return true;\n if (typeof sch == \"object\" && hasRef(sch))\n return true;\n }\n return false;\n}\nfunction countKeys(schema) {\n let count = 0;\n for (const key in schema) {\n if (key === \"$ref\")\n return Infinity;\n count++;\n if (SIMPLE_INLINED.has(key))\n continue;\n if (typeof schema[key] == \"object\") {\n (0, util_1.eachItem)(schema[key], (sch) => (count += countKeys(sch)));\n }\n if (count === Infinity)\n return Infinity;\n }\n return count;\n}\nfunction getFullPath(resolver, id = \"\", normalize) {\n if (normalize !== false)\n id = normalizeId(id);\n const p = resolver.parse(id);\n return _getFullPath(resolver, p);\n}\nexports.getFullPath = getFullPath;\nfunction _getFullPath(resolver, p) {\n const serialized = resolver.serialize(p);\n return serialized.split(\"#\")[0] + \"#\";\n}\nexports._getFullPath = _getFullPath;\nconst TRAILING_SLASH_HASH = /#\\/?$/;\nfunction normalizeId(id) {\n return id ? id.replace(TRAILING_SLASH_HASH, \"\") : \"\";\n}\nexports.normalizeId = normalizeId;\nfunction resolveUrl(resolver, baseId, id) {\n id = normalizeId(id);\n return resolver.resolve(baseId, id);\n}\nexports.resolveUrl = resolveUrl;\nconst ANCHOR = /^[a-z_][-a-z0-9._]*$/i;\nfunction getSchemaRefs(schema, baseId) {\n if (typeof schema == \"boolean\")\n return {};\n const { schemaId, uriResolver } = this.opts;\n const schId = normalizeId(schema[schemaId] || baseId);\n const baseIds = { \"\": schId };\n const pathPrefix = getFullPath(uriResolver, schId, false);\n const localRefs = {};\n const schemaRefs = new Set();\n traverse(schema, { allKeys: true }, (sch, jsonPtr, _, parentJsonPtr) => {\n if (parentJsonPtr === undefined)\n return;\n const fullPath = pathPrefix + jsonPtr;\n let baseId = baseIds[parentJsonPtr];\n if (typeof sch[schemaId] == \"string\")\n baseId = addRef.call(this, sch[schemaId]);\n addAnchor.call(this, sch.$anchor);\n addAnchor.call(this, sch.$dynamicAnchor);\n baseIds[jsonPtr] = baseId;\n function addRef(ref) {\n // eslint-disable-next-line @typescript-eslint/unbound-method\n const _resolve = this.opts.uriResolver.resolve;\n ref = normalizeId(baseId ? 
_resolve(baseId, ref) : ref);\n if (schemaRefs.has(ref))\n throw ambiguos(ref);\n schemaRefs.add(ref);\n let schOrRef = this.refs[ref];\n if (typeof schOrRef == \"string\")\n schOrRef = this.refs[schOrRef];\n if (typeof schOrRef == \"object\") {\n checkAmbiguosRef(sch, schOrRef.schema, ref);\n }\n else if (ref !== normalizeId(fullPath)) {\n if (ref[0] === \"#\") {\n checkAmbiguosRef(sch, localRefs[ref], ref);\n localRefs[ref] = sch;\n }\n else {\n this.refs[ref] = fullPath;\n }\n }\n return ref;\n }\n function addAnchor(anchor) {\n if (typeof anchor == \"string\") {\n if (!ANCHOR.test(anchor))\n throw new Error(`invalid anchor \"${anchor}\"`);\n addRef.call(this, `#${anchor}`);\n }\n }\n });\n return localRefs;\n function checkAmbiguosRef(sch1, sch2, ref) {\n if (sch2 !== undefined && !equal(sch1, sch2))\n throw ambiguos(ref);\n }\n function ambiguos(ref) {\n return new Error(`reference \"${ref}\" resolves to more than one schema`);\n }\n}\nexports.getSchemaRefs = getSchemaRefs;\n//# sourceMappingURL=resolve.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getRules = exports.isJSONType = void 0;\nconst _jsonTypes = [\"string\", \"number\", \"integer\", \"boolean\", \"null\", \"object\", \"array\"];\nconst jsonTypes = new Set(_jsonTypes);\nfunction isJSONType(x) {\n return typeof x == \"string\" && jsonTypes.has(x);\n}\nexports.isJSONType = isJSONType;\nfunction getRules() {\n const groups = {\n number: { type: \"number\", rules: [] },\n string: { type: \"string\", rules: [] },\n array: { type: \"array\", rules: [] },\n object: { type: \"object\", rules: [] },\n };\n return {\n types: { ...groups, integer: true, boolean: true, null: true },\n rules: [{ rules: [] }, groups.number, groups.string, groups.array, groups.object],\n post: { rules: [] },\n all: {},\n keywords: {},\n };\n}\nexports.getRules = getRules;\n//# sourceMappingURL=rules.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.checkStrictMode = exports.getErrorPath = exports.Type = exports.useFunc = exports.setEvaluated = exports.evaluatedPropsToName = exports.mergeEvaluated = exports.eachItem = exports.unescapeJsonPointer = exports.escapeJsonPointer = exports.escapeFragment = exports.unescapeFragment = exports.schemaRefOrVal = exports.schemaHasRulesButRef = exports.schemaHasRules = exports.checkUnknownRules = exports.alwaysValidSchema = exports.toHash = void 0;\nconst codegen_1 = require(\"./codegen\");\nconst code_1 = require(\"./codegen/code\");\n// TODO refactor to use Set\nfunction toHash(arr) {\n const hash = {};\n for (const item of arr)\n hash[item] = true;\n return hash;\n}\nexports.toHash = toHash;\nfunction alwaysValidSchema(it, schema) {\n if (typeof schema == \"boolean\")\n return schema;\n if (Object.keys(schema).length === 0)\n return true;\n checkUnknownRules(it, schema);\n return !schemaHasRules(schema, it.self.RULES.all);\n}\nexports.alwaysValidSchema = alwaysValidSchema;\nfunction checkUnknownRules(it, schema = it.schema) {\n const { opts, self } = it;\n if (!opts.strictSchema)\n return;\n if (typeof schema === \"boolean\")\n return;\n const rules = self.RULES.keywords;\n for (const key in schema) {\n if (!rules[key])\n checkStrictMode(it, `unknown keyword: \"${key}\"`);\n }\n}\nexports.checkUnknownRules = checkUnknownRules;\nfunction schemaHasRules(schema, rules) {\n if (typeof schema == \"boolean\")\n return !schema;\n for (const key in schema)\n if (rules[key])\n return true;\n return 
false;\n}\nexports.schemaHasRules = schemaHasRules;\nfunction schemaHasRulesButRef(schema, RULES) {\n if (typeof schema == \"boolean\")\n return !schema;\n for (const key in schema)\n if (key !== \"$ref\" && RULES.all[key])\n return true;\n return false;\n}\nexports.schemaHasRulesButRef = schemaHasRulesButRef;\nfunction schemaRefOrVal({ topSchemaRef, schemaPath }, schema, keyword, $data) {\n if (!$data) {\n if (typeof schema == \"number\" || typeof schema == \"boolean\")\n return schema;\n if (typeof schema == \"string\")\n return (0, codegen_1._) `${schema}`;\n }\n return (0, codegen_1._) `${topSchemaRef}${schemaPath}${(0, codegen_1.getProperty)(keyword)}`;\n}\nexports.schemaRefOrVal = schemaRefOrVal;\nfunction unescapeFragment(str) {\n return unescapeJsonPointer(decodeURIComponent(str));\n}\nexports.unescapeFragment = unescapeFragment;\nfunction escapeFragment(str) {\n return encodeURIComponent(escapeJsonPointer(str));\n}\nexports.escapeFragment = escapeFragment;\nfunction escapeJsonPointer(str) {\n if (typeof str == \"number\")\n return `${str}`;\n return str.replace(/~/g, \"~0\").replace(/\\//g, \"~1\");\n}\nexports.escapeJsonPointer = escapeJsonPointer;\nfunction unescapeJsonPointer(str) {\n return str.replace(/~1/g, \"/\").replace(/~0/g, \"~\");\n}\nexports.unescapeJsonPointer = unescapeJsonPointer;\nfunction eachItem(xs, f) {\n if (Array.isArray(xs)) {\n for (const x of xs)\n f(x);\n }\n else {\n f(xs);\n }\n}\nexports.eachItem = eachItem;\nfunction makeMergeEvaluated({ mergeNames, mergeToName, mergeValues, resultToName, }) {\n return (gen, from, to, toName) => {\n const res = to === undefined\n ? from\n : to instanceof codegen_1.Name\n ? (from instanceof codegen_1.Name ? mergeNames(gen, from, to) : mergeToName(gen, from, to), to)\n : from instanceof codegen_1.Name\n ? (mergeToName(gen, to, from), from)\n : mergeValues(from, to);\n return toName === codegen_1.Name && !(res instanceof codegen_1.Name) ? resultToName(gen, res) : res;\n };\n}\nexports.mergeEvaluated = {\n props: makeMergeEvaluated({\n mergeNames: (gen, from, to) => gen.if((0, codegen_1._) `${to} !== true && ${from} !== undefined`, () => {\n gen.if((0, codegen_1._) `${from} === true`, () => gen.assign(to, true), () => gen.assign(to, (0, codegen_1._) `${to} || {}`).code((0, codegen_1._) `Object.assign(${to}, ${from})`));\n }),\n mergeToName: (gen, from, to) => gen.if((0, codegen_1._) `${to} !== true`, () => {\n if (from === true) {\n gen.assign(to, true);\n }\n else {\n gen.assign(to, (0, codegen_1._) `${to} || {}`);\n setEvaluated(gen, to, from);\n }\n }),\n mergeValues: (from, to) => (from === true ? true : { ...from, ...to }),\n resultToName: evaluatedPropsToName,\n }),\n items: makeMergeEvaluated({\n mergeNames: (gen, from, to) => gen.if((0, codegen_1._) `${to} !== true && ${from} !== undefined`, () => gen.assign(to, (0, codegen_1._) `${from} === true ? true : ${to} > ${from} ? ${to} : ${from}`)),\n mergeToName: (gen, from, to) => gen.if((0, codegen_1._) `${to} !== true`, () => gen.assign(to, from === true ? true : (0, codegen_1._) `${to} > ${from} ? ${to} : ${from}`)),\n mergeValues: (from, to) => (from === true ? 
true : Math.max(from, to)),\n resultToName: (gen, items) => gen.var(\"items\", items),\n }),\n};\nfunction evaluatedPropsToName(gen, ps) {\n if (ps === true)\n return gen.var(\"props\", true);\n const props = gen.var(\"props\", (0, codegen_1._) `{}`);\n if (ps !== undefined)\n setEvaluated(gen, props, ps);\n return props;\n}\nexports.evaluatedPropsToName = evaluatedPropsToName;\nfunction setEvaluated(gen, props, ps) {\n Object.keys(ps).forEach((p) => gen.assign((0, codegen_1._) `${props}${(0, codegen_1.getProperty)(p)}`, true));\n}\nexports.setEvaluated = setEvaluated;\nconst snippets = {};\nfunction useFunc(gen, f) {\n return gen.scopeValue(\"func\", {\n ref: f,\n code: snippets[f.code] || (snippets[f.code] = new code_1._Code(f.code)),\n });\n}\nexports.useFunc = useFunc;\nvar Type;\n(function (Type) {\n Type[Type[\"Num\"] = 0] = \"Num\";\n Type[Type[\"Str\"] = 1] = \"Str\";\n})(Type = exports.Type || (exports.Type = {}));\nfunction getErrorPath(dataProp, dataPropType, jsPropertySyntax) {\n // let path\n if (dataProp instanceof codegen_1.Name) {\n const isNumber = dataPropType === Type.Num;\n return jsPropertySyntax\n ? isNumber\n ? (0, codegen_1._) `\"[\" + ${dataProp} + \"]\"`\n : (0, codegen_1._) `\"['\" + ${dataProp} + \"']\"`\n : isNumber\n ? (0, codegen_1._) `\"/\" + ${dataProp}`\n : (0, codegen_1._) `\"/\" + ${dataProp}.replace(/~/g, \"~0\").replace(/\\\\//g, \"~1\")`; // TODO maybe use global escapePointer\n }\n return jsPropertySyntax ? (0, codegen_1.getProperty)(dataProp).toString() : \"/\" + escapeJsonPointer(dataProp);\n}\nexports.getErrorPath = getErrorPath;\nfunction checkStrictMode(it, msg, mode = it.opts.strictSchema) {\n if (!mode)\n return;\n msg = `strict mode: ${msg}`;\n if (mode === true)\n throw new Error(msg);\n it.self.logger.warn(msg);\n}\nexports.checkStrictMode = checkStrictMode;\n//# sourceMappingURL=util.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.shouldUseRule = exports.shouldUseGroup = exports.schemaHasRulesForType = void 0;\nfunction schemaHasRulesForType({ schema, self }, type) {\n const group = self.RULES.types[type];\n return group && group !== true && shouldUseGroup(schema, group);\n}\nexports.schemaHasRulesForType = schemaHasRulesForType;\nfunction shouldUseGroup(schema, group) {\n return group.rules.some((rule) => shouldUseRule(schema, rule));\n}\nexports.shouldUseGroup = shouldUseGroup;\nfunction shouldUseRule(schema, rule) {\n var _a;\n return (schema[rule.keyword] !== undefined ||\n ((_a = rule.definition.implements) === null || _a === void 0 ? 
void 0 : _a.some((kwd) => schema[kwd] !== undefined)));\n}\nexports.shouldUseRule = shouldUseRule;\n//# sourceMappingURL=applicability.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.boolOrEmptySchema = exports.topBoolOrEmptySchema = void 0;\nconst errors_1 = require(\"../errors\");\nconst codegen_1 = require(\"../codegen\");\nconst names_1 = require(\"../names\");\nconst boolError = {\n message: \"boolean schema is false\",\n};\nfunction topBoolOrEmptySchema(it) {\n const { gen, schema, validateName } = it;\n if (schema === false) {\n falseSchemaError(it, false);\n }\n else if (typeof schema == \"object\" && schema.$async === true) {\n gen.return(names_1.default.data);\n }\n else {\n gen.assign((0, codegen_1._) `${validateName}.errors`, null);\n gen.return(true);\n }\n}\nexports.topBoolOrEmptySchema = topBoolOrEmptySchema;\nfunction boolOrEmptySchema(it, valid) {\n const { gen, schema } = it;\n if (schema === false) {\n gen.var(valid, false); // TODO var\n falseSchemaError(it);\n }\n else {\n gen.var(valid, true); // TODO var\n }\n}\nexports.boolOrEmptySchema = boolOrEmptySchema;\nfunction falseSchemaError(it, overrideAllErrors) {\n const { gen, data } = it;\n // TODO maybe some other interface should be used for non-keyword validation errors...\n const cxt = {\n gen,\n keyword: \"false schema\",\n data,\n schema: false,\n schemaCode: false,\n schemaValue: false,\n params: {},\n it,\n };\n (0, errors_1.reportError)(cxt, boolError, undefined, overrideAllErrors);\n}\n//# sourceMappingURL=boolSchema.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.reportTypeError = exports.checkDataTypes = exports.checkDataType = exports.coerceAndCheckDataType = exports.getJSONTypes = exports.getSchemaTypes = exports.DataType = void 0;\nconst rules_1 = require(\"../rules\");\nconst applicability_1 = require(\"./applicability\");\nconst errors_1 = require(\"../errors\");\nconst codegen_1 = require(\"../codegen\");\nconst util_1 = require(\"../util\");\nvar DataType;\n(function (DataType) {\n DataType[DataType[\"Correct\"] = 0] = \"Correct\";\n DataType[DataType[\"Wrong\"] = 1] = \"Wrong\";\n})(DataType = exports.DataType || (exports.DataType = {}));\nfunction getSchemaTypes(schema) {\n const types = getJSONTypes(schema.type);\n const hasNull = types.includes(\"null\");\n if (hasNull) {\n if (schema.nullable === false)\n throw new Error(\"type: null contradicts nullable: false\");\n }\n else {\n if (!types.length && schema.nullable !== undefined) {\n throw new Error('\"nullable\" cannot be used without \"type\"');\n }\n if (schema.nullable === true)\n types.push(\"null\");\n }\n return types;\n}\nexports.getSchemaTypes = getSchemaTypes;\nfunction getJSONTypes(ts) {\n const types = Array.isArray(ts) ? ts : ts ? 
[ts] : [];\n if (types.every(rules_1.isJSONType))\n return types;\n throw new Error(\"type must be JSONType or JSONType[]: \" + types.join(\",\"));\n}\nexports.getJSONTypes = getJSONTypes;\nfunction coerceAndCheckDataType(it, types) {\n const { gen, data, opts } = it;\n const coerceTo = coerceToTypes(types, opts.coerceTypes);\n const checkTypes = types.length > 0 &&\n !(coerceTo.length === 0 && types.length === 1 && (0, applicability_1.schemaHasRulesForType)(it, types[0]));\n if (checkTypes) {\n const wrongType = checkDataTypes(types, data, opts.strictNumbers, DataType.Wrong);\n gen.if(wrongType, () => {\n if (coerceTo.length)\n coerceData(it, types, coerceTo);\n else\n reportTypeError(it);\n });\n }\n return checkTypes;\n}\nexports.coerceAndCheckDataType = coerceAndCheckDataType;\nconst COERCIBLE = new Set([\"string\", \"number\", \"integer\", \"boolean\", \"null\"]);\nfunction coerceToTypes(types, coerceTypes) {\n return coerceTypes\n ? types.filter((t) => COERCIBLE.has(t) || (coerceTypes === \"array\" && t === \"array\"))\n : [];\n}\nfunction coerceData(it, types, coerceTo) {\n const { gen, data, opts } = it;\n const dataType = gen.let(\"dataType\", (0, codegen_1._) `typeof ${data}`);\n const coerced = gen.let(\"coerced\", (0, codegen_1._) `undefined`);\n if (opts.coerceTypes === \"array\") {\n gen.if((0, codegen_1._) `${dataType} == 'object' && Array.isArray(${data}) && ${data}.length == 1`, () => gen\n .assign(data, (0, codegen_1._) `${data}[0]`)\n .assign(dataType, (0, codegen_1._) `typeof ${data}`)\n .if(checkDataTypes(types, data, opts.strictNumbers), () => gen.assign(coerced, data)));\n }\n gen.if((0, codegen_1._) `${coerced} !== undefined`);\n for (const t of coerceTo) {\n if (COERCIBLE.has(t) || (t === \"array\" && opts.coerceTypes === \"array\")) {\n coerceSpecificType(t);\n }\n }\n gen.else();\n reportTypeError(it);\n gen.endIf();\n gen.if((0, codegen_1._) `${coerced} !== undefined`, () => {\n gen.assign(data, coerced);\n assignParentData(it, coerced);\n });\n function coerceSpecificType(t) {\n switch (t) {\n case \"string\":\n gen\n .elseIf((0, codegen_1._) `${dataType} == \"number\" || ${dataType} == \"boolean\"`)\n .assign(coerced, (0, codegen_1._) `\"\" + ${data}`)\n .elseIf((0, codegen_1._) `${data} === null`)\n .assign(coerced, (0, codegen_1._) `\"\"`);\n return;\n case \"number\":\n gen\n .elseIf((0, codegen_1._) `${dataType} == \"boolean\" || ${data} === null\n || (${dataType} == \"string\" && ${data} && ${data} == +${data})`)\n .assign(coerced, (0, codegen_1._) `+${data}`);\n return;\n case \"integer\":\n gen\n .elseIf((0, codegen_1._) `${dataType} === \"boolean\" || ${data} === null\n || (${dataType} === \"string\" && ${data} && ${data} == +${data} && !(${data} % 1))`)\n .assign(coerced, (0, codegen_1._) `+${data}`);\n return;\n case \"boolean\":\n gen\n .elseIf((0, codegen_1._) `${data} === \"false\" || ${data} === 0 || ${data} === null`)\n .assign(coerced, false)\n .elseIf((0, codegen_1._) `${data} === \"true\" || ${data} === 1`)\n .assign(coerced, true);\n return;\n case \"null\":\n gen.elseIf((0, codegen_1._) `${data} === \"\" || ${data} === 0 || ${data} === false`);\n gen.assign(coerced, null);\n return;\n case \"array\":\n gen\n .elseIf((0, codegen_1._) `${dataType} === \"string\" || ${dataType} === \"number\"\n || ${dataType} === \"boolean\" || ${data} === null`)\n .assign(coerced, (0, codegen_1._) `[${data}]`);\n }\n }\n}\nfunction assignParentData({ gen, parentData, parentDataProperty }, expr) {\n // TODO use gen.property\n gen.if((0, codegen_1._) 
`${parentData} !== undefined`, () => gen.assign((0, codegen_1._) `${parentData}[${parentDataProperty}]`, expr));\n}\nfunction checkDataType(dataType, data, strictNums, correct = DataType.Correct) {\n const EQ = correct === DataType.Correct ? codegen_1.operators.EQ : codegen_1.operators.NEQ;\n let cond;\n switch (dataType) {\n case \"null\":\n return (0, codegen_1._) `${data} ${EQ} null`;\n case \"array\":\n cond = (0, codegen_1._) `Array.isArray(${data})`;\n break;\n case \"object\":\n cond = (0, codegen_1._) `${data} && typeof ${data} == \"object\" && !Array.isArray(${data})`;\n break;\n case \"integer\":\n cond = numCond((0, codegen_1._) `!(${data} % 1) && !isNaN(${data})`);\n break;\n case \"number\":\n cond = numCond();\n break;\n default:\n return (0, codegen_1._) `typeof ${data} ${EQ} ${dataType}`;\n }\n return correct === DataType.Correct ? cond : (0, codegen_1.not)(cond);\n function numCond(_cond = codegen_1.nil) {\n return (0, codegen_1.and)((0, codegen_1._) `typeof ${data} == \"number\"`, _cond, strictNums ? (0, codegen_1._) `isFinite(${data})` : codegen_1.nil);\n }\n}\nexports.checkDataType = checkDataType;\nfunction checkDataTypes(dataTypes, data, strictNums, correct) {\n if (dataTypes.length === 1) {\n return checkDataType(dataTypes[0], data, strictNums, correct);\n }\n let cond;\n const types = (0, util_1.toHash)(dataTypes);\n if (types.array && types.object) {\n const notObj = (0, codegen_1._) `typeof ${data} != \"object\"`;\n cond = types.null ? notObj : (0, codegen_1._) `!${data} || ${notObj}`;\n delete types.null;\n delete types.array;\n delete types.object;\n }\n else {\n cond = codegen_1.nil;\n }\n if (types.number)\n delete types.integer;\n for (const t in types)\n cond = (0, codegen_1.and)(cond, checkDataType(t, data, strictNums, correct));\n return cond;\n}\nexports.checkDataTypes = checkDataTypes;\nconst typeError = {\n message: ({ schema }) => `must be ${schema}`,\n params: ({ schema, schemaValue }) => typeof schema == \"string\" ? 
(0, codegen_1._) `{type: ${schema}}` : (0, codegen_1._) `{type: ${schemaValue}}`,\n};\nfunction reportTypeError(it) {\n const cxt = getTypeErrorContext(it);\n (0, errors_1.reportError)(cxt, typeError);\n}\nexports.reportTypeError = reportTypeError;\nfunction getTypeErrorContext(it) {\n const { gen, data, schema } = it;\n const schemaCode = (0, util_1.schemaRefOrVal)(it, schema, \"type\");\n return {\n gen,\n keyword: \"type\",\n data,\n schema: schema.type,\n schemaCode,\n schemaValue: schemaCode,\n parentSchema: schema,\n params: {},\n it,\n };\n}\n//# sourceMappingURL=dataType.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.assignDefaults = void 0;\nconst codegen_1 = require(\"../codegen\");\nconst util_1 = require(\"../util\");\nfunction assignDefaults(it, ty) {\n const { properties, items } = it.schema;\n if (ty === \"object\" && properties) {\n for (const key in properties) {\n assignDefault(it, key, properties[key].default);\n }\n }\n else if (ty === \"array\" && Array.isArray(items)) {\n items.forEach((sch, i) => assignDefault(it, i, sch.default));\n }\n}\nexports.assignDefaults = assignDefaults;\nfunction assignDefault(it, prop, defaultValue) {\n const { gen, compositeRule, data, opts } = it;\n if (defaultValue === undefined)\n return;\n const childData = (0, codegen_1._) `${data}${(0, codegen_1.getProperty)(prop)}`;\n if (compositeRule) {\n (0, util_1.checkStrictMode)(it, `default is ignored for: ${childData}`);\n return;\n }\n let condition = (0, codegen_1._) `${childData} === undefined`;\n if (opts.useDefaults === \"empty\") {\n condition = (0, codegen_1._) `${condition} || ${childData} === null || ${childData} === \"\"`;\n }\n // `${childData} === undefined` +\n // (opts.useDefaults === \"empty\" ? 
` || ${childData} === null || ${childData} === \"\"` : \"\")\n gen.if(condition, (0, codegen_1._) `${childData} = ${(0, codegen_1.stringify)(defaultValue)}`);\n}\n//# sourceMappingURL=defaults.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getData = exports.KeywordCxt = exports.validateFunctionCode = void 0;\nconst boolSchema_1 = require(\"./boolSchema\");\nconst dataType_1 = require(\"./dataType\");\nconst applicability_1 = require(\"./applicability\");\nconst dataType_2 = require(\"./dataType\");\nconst defaults_1 = require(\"./defaults\");\nconst keyword_1 = require(\"./keyword\");\nconst subschema_1 = require(\"./subschema\");\nconst codegen_1 = require(\"../codegen\");\nconst names_1 = require(\"../names\");\nconst resolve_1 = require(\"../resolve\");\nconst util_1 = require(\"../util\");\nconst errors_1 = require(\"../errors\");\n// schema compilation - generates validation function, subschemaCode (below) is used for subschemas\nfunction validateFunctionCode(it) {\n if (isSchemaObj(it)) {\n checkKeywords(it);\n if (schemaCxtHasRules(it)) {\n topSchemaObjCode(it);\n return;\n }\n }\n validateFunction(it, () => (0, boolSchema_1.topBoolOrEmptySchema)(it));\n}\nexports.validateFunctionCode = validateFunctionCode;\nfunction validateFunction({ gen, validateName, schema, schemaEnv, opts }, body) {\n if (opts.code.es5) {\n gen.func(validateName, (0, codegen_1._) `${names_1.default.data}, ${names_1.default.valCxt}`, schemaEnv.$async, () => {\n gen.code((0, codegen_1._) `\"use strict\"; ${funcSourceUrl(schema, opts)}`);\n destructureValCxtES5(gen, opts);\n gen.code(body);\n });\n }\n else {\n gen.func(validateName, (0, codegen_1._) `${names_1.default.data}, ${destructureValCxt(opts)}`, schemaEnv.$async, () => gen.code(funcSourceUrl(schema, opts)).code(body));\n }\n}\nfunction destructureValCxt(opts) {\n return (0, codegen_1._) `{${names_1.default.instancePath}=\"\", ${names_1.default.parentData}, ${names_1.default.parentDataProperty}, ${names_1.default.rootData}=${names_1.default.data}${opts.dynamicRef ? 
(0, codegen_1._) `, ${names_1.default.dynamicAnchors}={}` : codegen_1.nil}}={}`;\n}\nfunction destructureValCxtES5(gen, opts) {\n gen.if(names_1.default.valCxt, () => {\n gen.var(names_1.default.instancePath, (0, codegen_1._) `${names_1.default.valCxt}.${names_1.default.instancePath}`);\n gen.var(names_1.default.parentData, (0, codegen_1._) `${names_1.default.valCxt}.${names_1.default.parentData}`);\n gen.var(names_1.default.parentDataProperty, (0, codegen_1._) `${names_1.default.valCxt}.${names_1.default.parentDataProperty}`);\n gen.var(names_1.default.rootData, (0, codegen_1._) `${names_1.default.valCxt}.${names_1.default.rootData}`);\n if (opts.dynamicRef)\n gen.var(names_1.default.dynamicAnchors, (0, codegen_1._) `${names_1.default.valCxt}.${names_1.default.dynamicAnchors}`);\n }, () => {\n gen.var(names_1.default.instancePath, (0, codegen_1._) `\"\"`);\n gen.var(names_1.default.parentData, (0, codegen_1._) `undefined`);\n gen.var(names_1.default.parentDataProperty, (0, codegen_1._) `undefined`);\n gen.var(names_1.default.rootData, names_1.default.data);\n if (opts.dynamicRef)\n gen.var(names_1.default.dynamicAnchors, (0, codegen_1._) `{}`);\n });\n}\nfunction topSchemaObjCode(it) {\n const { schema, opts, gen } = it;\n validateFunction(it, () => {\n if (opts.$comment && schema.$comment)\n commentKeyword(it);\n checkNoDefault(it);\n gen.let(names_1.default.vErrors, null);\n gen.let(names_1.default.errors, 0);\n if (opts.unevaluated)\n resetEvaluated(it);\n typeAndKeywords(it);\n returnResults(it);\n });\n return;\n}\nfunction resetEvaluated(it) {\n // TODO maybe some hook to execute it in the end to check whether props/items are Name, as in assignEvaluated\n const { gen, validateName } = it;\n it.evaluated = gen.const(\"evaluated\", (0, codegen_1._) `${validateName}.evaluated`);\n gen.if((0, codegen_1._) `${it.evaluated}.dynamicProps`, () => gen.assign((0, codegen_1._) `${it.evaluated}.props`, (0, codegen_1._) `undefined`));\n gen.if((0, codegen_1._) `${it.evaluated}.dynamicItems`, () => gen.assign((0, codegen_1._) `${it.evaluated}.items`, (0, codegen_1._) `undefined`));\n}\nfunction funcSourceUrl(schema, opts) {\n const schId = typeof schema == \"object\" && schema[opts.schemaId];\n return schId && (opts.code.source || opts.code.process) ? 
(0, codegen_1._) `/*# sourceURL=${schId} */` : codegen_1.nil;\n}\n// schema compilation - this function is used recursively to generate code for sub-schemas\nfunction subschemaCode(it, valid) {\n if (isSchemaObj(it)) {\n checkKeywords(it);\n if (schemaCxtHasRules(it)) {\n subSchemaObjCode(it, valid);\n return;\n }\n }\n (0, boolSchema_1.boolOrEmptySchema)(it, valid);\n}\nfunction schemaCxtHasRules({ schema, self }) {\n if (typeof schema == \"boolean\")\n return !schema;\n for (const key in schema)\n if (self.RULES.all[key])\n return true;\n return false;\n}\nfunction isSchemaObj(it) {\n return typeof it.schema != \"boolean\";\n}\nfunction subSchemaObjCode(it, valid) {\n const { schema, gen, opts } = it;\n if (opts.$comment && schema.$comment)\n commentKeyword(it);\n updateContext(it);\n checkAsyncSchema(it);\n const errsCount = gen.const(\"_errs\", names_1.default.errors);\n typeAndKeywords(it, errsCount);\n // TODO var\n gen.var(valid, (0, codegen_1._) `${errsCount} === ${names_1.default.errors}`);\n}\nfunction checkKeywords(it) {\n (0, util_1.checkUnknownRules)(it);\n checkRefsAndKeywords(it);\n}\nfunction typeAndKeywords(it, errsCount) {\n if (it.opts.jtd)\n return schemaKeywords(it, [], false, errsCount);\n const types = (0, dataType_1.getSchemaTypes)(it.schema);\n const checkedTypes = (0, dataType_1.coerceAndCheckDataType)(it, types);\n schemaKeywords(it, types, !checkedTypes, errsCount);\n}\nfunction checkRefsAndKeywords(it) {\n const { schema, errSchemaPath, opts, self } = it;\n if (schema.$ref && opts.ignoreKeywordsWithRef && (0, util_1.schemaHasRulesButRef)(schema, self.RULES)) {\n self.logger.warn(`$ref: keywords ignored in schema at path \"${errSchemaPath}\"`);\n }\n}\nfunction checkNoDefault(it) {\n const { schema, opts } = it;\n if (schema.default !== undefined && opts.useDefaults && opts.strictSchema) {\n (0, util_1.checkStrictMode)(it, \"default is ignored in the schema root\");\n }\n}\nfunction updateContext(it) {\n const schId = it.schema[it.opts.schemaId];\n if (schId)\n it.baseId = (0, resolve_1.resolveUrl)(it.opts.uriResolver, it.baseId, schId);\n}\nfunction checkAsyncSchema(it) {\n if (it.schema.$async && !it.schemaEnv.$async)\n throw new Error(\"async schema in sync schema\");\n}\nfunction commentKeyword({ gen, schemaEnv, schema, errSchemaPath, opts }) {\n const msg = schema.$comment;\n if (opts.$comment === true) {\n gen.code((0, codegen_1._) `${names_1.default.self}.logger.log(${msg})`);\n }\n else if (typeof opts.$comment == \"function\") {\n const schemaPath = (0, codegen_1.str) `${errSchemaPath}/$comment`;\n const rootName = gen.scopeValue(\"root\", { ref: schemaEnv.root });\n gen.code((0, codegen_1._) `${names_1.default.self}.opts.$comment(${msg}, ${schemaPath}, ${rootName}.schema)`);\n }\n}\nfunction returnResults(it) {\n const { gen, schemaEnv, validateName, ValidationError, opts } = it;\n if (schemaEnv.$async) {\n // TODO assign unevaluated\n gen.if((0, codegen_1._) `${names_1.default.errors} === 0`, () => gen.return(names_1.default.data), () => gen.throw((0, codegen_1._) `new ${ValidationError}(${names_1.default.vErrors})`));\n }\n else {\n gen.assign((0, codegen_1._) `${validateName}.errors`, names_1.default.vErrors);\n if (opts.unevaluated)\n assignEvaluated(it);\n gen.return((0, codegen_1._) `${names_1.default.errors} === 0`);\n }\n}\nfunction assignEvaluated({ gen, evaluated, props, items }) {\n if (props instanceof codegen_1.Name)\n gen.assign((0, codegen_1._) `${evaluated}.props`, props);\n if (items instanceof codegen_1.Name)\n gen.assign((0, 
codegen_1._) `${evaluated}.items`, items);\n}\nfunction schemaKeywords(it, types, typeErrors, errsCount) {\n const { gen, schema, data, allErrors, opts, self } = it;\n const { RULES } = self;\n if (schema.$ref && (opts.ignoreKeywordsWithRef || !(0, util_1.schemaHasRulesButRef)(schema, RULES))) {\n gen.block(() => keywordCode(it, \"$ref\", RULES.all.$ref.definition)); // TODO typecast\n return;\n }\n if (!opts.jtd)\n checkStrictTypes(it, types);\n gen.block(() => {\n for (const group of RULES.rules)\n groupKeywords(group);\n groupKeywords(RULES.post);\n });\n function groupKeywords(group) {\n if (!(0, applicability_1.shouldUseGroup)(schema, group))\n return;\n if (group.type) {\n gen.if((0, dataType_2.checkDataType)(group.type, data, opts.strictNumbers));\n iterateKeywords(it, group);\n if (types.length === 1 && types[0] === group.type && typeErrors) {\n gen.else();\n (0, dataType_2.reportTypeError)(it);\n }\n gen.endIf();\n }\n else {\n iterateKeywords(it, group);\n }\n // TODO make it \"ok\" call?\n if (!allErrors)\n gen.if((0, codegen_1._) `${names_1.default.errors} === ${errsCount || 0}`);\n }\n}\nfunction iterateKeywords(it, group) {\n const { gen, schema, opts: { useDefaults }, } = it;\n if (useDefaults)\n (0, defaults_1.assignDefaults)(it, group.type);\n gen.block(() => {\n for (const rule of group.rules) {\n if ((0, applicability_1.shouldUseRule)(schema, rule)) {\n keywordCode(it, rule.keyword, rule.definition, group.type);\n }\n }\n });\n}\nfunction checkStrictTypes(it, types) {\n if (it.schemaEnv.meta || !it.opts.strictTypes)\n return;\n checkContextTypes(it, types);\n if (!it.opts.allowUnionTypes)\n checkMultipleTypes(it, types);\n checkKeywordTypes(it, it.dataTypes);\n}\nfunction checkContextTypes(it, types) {\n if (!types.length)\n return;\n if (!it.dataTypes.length) {\n it.dataTypes = types;\n return;\n }\n types.forEach((t) => {\n if (!includesType(it.dataTypes, t)) {\n strictTypesError(it, `type \"${t}\" not allowed by context \"${it.dataTypes.join(\",\")}\"`);\n }\n });\n it.dataTypes = it.dataTypes.filter((t) => includesType(types, t));\n}\nfunction checkMultipleTypes(it, ts) {\n if (ts.length > 1 && !(ts.length === 2 && ts.includes(\"null\"))) {\n strictTypesError(it, \"use allowUnionTypes to allow union type keyword\");\n }\n}\nfunction checkKeywordTypes(it, ts) {\n const rules = it.self.RULES.all;\n for (const keyword in rules) {\n const rule = rules[keyword];\n if (typeof rule == \"object\" && (0, applicability_1.shouldUseRule)(it.schema, rule)) {\n const { type } = rule.definition;\n if (type.length && !type.some((t) => hasApplicableType(ts, t))) {\n strictTypesError(it, `missing type \"${type.join(\",\")}\" for keyword \"${keyword}\"`);\n }\n }\n }\n}\nfunction hasApplicableType(schTs, kwdT) {\n return schTs.includes(kwdT) || (kwdT === \"number\" && schTs.includes(\"integer\"));\n}\nfunction includesType(ts, t) {\n return ts.includes(t) || (t === \"integer\" && ts.includes(\"number\"));\n}\nfunction strictTypesError(it, msg) {\n const schemaPath = it.schemaEnv.baseId + it.errSchemaPath;\n msg += ` at \"${schemaPath}\" (strictTypes)`;\n (0, util_1.checkStrictMode)(it, msg, it.opts.strictTypes);\n}\nclass KeywordCxt {\n constructor(it, def, keyword) {\n (0, keyword_1.validateKeywordUsage)(it, def, keyword);\n this.gen = it.gen;\n this.allErrors = it.allErrors;\n this.keyword = keyword;\n this.data = it.data;\n this.schema = it.schema[keyword];\n this.$data = def.$data && it.opts.$data && this.schema && this.schema.$data;\n this.schemaValue = (0, 
util_1.schemaRefOrVal)(it, this.schema, keyword, this.$data);\n this.schemaType = def.schemaType;\n this.parentSchema = it.schema;\n this.params = {};\n this.it = it;\n this.def = def;\n if (this.$data) {\n this.schemaCode = it.gen.const(\"vSchema\", getData(this.$data, it));\n }\n else {\n this.schemaCode = this.schemaValue;\n if (!(0, keyword_1.validSchemaType)(this.schema, def.schemaType, def.allowUndefined)) {\n throw new Error(`${keyword} value must be ${JSON.stringify(def.schemaType)}`);\n }\n }\n if (\"code\" in def ? def.trackErrors : def.errors !== false) {\n this.errsCount = it.gen.const(\"_errs\", names_1.default.errors);\n }\n }\n result(condition, successAction, failAction) {\n this.failResult((0, codegen_1.not)(condition), successAction, failAction);\n }\n failResult(condition, successAction, failAction) {\n this.gen.if(condition);\n if (failAction)\n failAction();\n else\n this.error();\n if (successAction) {\n this.gen.else();\n successAction();\n if (this.allErrors)\n this.gen.endIf();\n }\n else {\n if (this.allErrors)\n this.gen.endIf();\n else\n this.gen.else();\n }\n }\n pass(condition, failAction) {\n this.failResult((0, codegen_1.not)(condition), undefined, failAction);\n }\n fail(condition) {\n if (condition === undefined) {\n this.error();\n if (!this.allErrors)\n this.gen.if(false); // this branch will be removed by gen.optimize\n return;\n }\n this.gen.if(condition);\n this.error();\n if (this.allErrors)\n this.gen.endIf();\n else\n this.gen.else();\n }\n fail$data(condition) {\n if (!this.$data)\n return this.fail(condition);\n const { schemaCode } = this;\n this.fail((0, codegen_1._) `${schemaCode} !== undefined && (${(0, codegen_1.or)(this.invalid$data(), condition)})`);\n }\n error(append, errorParams, errorPaths) {\n if (errorParams) {\n this.setParams(errorParams);\n this._error(append, errorPaths);\n this.setParams({});\n return;\n }\n this._error(append, errorPaths);\n }\n _error(append, errorPaths) {\n ;\n (append ? errors_1.reportExtraError : errors_1.reportError)(this, this.def.error, errorPaths);\n }\n $dataError() {\n (0, errors_1.reportError)(this, this.def.$dataError || errors_1.keyword$DataError);\n }\n reset() {\n if (this.errsCount === undefined)\n throw new Error('add \"trackErrors\" to keyword definition');\n (0, errors_1.resetErrorsCount)(this.gen, this.errsCount);\n }\n ok(cond) {\n if (!this.allErrors)\n this.gen.if(cond);\n }\n setParams(obj, assign) {\n if (assign)\n Object.assign(this.params, obj);\n else\n this.params = obj;\n }\n block$data(valid, codeBlock, $dataValid = codegen_1.nil) {\n this.gen.block(() => {\n this.check$data(valid, $dataValid);\n codeBlock();\n });\n }\n check$data(valid = codegen_1.nil, $dataValid = codegen_1.nil) {\n if (!this.$data)\n return;\n const { gen, schemaCode, schemaType, def } = this;\n gen.if((0, codegen_1.or)((0, codegen_1._) `${schemaCode} === undefined`, $dataValid));\n if (valid !== codegen_1.nil)\n gen.assign(valid, true);\n if (schemaType.length || def.validateSchema) {\n gen.elseIf(this.invalid$data());\n this.$dataError();\n if (valid !== codegen_1.nil)\n gen.assign(valid, false);\n }\n gen.else();\n }\n invalid$data() {\n const { gen, schemaCode, schemaType, def, it } = this;\n return (0, codegen_1.or)(wrong$DataType(), invalid$DataSchema());\n function wrong$DataType() {\n if (schemaType.length) {\n /* istanbul ignore if */\n if (!(schemaCode instanceof codegen_1.Name))\n throw new Error(\"ajv implementation error\");\n const st = Array.isArray(schemaType) ? 
schemaType : [schemaType];\n return (0, codegen_1._) `${(0, dataType_2.checkDataTypes)(st, schemaCode, it.opts.strictNumbers, dataType_2.DataType.Wrong)}`;\n }\n return codegen_1.nil;\n }\n function invalid$DataSchema() {\n if (def.validateSchema) {\n const validateSchemaRef = gen.scopeValue(\"validate$data\", { ref: def.validateSchema }); // TODO value.code for standalone\n return (0, codegen_1._) `!${validateSchemaRef}(${schemaCode})`;\n }\n return codegen_1.nil;\n }\n }\n subschema(appl, valid) {\n const subschema = (0, subschema_1.getSubschema)(this.it, appl);\n (0, subschema_1.extendSubschemaData)(subschema, this.it, appl);\n (0, subschema_1.extendSubschemaMode)(subschema, appl);\n const nextContext = { ...this.it, ...subschema, items: undefined, props: undefined };\n subschemaCode(nextContext, valid);\n return nextContext;\n }\n mergeEvaluated(schemaCxt, toName) {\n const { it, gen } = this;\n if (!it.opts.unevaluated)\n return;\n if (it.props !== true && schemaCxt.props !== undefined) {\n it.props = util_1.mergeEvaluated.props(gen, schemaCxt.props, it.props, toName);\n }\n if (it.items !== true && schemaCxt.items !== undefined) {\n it.items = util_1.mergeEvaluated.items(gen, schemaCxt.items, it.items, toName);\n }\n }\n mergeValidEvaluated(schemaCxt, valid) {\n const { it, gen } = this;\n if (it.opts.unevaluated && (it.props !== true || it.items !== true)) {\n gen.if(valid, () => this.mergeEvaluated(schemaCxt, codegen_1.Name));\n return true;\n }\n }\n}\nexports.KeywordCxt = KeywordCxt;\nfunction keywordCode(it, keyword, def, ruleType) {\n const cxt = new KeywordCxt(it, def, keyword);\n if (\"code\" in def) {\n def.code(cxt, ruleType);\n }\n else if (cxt.$data && def.validate) {\n (0, keyword_1.funcKeywordCode)(cxt, def);\n }\n else if (\"macro\" in def) {\n (0, keyword_1.macroKeywordCode)(cxt, def);\n }\n else if (def.compile || def.validate) {\n (0, keyword_1.funcKeywordCode)(cxt, def);\n }\n}\nconst JSON_POINTER = /^\\/(?:[^~]|~0|~1)*$/;\nconst RELATIVE_JSON_POINTER = /^([0-9]+)(#|\\/(?:[^~]|~0|~1)*)?$/;\nfunction getData($data, { dataLevel, dataNames, dataPathArr }) {\n let jsonPointer;\n let data;\n if ($data === \"\")\n return names_1.default.rootData;\n if ($data[0] === \"/\") {\n if (!JSON_POINTER.test($data))\n throw new Error(`Invalid JSON-pointer: ${$data}`);\n jsonPointer = $data;\n data = names_1.default.rootData;\n }\n else {\n const matches = RELATIVE_JSON_POINTER.exec($data);\n if (!matches)\n throw new Error(`Invalid JSON-pointer: ${$data}`);\n const up = +matches[1];\n jsonPointer = matches[2];\n if (jsonPointer === \"#\") {\n if (up >= dataLevel)\n throw new Error(errorMsg(\"property/index\", up));\n return dataPathArr[dataLevel - up];\n }\n if (up > dataLevel)\n throw new Error(errorMsg(\"data\", up));\n data = dataNames[dataLevel - up];\n if (!jsonPointer)\n return data;\n }\n let expr = data;\n const segments = jsonPointer.split(\"/\");\n for (const segment of segments) {\n if (segment) {\n data = (0, codegen_1._) `${data}${(0, codegen_1.getProperty)((0, util_1.unescapeJsonPointer)(segment))}`;\n expr = (0, codegen_1._) `${expr} && ${data}`;\n }\n }\n return expr;\n function errorMsg(pointerType, up) {\n return `Cannot access ${pointerType} ${up} levels up, current level is ${dataLevel}`;\n }\n}\nexports.getData = getData;\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.validateKeywordUsage = exports.validSchemaType = exports.funcKeywordCode = exports.macroKeywordCode = void 
0;\nconst codegen_1 = require(\"../codegen\");\nconst names_1 = require(\"../names\");\nconst code_1 = require(\"../../vocabularies/code\");\nconst errors_1 = require(\"../errors\");\nfunction macroKeywordCode(cxt, def) {\n const { gen, keyword, schema, parentSchema, it } = cxt;\n const macroSchema = def.macro.call(it.self, schema, parentSchema, it);\n const schemaRef = useKeyword(gen, keyword, macroSchema);\n if (it.opts.validateSchema !== false)\n it.self.validateSchema(macroSchema, true);\n const valid = gen.name(\"valid\");\n cxt.subschema({\n schema: macroSchema,\n schemaPath: codegen_1.nil,\n errSchemaPath: `${it.errSchemaPath}/${keyword}`,\n topSchemaRef: schemaRef,\n compositeRule: true,\n }, valid);\n cxt.pass(valid, () => cxt.error(true));\n}\nexports.macroKeywordCode = macroKeywordCode;\nfunction funcKeywordCode(cxt, def) {\n var _a;\n const { gen, keyword, schema, parentSchema, $data, it } = cxt;\n checkAsyncKeyword(it, def);\n const validate = !$data && def.compile ? def.compile.call(it.self, schema, parentSchema, it) : def.validate;\n const validateRef = useKeyword(gen, keyword, validate);\n const valid = gen.let(\"valid\");\n cxt.block$data(valid, validateKeyword);\n cxt.ok((_a = def.valid) !== null && _a !== void 0 ? _a : valid);\n function validateKeyword() {\n if (def.errors === false) {\n assignValid();\n if (def.modifying)\n modifyData(cxt);\n reportErrs(() => cxt.error());\n }\n else {\n const ruleErrs = def.async ? validateAsync() : validateSync();\n if (def.modifying)\n modifyData(cxt);\n reportErrs(() => addErrs(cxt, ruleErrs));\n }\n }\n function validateAsync() {\n const ruleErrs = gen.let(\"ruleErrs\", null);\n gen.try(() => assignValid((0, codegen_1._) `await `), (e) => gen.assign(valid, false).if((0, codegen_1._) `${e} instanceof ${it.ValidationError}`, () => gen.assign(ruleErrs, (0, codegen_1._) `${e}.errors`), () => gen.throw(e)));\n return ruleErrs;\n }\n function validateSync() {\n const validateErrs = (0, codegen_1._) `${validateRef}.errors`;\n gen.assign(validateErrs, null);\n assignValid(codegen_1.nil);\n return validateErrs;\n }\n function assignValid(_await = def.async ? (0, codegen_1._) `await ` : codegen_1.nil) {\n const passCxt = it.opts.passContext ? names_1.default.this : names_1.default.self;\n const passSchema = !((\"compile\" in def && !$data) || def.schema === false);\n gen.assign(valid, (0, codegen_1._) `${_await}${(0, code_1.callValidateCode)(cxt, validateRef, passCxt, passSchema)}`, def.modifying);\n }\n function reportErrs(errors) {\n var _a;\n gen.if((0, codegen_1.not)((_a = def.valid) !== null && _a !== void 0 ? _a : valid), errors);\n }\n}\nexports.funcKeywordCode = funcKeywordCode;\nfunction modifyData(cxt) {\n const { gen, data, it } = cxt;\n gen.if(it.parentData, () => gen.assign(data, (0, codegen_1._) `${it.parentData}[${it.parentDataProperty}]`));\n}\nfunction addErrs(cxt, errs) {\n const { gen } = cxt;\n gen.if((0, codegen_1._) `Array.isArray(${errs})`, () => {\n gen\n .assign(names_1.default.vErrors, (0, codegen_1._) `${names_1.default.vErrors} === null ? 
${errs} : ${names_1.default.vErrors}.concat(${errs})`)\n .assign(names_1.default.errors, (0, codegen_1._) `${names_1.default.vErrors}.length`);\n (0, errors_1.extendErrors)(cxt);\n }, () => cxt.error());\n}\nfunction checkAsyncKeyword({ schemaEnv }, def) {\n if (def.async && !schemaEnv.$async)\n throw new Error(\"async keyword in sync schema\");\n}\nfunction useKeyword(gen, keyword, result) {\n if (result === undefined)\n throw new Error(`keyword \"${keyword}\" failed to compile`);\n return gen.scopeValue(\"keyword\", typeof result == \"function\" ? { ref: result } : { ref: result, code: (0, codegen_1.stringify)(result) });\n}\nfunction validSchemaType(schema, schemaType, allowUndefined = false) {\n // TODO add tests\n return (!schemaType.length ||\n schemaType.some((st) => st === \"array\"\n ? Array.isArray(schema)\n : st === \"object\"\n ? schema && typeof schema == \"object\" && !Array.isArray(schema)\n : typeof schema == st || (allowUndefined && typeof schema == \"undefined\")));\n}\nexports.validSchemaType = validSchemaType;\nfunction validateKeywordUsage({ schema, opts, self, errSchemaPath }, def, keyword) {\n /* istanbul ignore if */\n if (Array.isArray(def.keyword) ? !def.keyword.includes(keyword) : def.keyword !== keyword) {\n throw new Error(\"ajv implementation error\");\n }\n const deps = def.dependencies;\n if (deps === null || deps === void 0 ? void 0 : deps.some((kwd) => !Object.prototype.hasOwnProperty.call(schema, kwd))) {\n throw new Error(`parent schema must have dependencies of ${keyword}: ${deps.join(\",\")}`);\n }\n if (def.validateSchema) {\n const valid = def.validateSchema(schema[keyword]);\n if (!valid) {\n const msg = `keyword \"${keyword}\" value is invalid at path \"${errSchemaPath}\": ` +\n self.errorsText(def.validateSchema.errors);\n if (opts.validateSchema === \"log\")\n self.logger.error(msg);\n else\n throw new Error(msg);\n }\n }\n}\nexports.validateKeywordUsage = validateKeywordUsage;\n//# sourceMappingURL=keyword.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.extendSubschemaMode = exports.extendSubschemaData = exports.getSubschema = void 0;\nconst codegen_1 = require(\"../codegen\");\nconst util_1 = require(\"../util\");\nfunction getSubschema(it, { keyword, schemaProp, schema, schemaPath, errSchemaPath, topSchemaRef }) {\n if (keyword !== undefined && schema !== undefined) {\n throw new Error('both \"keyword\" and \"schema\" passed, only one allowed');\n }\n if (keyword !== undefined) {\n const sch = it.schema[keyword];\n return schemaProp === undefined\n ? 
{\n schema: sch,\n schemaPath: (0, codegen_1._) `${it.schemaPath}${(0, codegen_1.getProperty)(keyword)}`,\n errSchemaPath: `${it.errSchemaPath}/${keyword}`,\n }\n : {\n schema: sch[schemaProp],\n schemaPath: (0, codegen_1._) `${it.schemaPath}${(0, codegen_1.getProperty)(keyword)}${(0, codegen_1.getProperty)(schemaProp)}`,\n errSchemaPath: `${it.errSchemaPath}/${keyword}/${(0, util_1.escapeFragment)(schemaProp)}`,\n };\n }\n if (schema !== undefined) {\n if (schemaPath === undefined || errSchemaPath === undefined || topSchemaRef === undefined) {\n throw new Error('\"schemaPath\", \"errSchemaPath\" and \"topSchemaRef\" are required with \"schema\"');\n }\n return {\n schema,\n schemaPath,\n topSchemaRef,\n errSchemaPath,\n };\n }\n throw new Error('either \"keyword\" or \"schema\" must be passed');\n}\nexports.getSubschema = getSubschema;\nfunction extendSubschemaData(subschema, it, { dataProp, dataPropType: dpType, data, dataTypes, propertyName }) {\n if (data !== undefined && dataProp !== undefined) {\n throw new Error('both \"data\" and \"dataProp\" passed, only one allowed');\n }\n const { gen } = it;\n if (dataProp !== undefined) {\n const { errorPath, dataPathArr, opts } = it;\n const nextData = gen.let(\"data\", (0, codegen_1._) `${it.data}${(0, codegen_1.getProperty)(dataProp)}`, true);\n dataContextProps(nextData);\n subschema.errorPath = (0, codegen_1.str) `${errorPath}${(0, util_1.getErrorPath)(dataProp, dpType, opts.jsPropertySyntax)}`;\n subschema.parentDataProperty = (0, codegen_1._) `${dataProp}`;\n subschema.dataPathArr = [...dataPathArr, subschema.parentDataProperty];\n }\n if (data !== undefined) {\n const nextData = data instanceof codegen_1.Name ? data : gen.let(\"data\", data, true); // replaceable if used once?\n dataContextProps(nextData);\n if (propertyName !== undefined)\n subschema.propertyName = propertyName;\n // TODO something is possibly wrong here with not changing parentDataProperty and not appending dataPathArr\n }\n if (dataTypes)\n subschema.dataTypes = dataTypes;\n function dataContextProps(_nextData) {\n subschema.data = _nextData;\n subschema.dataLevel = it.dataLevel + 1;\n subschema.dataTypes = [];\n it.definedProperties = new Set();\n subschema.parentData = it.data;\n subschema.dataNames = [...it.dataNames, _nextData];\n }\n}\nexports.extendSubschemaData = extendSubschemaData;\nfunction extendSubschemaMode(subschema, { jtdDiscriminator, jtdMetadata, compositeRule, createErrors, allErrors }) {\n if (compositeRule !== undefined)\n subschema.compositeRule = compositeRule;\n if (createErrors !== undefined)\n subschema.createErrors = createErrors;\n if (allErrors !== undefined)\n subschema.allErrors = allErrors;\n subschema.jtdDiscriminator = jtdDiscriminator; // not inherited\n subschema.jtdMetadata = jtdMetadata; // not inherited\n}\nexports.extendSubschemaMode = extendSubschemaMode;\n//# sourceMappingURL=subschema.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.CodeGen = exports.Name = exports.nil = exports.stringify = exports.str = exports._ = exports.KeywordCxt = void 0;\nvar validate_1 = require(\"./compile/validate\");\nObject.defineProperty(exports, \"KeywordCxt\", { enumerable: true, get: function () { return validate_1.KeywordCxt; } });\nvar codegen_1 = require(\"./compile/codegen\");\nObject.defineProperty(exports, \"_\", { enumerable: true, get: function () { return codegen_1._; } });\nObject.defineProperty(exports, \"str\", { enumerable: true, get: function () { return codegen_1.str; } 
});\nObject.defineProperty(exports, \"stringify\", { enumerable: true, get: function () { return codegen_1.stringify; } });\nObject.defineProperty(exports, \"nil\", { enumerable: true, get: function () { return codegen_1.nil; } });\nObject.defineProperty(exports, \"Name\", { enumerable: true, get: function () { return codegen_1.Name; } });\nObject.defineProperty(exports, \"CodeGen\", { enumerable: true, get: function () { return codegen_1.CodeGen; } });\nconst validation_error_1 = require(\"./runtime/validation_error\");\nconst ref_error_1 = require(\"./compile/ref_error\");\nconst rules_1 = require(\"./compile/rules\");\nconst compile_1 = require(\"./compile\");\nconst codegen_2 = require(\"./compile/codegen\");\nconst resolve_1 = require(\"./compile/resolve\");\nconst dataType_1 = require(\"./compile/validate/dataType\");\nconst util_1 = require(\"./compile/util\");\nconst $dataRefSchema = require(\"./refs/data.json\");\nconst uri_1 = require(\"./runtime/uri\");\nconst defaultRegExp = (str, flags) => new RegExp(str, flags);\ndefaultRegExp.code = \"new RegExp\";\nconst META_IGNORE_OPTIONS = [\"removeAdditional\", \"useDefaults\", \"coerceTypes\"];\nconst EXT_SCOPE_NAMES = new Set([\n \"validate\",\n \"serialize\",\n \"parse\",\n \"wrapper\",\n \"root\",\n \"schema\",\n \"keyword\",\n \"pattern\",\n \"formats\",\n \"validate$data\",\n \"func\",\n \"obj\",\n \"Error\",\n]);\nconst removedOptions = {\n errorDataPath: \"\",\n format: \"`validateFormats: false` can be used instead.\",\n nullable: '\"nullable\" keyword is supported by default.',\n jsonPointers: \"Deprecated jsPropertySyntax can be used instead.\",\n extendRefs: \"Deprecated ignoreKeywordsWithRef can be used instead.\",\n missingRefs: \"Pass empty schema with $id that should be ignored to ajv.addSchema.\",\n processCode: \"Use option `code: {process: (code, schemaEnv: object) => string}`\",\n sourceCode: \"Use option `code: {source: true}`\",\n strictDefaults: \"It is default now, see option `strict`.\",\n strictKeywords: \"It is default now, see option `strict`.\",\n uniqueItems: '\"uniqueItems\" keyword is always validated.',\n unknownFormats: \"Disable strict mode or pass `true` to `ajv.addFormat` (or `formats` option).\",\n cache: \"Map is used as cache, schema object as key.\",\n serialize: \"Map is used as cache, schema object as key.\",\n ajvErrors: \"It is default now.\",\n};\nconst deprecatedOptions = {\n ignoreKeywordsWithRef: \"\",\n jsPropertySyntax: \"\",\n unicode: '\"minLength\"/\"maxLength\" account for unicode characters by default.',\n};\nconst MAX_EXPRESSION = 200;\n// eslint-disable-next-line complexity\nfunction requiredOptions(o) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _0;\n const s = o.strict;\n const _optz = (_a = o.code) === null || _a === void 0 ? void 0 : _a.optimize;\n const optimize = _optz === true || _optz === undefined ? 1 : _optz || 0;\n const regExp = (_c = (_b = o.code) === null || _b === void 0 ? void 0 : _b.regExp) !== null && _c !== void 0 ? _c : defaultRegExp;\n const uriResolver = (_d = o.uriResolver) !== null && _d !== void 0 ? _d : uri_1.default;\n return {\n strictSchema: (_f = (_e = o.strictSchema) !== null && _e !== void 0 ? _e : s) !== null && _f !== void 0 ? _f : true,\n strictNumbers: (_h = (_g = o.strictNumbers) !== null && _g !== void 0 ? _g : s) !== null && _h !== void 0 ? _h : true,\n strictTypes: (_k = (_j = o.strictTypes) !== null && _j !== void 0 ? _j : s) !== null && _k !== void 0 ? 
_k : \"log\",\n strictTuples: (_m = (_l = o.strictTuples) !== null && _l !== void 0 ? _l : s) !== null && _m !== void 0 ? _m : \"log\",\n strictRequired: (_p = (_o = o.strictRequired) !== null && _o !== void 0 ? _o : s) !== null && _p !== void 0 ? _p : false,\n code: o.code ? { ...o.code, optimize, regExp } : { optimize, regExp },\n loopRequired: (_q = o.loopRequired) !== null && _q !== void 0 ? _q : MAX_EXPRESSION,\n loopEnum: (_r = o.loopEnum) !== null && _r !== void 0 ? _r : MAX_EXPRESSION,\n meta: (_s = o.meta) !== null && _s !== void 0 ? _s : true,\n messages: (_t = o.messages) !== null && _t !== void 0 ? _t : true,\n inlineRefs: (_u = o.inlineRefs) !== null && _u !== void 0 ? _u : true,\n schemaId: (_v = o.schemaId) !== null && _v !== void 0 ? _v : \"$id\",\n addUsedSchema: (_w = o.addUsedSchema) !== null && _w !== void 0 ? _w : true,\n validateSchema: (_x = o.validateSchema) !== null && _x !== void 0 ? _x : true,\n validateFormats: (_y = o.validateFormats) !== null && _y !== void 0 ? _y : true,\n unicodeRegExp: (_z = o.unicodeRegExp) !== null && _z !== void 0 ? _z : true,\n int32range: (_0 = o.int32range) !== null && _0 !== void 0 ? _0 : true,\n uriResolver: uriResolver,\n };\n}\nclass Ajv {\n constructor(opts = {}) {\n this.schemas = {};\n this.refs = {};\n this.formats = {};\n this._compilations = new Set();\n this._loading = {};\n this._cache = new Map();\n opts = this.opts = { ...opts, ...requiredOptions(opts) };\n const { es5, lines } = this.opts.code;\n this.scope = new codegen_2.ValueScope({ scope: {}, prefixes: EXT_SCOPE_NAMES, es5, lines });\n this.logger = getLogger(opts.logger);\n const formatOpt = opts.validateFormats;\n opts.validateFormats = false;\n this.RULES = (0, rules_1.getRules)();\n checkOptions.call(this, removedOptions, opts, \"NOT SUPPORTED\");\n checkOptions.call(this, deprecatedOptions, opts, \"DEPRECATED\", \"warn\");\n this._metaOpts = getMetaSchemaOptions.call(this);\n if (opts.formats)\n addInitialFormats.call(this);\n this._addVocabularies();\n this._addDefaultMetaSchema();\n if (opts.keywords)\n addInitialKeywords.call(this, opts.keywords);\n if (typeof opts.meta == \"object\")\n this.addMetaSchema(opts.meta);\n addInitialSchemas.call(this);\n opts.validateFormats = formatOpt;\n }\n _addVocabularies() {\n this.addKeyword(\"$async\");\n }\n _addDefaultMetaSchema() {\n const { $data, meta, schemaId } = this.opts;\n let _dataRefSchema = $dataRefSchema;\n if (schemaId === \"id\") {\n _dataRefSchema = { ...$dataRefSchema };\n _dataRefSchema.id = _dataRefSchema.$id;\n delete _dataRefSchema.$id;\n }\n if (meta && $data)\n this.addMetaSchema(_dataRefSchema, _dataRefSchema[schemaId], false);\n }\n defaultMeta() {\n const { meta, schemaId } = this.opts;\n return (this.opts.defaultMeta = typeof meta == \"object\" ? 
meta[schemaId] || meta : undefined);\n }\n validate(schemaKeyRef, // key, ref or schema object\n data // to be validated\n ) {\n let v;\n if (typeof schemaKeyRef == \"string\") {\n v = this.getSchema(schemaKeyRef);\n if (!v)\n throw new Error(`no schema with key or ref \"${schemaKeyRef}\"`);\n }\n else {\n v = this.compile(schemaKeyRef);\n }\n const valid = v(data);\n if (!(\"$async\" in v))\n this.errors = v.errors;\n return valid;\n }\n compile(schema, _meta) {\n const sch = this._addSchema(schema, _meta);\n return (sch.validate || this._compileSchemaEnv(sch));\n }\n compileAsync(schema, meta) {\n if (typeof this.opts.loadSchema != \"function\") {\n throw new Error(\"options.loadSchema should be a function\");\n }\n const { loadSchema } = this.opts;\n return runCompileAsync.call(this, schema, meta);\n async function runCompileAsync(_schema, _meta) {\n await loadMetaSchema.call(this, _schema.$schema);\n const sch = this._addSchema(_schema, _meta);\n return sch.validate || _compileAsync.call(this, sch);\n }\n async function loadMetaSchema($ref) {\n if ($ref && !this.getSchema($ref)) {\n await runCompileAsync.call(this, { $ref }, true);\n }\n }\n async function _compileAsync(sch) {\n try {\n return this._compileSchemaEnv(sch);\n }\n catch (e) {\n if (!(e instanceof ref_error_1.default))\n throw e;\n checkLoaded.call(this, e);\n await loadMissingSchema.call(this, e.missingSchema);\n return _compileAsync.call(this, sch);\n }\n }\n function checkLoaded({ missingSchema: ref, missingRef }) {\n if (this.refs[ref]) {\n throw new Error(`AnySchema ${ref} is loaded but ${missingRef} cannot be resolved`);\n }\n }\n async function loadMissingSchema(ref) {\n const _schema = await _loadSchema.call(this, ref);\n if (!this.refs[ref])\n await loadMetaSchema.call(this, _schema.$schema);\n if (!this.refs[ref])\n this.addSchema(_schema, ref, meta);\n }\n async function _loadSchema(ref) {\n const p = this._loading[ref];\n if (p)\n return p;\n try {\n return await (this._loading[ref] = loadSchema(ref));\n }\n finally {\n delete this._loading[ref];\n }\n }\n }\n // Adds schema to the instance\n addSchema(schema, // If array is passed, `key` will be ignored\n key, // Optional schema key. Can be passed to `validate` method instead of schema object or id/ref. One schema per instance can have empty `id` and `key`.\n _meta, // true if schema is a meta-schema. Used internally, addMetaSchema should be used instead.\n _validateSchema = this.opts.validateSchema // false to skip schema validation. 
Used internally, option validateSchema should be used instead.\n ) {\n if (Array.isArray(schema)) {\n for (const sch of schema)\n this.addSchema(sch, undefined, _meta, _validateSchema);\n return this;\n }\n let id;\n if (typeof schema === \"object\") {\n const { schemaId } = this.opts;\n id = schema[schemaId];\n if (id !== undefined && typeof id != \"string\") {\n throw new Error(`schema ${schemaId} must be string`);\n }\n }\n key = (0, resolve_1.normalizeId)(key || id);\n this._checkUnique(key);\n this.schemas[key] = this._addSchema(schema, _meta, key, _validateSchema, true);\n return this;\n }\n // Add schema that will be used to validate other schemas\n // options in META_IGNORE_OPTIONS are alway set to false\n addMetaSchema(schema, key, // schema key\n _validateSchema = this.opts.validateSchema // false to skip schema validation, can be used to override validateSchema option for meta-schema\n ) {\n this.addSchema(schema, key, true, _validateSchema);\n return this;\n }\n // Validate schema against its meta-schema\n validateSchema(schema, throwOrLogError) {\n if (typeof schema == \"boolean\")\n return true;\n let $schema;\n $schema = schema.$schema;\n if ($schema !== undefined && typeof $schema != \"string\") {\n throw new Error(\"$schema must be a string\");\n }\n $schema = $schema || this.opts.defaultMeta || this.defaultMeta();\n if (!$schema) {\n this.logger.warn(\"meta-schema not available\");\n this.errors = null;\n return true;\n }\n const valid = this.validate($schema, schema);\n if (!valid && throwOrLogError) {\n const message = \"schema is invalid: \" + this.errorsText();\n if (this.opts.validateSchema === \"log\")\n this.logger.error(message);\n else\n throw new Error(message);\n }\n return valid;\n }\n // Get compiled schema by `key` or `ref`.\n // (`key` that was passed to `addSchema` or full schema reference - `schema.$id` or resolved id)\n getSchema(keyRef) {\n let sch;\n while (typeof (sch = getSchEnv.call(this, keyRef)) == \"string\")\n keyRef = sch;\n if (sch === undefined) {\n const { schemaId } = this.opts;\n const root = new compile_1.SchemaEnv({ schema: {}, schemaId });\n sch = compile_1.resolveSchema.call(this, root, keyRef);\n if (!sch)\n return;\n this.refs[keyRef] = sch;\n }\n return (sch.validate || this._compileSchemaEnv(sch));\n }\n // Remove cached schema(s).\n // If no parameter is passed all schemas but meta-schemas are removed.\n // If RegExp is passed all schemas with key/id matching pattern but meta-schemas are removed.\n // Even if schema is referenced by other schemas it still can be removed as other schemas have local references.\n removeSchema(schemaKeyRef) {\n if (schemaKeyRef instanceof RegExp) {\n this._removeAllSchemas(this.schemas, schemaKeyRef);\n this._removeAllSchemas(this.refs, schemaKeyRef);\n return this;\n }\n switch (typeof schemaKeyRef) {\n case \"undefined\":\n this._removeAllSchemas(this.schemas);\n this._removeAllSchemas(this.refs);\n this._cache.clear();\n return this;\n case \"string\": {\n const sch = getSchEnv.call(this, schemaKeyRef);\n if (typeof sch == \"object\")\n this._cache.delete(sch.schema);\n delete this.schemas[schemaKeyRef];\n delete this.refs[schemaKeyRef];\n return this;\n }\n case \"object\": {\n const cacheKey = schemaKeyRef;\n this._cache.delete(cacheKey);\n let id = schemaKeyRef[this.opts.schemaId];\n if (id) {\n id = (0, resolve_1.normalizeId)(id);\n delete this.schemas[id];\n delete this.refs[id];\n }\n return this;\n }\n default:\n throw new Error(\"ajv.removeSchema: invalid parameter\");\n }\n }\n // add 
\"vocabulary\" - a collection of keywords\n addVocabulary(definitions) {\n for (const def of definitions)\n this.addKeyword(def);\n return this;\n }\n addKeyword(kwdOrDef, def // deprecated\n ) {\n let keyword;\n if (typeof kwdOrDef == \"string\") {\n keyword = kwdOrDef;\n if (typeof def == \"object\") {\n this.logger.warn(\"these parameters are deprecated, see docs for addKeyword\");\n def.keyword = keyword;\n }\n }\n else if (typeof kwdOrDef == \"object\" && def === undefined) {\n def = kwdOrDef;\n keyword = def.keyword;\n if (Array.isArray(keyword) && !keyword.length) {\n throw new Error(\"addKeywords: keyword must be string or non-empty array\");\n }\n }\n else {\n throw new Error(\"invalid addKeywords parameters\");\n }\n checkKeyword.call(this, keyword, def);\n if (!def) {\n (0, util_1.eachItem)(keyword, (kwd) => addRule.call(this, kwd));\n return this;\n }\n keywordMetaschema.call(this, def);\n const definition = {\n ...def,\n type: (0, dataType_1.getJSONTypes)(def.type),\n schemaType: (0, dataType_1.getJSONTypes)(def.schemaType),\n };\n (0, util_1.eachItem)(keyword, definition.type.length === 0\n ? (k) => addRule.call(this, k, definition)\n : (k) => definition.type.forEach((t) => addRule.call(this, k, definition, t)));\n return this;\n }\n getKeyword(keyword) {\n const rule = this.RULES.all[keyword];\n return typeof rule == \"object\" ? rule.definition : !!rule;\n }\n // Remove keyword\n removeKeyword(keyword) {\n // TODO return type should be Ajv\n const { RULES } = this;\n delete RULES.keywords[keyword];\n delete RULES.all[keyword];\n for (const group of RULES.rules) {\n const i = group.rules.findIndex((rule) => rule.keyword === keyword);\n if (i >= 0)\n group.rules.splice(i, 1);\n }\n return this;\n }\n // Add format\n addFormat(name, format) {\n if (typeof format == \"string\")\n format = new RegExp(format);\n this.formats[name] = format;\n return this;\n }\n errorsText(errors = this.errors, // optional array of validation errors\n { separator = \", \", dataVar = \"data\" } = {} // optional options with properties `separator` and `dataVar`\n ) {\n if (!errors || errors.length === 0)\n return \"No errors\";\n return errors\n .map((e) => `${dataVar}${e.instancePath} ${e.message}`)\n .reduce((text, msg) => text + separator + msg);\n }\n $dataMetaSchema(metaSchema, keywordsJsonPointers) {\n const rules = this.RULES.all;\n metaSchema = JSON.parse(JSON.stringify(metaSchema));\n for (const jsonPointer of keywordsJsonPointers) {\n const segments = jsonPointer.split(\"/\").slice(1); // first segment is an empty string\n let keywords = metaSchema;\n for (const seg of segments)\n keywords = keywords[seg];\n for (const key in rules) {\n const rule = rules[key];\n if (typeof rule != \"object\")\n continue;\n const { $data } = rule.definition;\n const schema = keywords[key];\n if ($data && schema)\n keywords[key] = schemaOrData(schema);\n }\n }\n return metaSchema;\n }\n _removeAllSchemas(schemas, regex) {\n for (const keyRef in schemas) {\n const sch = schemas[keyRef];\n if (!regex || regex.test(keyRef)) {\n if (typeof sch == \"string\") {\n delete schemas[keyRef];\n }\n else if (sch && !sch.meta) {\n this._cache.delete(sch.schema);\n delete schemas[keyRef];\n }\n }\n }\n }\n _addSchema(schema, meta, baseId, validateSchema = this.opts.validateSchema, addSchema = this.opts.addUsedSchema) {\n let id;\n const { schemaId } = this.opts;\n if (typeof schema == \"object\") {\n id = schema[schemaId];\n }\n else {\n if (this.opts.jtd)\n throw new Error(\"schema must be object\");\n else if (typeof 
schema != \"boolean\")\n throw new Error(\"schema must be object or boolean\");\n }\n let sch = this._cache.get(schema);\n if (sch !== undefined)\n return sch;\n baseId = (0, resolve_1.normalizeId)(id || baseId);\n const localRefs = resolve_1.getSchemaRefs.call(this, schema, baseId);\n sch = new compile_1.SchemaEnv({ schema, schemaId, meta, baseId, localRefs });\n this._cache.set(sch.schema, sch);\n if (addSchema && !baseId.startsWith(\"#\")) {\n // TODO atm it is allowed to overwrite schemas without id (instead of not adding them)\n if (baseId)\n this._checkUnique(baseId);\n this.refs[baseId] = sch;\n }\n if (validateSchema)\n this.validateSchema(schema, true);\n return sch;\n }\n _checkUnique(id) {\n if (this.schemas[id] || this.refs[id]) {\n throw new Error(`schema with key or id \"${id}\" already exists`);\n }\n }\n _compileSchemaEnv(sch) {\n if (sch.meta)\n this._compileMetaSchema(sch);\n else\n compile_1.compileSchema.call(this, sch);\n /* istanbul ignore if */\n if (!sch.validate)\n throw new Error(\"ajv implementation error\");\n return sch.validate;\n }\n _compileMetaSchema(sch) {\n const currentOpts = this.opts;\n this.opts = this._metaOpts;\n try {\n compile_1.compileSchema.call(this, sch);\n }\n finally {\n this.opts = currentOpts;\n }\n }\n}\nexports.default = Ajv;\nAjv.ValidationError = validation_error_1.default;\nAjv.MissingRefError = ref_error_1.default;\nfunction checkOptions(checkOpts, options, msg, log = \"error\") {\n for (const key in checkOpts) {\n const opt = key;\n if (opt in options)\n this.logger[log](`${msg}: option ${key}. ${checkOpts[opt]}`);\n }\n}\nfunction getSchEnv(keyRef) {\n keyRef = (0, resolve_1.normalizeId)(keyRef); // TODO tests fail without this line\n return this.schemas[keyRef] || this.refs[keyRef];\n}\nfunction addInitialSchemas() {\n const optsSchemas = this.opts.schemas;\n if (!optsSchemas)\n return;\n if (Array.isArray(optsSchemas))\n this.addSchema(optsSchemas);\n else\n for (const key in optsSchemas)\n this.addSchema(optsSchemas[key], key);\n}\nfunction addInitialFormats() {\n for (const name in this.opts.formats) {\n const format = this.opts.formats[name];\n if (format)\n this.addFormat(name, format);\n }\n}\nfunction addInitialKeywords(defs) {\n if (Array.isArray(defs)) {\n this.addVocabulary(defs);\n return;\n }\n this.logger.warn(\"keywords option as map is deprecated, pass array\");\n for (const keyword in defs) {\n const def = defs[keyword];\n if (!def.keyword)\n def.keyword = keyword;\n this.addKeyword(def);\n }\n}\nfunction getMetaSchemaOptions() {\n const metaOpts = { ...this.opts };\n for (const opt of META_IGNORE_OPTIONS)\n delete metaOpts[opt];\n return metaOpts;\n}\nconst noLogs = { log() { }, warn() { }, error() { } };\nfunction getLogger(logger) {\n if (logger === false)\n return noLogs;\n if (logger === undefined)\n return console;\n if (logger.log && logger.warn && logger.error)\n return logger;\n throw new Error(\"logger must implement log, warn and error methods\");\n}\nconst KEYWORD_NAME = /^[a-z_$][a-z0-9_$:-]*$/i;\nfunction checkKeyword(keyword, def) {\n const { RULES } = this;\n (0, util_1.eachItem)(keyword, (kwd) => {\n if (RULES.keywords[kwd])\n throw new Error(`Keyword ${kwd} is already defined`);\n if (!KEYWORD_NAME.test(kwd))\n throw new Error(`Keyword ${kwd} has invalid name`);\n });\n if (!def)\n return;\n if (def.$data && !(\"code\" in def || \"validate\" in def)) {\n throw new Error('$data keyword must have \"code\" or \"validate\" function');\n }\n}\nfunction addRule(keyword, definition, dataType) {\n var 
_a;\n const post = definition === null || definition === void 0 ? void 0 : definition.post;\n if (dataType && post)\n throw new Error('keyword with \"post\" flag cannot have \"type\"');\n const { RULES } = this;\n let ruleGroup = post ? RULES.post : RULES.rules.find(({ type: t }) => t === dataType);\n if (!ruleGroup) {\n ruleGroup = { type: dataType, rules: [] };\n RULES.rules.push(ruleGroup);\n }\n RULES.keywords[keyword] = true;\n if (!definition)\n return;\n const rule = {\n keyword,\n definition: {\n ...definition,\n type: (0, dataType_1.getJSONTypes)(definition.type),\n schemaType: (0, dataType_1.getJSONTypes)(definition.schemaType),\n },\n };\n if (definition.before)\n addBeforeRule.call(this, ruleGroup, rule, definition.before);\n else\n ruleGroup.rules.push(rule);\n RULES.all[keyword] = rule;\n (_a = definition.implements) === null || _a === void 0 ? void 0 : _a.forEach((kwd) => this.addKeyword(kwd));\n}\nfunction addBeforeRule(ruleGroup, rule, before) {\n const i = ruleGroup.rules.findIndex((_rule) => _rule.keyword === before);\n if (i >= 0) {\n ruleGroup.rules.splice(i, 0, rule);\n }\n else {\n ruleGroup.rules.push(rule);\n this.logger.warn(`rule ${before} is not defined`);\n }\n}\nfunction keywordMetaschema(def) {\n let { metaSchema } = def;\n if (metaSchema === undefined)\n return;\n if (def.$data && this.opts.$data)\n metaSchema = schemaOrData(metaSchema);\n def.validateSchema = this.compile(metaSchema, true);\n}\nconst $dataRef = {\n $ref: \"https://raw.githubusercontent.com/ajv-validator/ajv/master/lib/refs/data.json#\",\n};\nfunction schemaOrData(schema) {\n return { anyOf: [schema, $dataRef] };\n}\n//# sourceMappingURL=core.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\n// https://github.com/ajv-validator/ajv/issues/889\nconst equal = require(\"fast-deep-equal\");\nequal.code = 'require(\"ajv/dist/runtime/equal\").default';\nexports.default = equal;\n//# sourceMappingURL=equal.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\n// https://mathiasbynens.be/notes/javascript-encoding\n// https://github.com/bestiejs/punycode.js - punycode.ucs2.decode\nfunction ucs2length(str) {\n const len = str.length;\n let length = 0;\n let pos = 0;\n let value;\n while (pos < len) {\n length++;\n value = str.charCodeAt(pos++);\n if (value >= 0xd800 && value <= 0xdbff && pos < len) {\n // high surrogate, and there is a next character\n value = str.charCodeAt(pos);\n if ((value & 0xfc00) === 0xdc00)\n pos++; // low surrogate\n }\n }\n return length;\n}\nexports.default = ucs2length;\nucs2length.code = 'require(\"ajv/dist/runtime/ucs2length\").default';\n//# sourceMappingURL=ucs2length.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst uri = require(\"uri-js\");\nuri.code = 'require(\"ajv/dist/runtime/uri\").default';\nexports.default = uri;\n//# sourceMappingURL=uri.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nclass ValidationError extends Error {\n constructor(errors) {\n super(\"validation failed\");\n this.errors = errors;\n this.ajv = this.validation = true;\n }\n}\nexports.default = ValidationError;\n//# sourceMappingURL=validation_error.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.validateAdditionalItems = void 0;\nconst codegen_1 = require(\"../../compile/codegen\");\nconst util_1 = require(\"../../compile/util\");\nconst error = {\n message: ({ params: { 
len } }) => (0, codegen_1.str) `must NOT have more than ${len} items`,\n params: ({ params: { len } }) => (0, codegen_1._) `{limit: ${len}}`,\n};\nconst def = {\n keyword: \"additionalItems\",\n type: \"array\",\n schemaType: [\"boolean\", \"object\"],\n before: \"uniqueItems\",\n error,\n code(cxt) {\n const { parentSchema, it } = cxt;\n const { items } = parentSchema;\n if (!Array.isArray(items)) {\n (0, util_1.checkStrictMode)(it, '\"additionalItems\" is ignored when \"items\" is not an array of schemas');\n return;\n }\n validateAdditionalItems(cxt, items);\n },\n};\nfunction validateAdditionalItems(cxt, items) {\n const { gen, schema, data, keyword, it } = cxt;\n it.items = true;\n const len = gen.const(\"len\", (0, codegen_1._) `${data}.length`);\n if (schema === false) {\n cxt.setParams({ len: items.length });\n cxt.pass((0, codegen_1._) `${len} <= ${items.length}`);\n }\n else if (typeof schema == \"object\" && !(0, util_1.alwaysValidSchema)(it, schema)) {\n const valid = gen.var(\"valid\", (0, codegen_1._) `${len} <= ${items.length}`); // TODO var\n gen.if((0, codegen_1.not)(valid), () => validateItems(valid));\n cxt.ok(valid);\n }\n function validateItems(valid) {\n gen.forRange(\"i\", items.length, len, (i) => {\n cxt.subschema({ keyword, dataProp: i, dataPropType: util_1.Type.Num }, valid);\n if (!it.allErrors)\n gen.if((0, codegen_1.not)(valid), () => gen.break());\n });\n }\n}\nexports.validateAdditionalItems = validateAdditionalItems;\nexports.default = def;\n//# sourceMappingURL=additionalItems.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst code_1 = require(\"../code\");\nconst codegen_1 = require(\"../../compile/codegen\");\nconst names_1 = require(\"../../compile/names\");\nconst util_1 = require(\"../../compile/util\");\nconst error = {\n message: \"must NOT have additional properties\",\n params: ({ params }) => (0, codegen_1._) `{additionalProperty: ${params.additionalProperty}}`,\n};\nconst def = {\n keyword: \"additionalProperties\",\n type: [\"object\"],\n schemaType: [\"boolean\", \"object\"],\n allowUndefined: true,\n trackErrors: true,\n error,\n code(cxt) {\n const { gen, schema, parentSchema, data, errsCount, it } = cxt;\n /* istanbul ignore if */\n if (!errsCount)\n throw new Error(\"ajv implementation error\");\n const { allErrors, opts } = it;\n it.props = true;\n if (opts.removeAdditional !== \"all\" && (0, util_1.alwaysValidSchema)(it, schema))\n return;\n const props = (0, code_1.allSchemaProperties)(parentSchema.properties);\n const patProps = (0, code_1.allSchemaProperties)(parentSchema.patternProperties);\n checkAdditionalProperties();\n cxt.ok((0, codegen_1._) `${errsCount} === ${names_1.default.errors}`);\n function checkAdditionalProperties() {\n gen.forIn(\"key\", data, (key) => {\n if (!props.length && !patProps.length)\n additionalPropertyCode(key);\n else\n gen.if(isAdditional(key), () => additionalPropertyCode(key));\n });\n }\n function isAdditional(key) {\n let definedProp;\n if (props.length > 8) {\n // TODO maybe an option instead of hard-coded 8?\n const propsSchema = (0, util_1.schemaRefOrVal)(it, parentSchema.properties, \"properties\");\n definedProp = (0, code_1.isOwnProperty)(gen, propsSchema, key);\n }\n else if (props.length) {\n definedProp = (0, codegen_1.or)(...props.map((p) => (0, codegen_1._) `${key} === ${p}`));\n }\n else {\n definedProp = codegen_1.nil;\n }\n if (patProps.length) {\n definedProp = (0, codegen_1.or)(definedProp, ...patProps.map((p) => (0, codegen_1._) `${(0, 
code_1.usePattern)(cxt, p)}.test(${key})`));\n }\n return (0, codegen_1.not)(definedProp);\n }\n function deleteAdditional(key) {\n gen.code((0, codegen_1._) `delete ${data}[${key}]`);\n }\n function additionalPropertyCode(key) {\n if (opts.removeAdditional === \"all\" || (opts.removeAdditional && schema === false)) {\n deleteAdditional(key);\n return;\n }\n if (schema === false) {\n cxt.setParams({ additionalProperty: key });\n cxt.error();\n if (!allErrors)\n gen.break();\n return;\n }\n if (typeof schema == \"object\" && !(0, util_1.alwaysValidSchema)(it, schema)) {\n const valid = gen.name(\"valid\");\n if (opts.removeAdditional === \"failing\") {\n applyAdditionalSchema(key, valid, false);\n gen.if((0, codegen_1.not)(valid), () => {\n cxt.reset();\n deleteAdditional(key);\n });\n }\n else {\n applyAdditionalSchema(key, valid);\n if (!allErrors)\n gen.if((0, codegen_1.not)(valid), () => gen.break());\n }\n }\n }\n function applyAdditionalSchema(key, valid, errors) {\n const subschema = {\n keyword: \"additionalProperties\",\n dataProp: key,\n dataPropType: util_1.Type.Str,\n };\n if (errors === false) {\n Object.assign(subschema, {\n compositeRule: true,\n createErrors: false,\n allErrors: false,\n });\n }\n cxt.subschema(subschema, valid);\n }\n },\n};\nexports.default = def;\n//# sourceMappingURL=additionalProperties.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst util_1 = require(\"../../compile/util\");\nconst def = {\n keyword: \"allOf\",\n schemaType: \"array\",\n code(cxt) {\n const { gen, schema, it } = cxt;\n /* istanbul ignore if */\n if (!Array.isArray(schema))\n throw new Error(\"ajv implementation error\");\n const valid = gen.name(\"valid\");\n schema.forEach((sch, i) => {\n if ((0, util_1.alwaysValidSchema)(it, sch))\n return;\n const schCxt = cxt.subschema({ keyword: \"allOf\", schemaProp: i }, valid);\n cxt.ok(valid);\n cxt.mergeEvaluated(schCxt);\n });\n },\n};\nexports.default = def;\n//# sourceMappingURL=allOf.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst code_1 = require(\"../code\");\nconst def = {\n keyword: \"anyOf\",\n schemaType: \"array\",\n trackErrors: true,\n code: code_1.validateUnion,\n error: { message: \"must match a schema in anyOf\" },\n};\nexports.default = def;\n//# sourceMappingURL=anyOf.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = require(\"../../compile/codegen\");\nconst util_1 = require(\"../../compile/util\");\nconst error = {\n message: ({ params: { min, max } }) => max === undefined\n ? (0, codegen_1.str) `must contain at least ${min} valid item(s)`\n : (0, codegen_1.str) `must contain at least ${min} and no more than ${max} valid item(s)`,\n params: ({ params: { min, max } }) => max === undefined ? (0, codegen_1._) `{minContains: ${min}}` : (0, codegen_1._) `{minContains: ${min}, maxContains: ${max}}`,\n};\nconst def = {\n keyword: \"contains\",\n type: \"array\",\n schemaType: [\"object\", \"boolean\"],\n before: \"uniqueItems\",\n trackErrors: true,\n error,\n code(cxt) {\n const { gen, schema, parentSchema, data, it } = cxt;\n let min;\n let max;\n const { minContains, maxContains } = parentSchema;\n if (it.opts.next) {\n min = minContains === undefined ? 
1 : minContains;\n max = maxContains;\n }\n else {\n min = 1;\n }\n const len = gen.const(\"len\", (0, codegen_1._) `${data}.length`);\n cxt.setParams({ min, max });\n if (max === undefined && min === 0) {\n (0, util_1.checkStrictMode)(it, `\"minContains\" == 0 without \"maxContains\": \"contains\" keyword ignored`);\n return;\n }\n if (max !== undefined && min > max) {\n (0, util_1.checkStrictMode)(it, `\"minContains\" > \"maxContains\" is always invalid`);\n cxt.fail();\n return;\n }\n if ((0, util_1.alwaysValidSchema)(it, schema)) {\n let cond = (0, codegen_1._) `${len} >= ${min}`;\n if (max !== undefined)\n cond = (0, codegen_1._) `${cond} && ${len} <= ${max}`;\n cxt.pass(cond);\n return;\n }\n it.items = true;\n const valid = gen.name(\"valid\");\n if (max === undefined && min === 1) {\n validateItems(valid, () => gen.if(valid, () => gen.break()));\n }\n else if (min === 0) {\n gen.let(valid, true);\n if (max !== undefined)\n gen.if((0, codegen_1._) `${data}.length > 0`, validateItemsWithCount);\n }\n else {\n gen.let(valid, false);\n validateItemsWithCount();\n }\n cxt.result(valid, () => cxt.reset());\n function validateItemsWithCount() {\n const schValid = gen.name(\"_valid\");\n const count = gen.let(\"count\", 0);\n validateItems(schValid, () => gen.if(schValid, () => checkLimits(count)));\n }\n function validateItems(_valid, block) {\n gen.forRange(\"i\", 0, len, (i) => {\n cxt.subschema({\n keyword: \"contains\",\n dataProp: i,\n dataPropType: util_1.Type.Num,\n compositeRule: true,\n }, _valid);\n block();\n });\n }\n function checkLimits(count) {\n gen.code((0, codegen_1._) `${count}++`);\n if (max === undefined) {\n gen.if((0, codegen_1._) `${count} >= ${min}`, () => gen.assign(valid, true).break());\n }\n else {\n gen.if((0, codegen_1._) `${count} > ${max}`, () => gen.assign(valid, false).break());\n if (min === 1)\n gen.assign(valid, true);\n else\n gen.if((0, codegen_1._) `${count} >= ${min}`, () => gen.assign(valid, true));\n }\n }\n },\n};\nexports.default = def;\n//# sourceMappingURL=contains.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.validateSchemaDeps = exports.validatePropertyDeps = exports.error = void 0;\nconst codegen_1 = require(\"../../compile/codegen\");\nconst util_1 = require(\"../../compile/util\");\nconst code_1 = require(\"../code\");\nexports.error = {\n message: ({ params: { property, depsCount, deps } }) => {\n const property_ies = depsCount === 1 ? \"property\" : \"properties\";\n return (0, codegen_1.str) `must have ${property_ies} ${deps} when property ${property} is present`;\n },\n params: ({ params: { property, depsCount, deps, missingProperty } }) => (0, codegen_1._) `{property: ${property},\n missingProperty: ${missingProperty},\n depsCount: ${depsCount},\n deps: ${deps}}`, // TODO change to reference\n};\nconst def = {\n keyword: \"dependencies\",\n type: \"object\",\n schemaType: \"object\",\n error: exports.error,\n code(cxt) {\n const [propDeps, schDeps] = splitDependencies(cxt);\n validatePropertyDeps(cxt, propDeps);\n validateSchemaDeps(cxt, schDeps);\n },\n};\nfunction splitDependencies({ schema }) {\n const propertyDeps = {};\n const schemaDeps = {};\n for (const key in schema) {\n if (key === \"__proto__\")\n continue;\n const deps = Array.isArray(schema[key]) ? 
propertyDeps : schemaDeps;\n deps[key] = schema[key];\n }\n return [propertyDeps, schemaDeps];\n}\nfunction validatePropertyDeps(cxt, propertyDeps = cxt.schema) {\n const { gen, data, it } = cxt;\n if (Object.keys(propertyDeps).length === 0)\n return;\n const missing = gen.let(\"missing\");\n for (const prop in propertyDeps) {\n const deps = propertyDeps[prop];\n if (deps.length === 0)\n continue;\n const hasProperty = (0, code_1.propertyInData)(gen, data, prop, it.opts.ownProperties);\n cxt.setParams({\n property: prop,\n depsCount: deps.length,\n deps: deps.join(\", \"),\n });\n if (it.allErrors) {\n gen.if(hasProperty, () => {\n for (const depProp of deps) {\n (0, code_1.checkReportMissingProp)(cxt, depProp);\n }\n });\n }\n else {\n gen.if((0, codegen_1._) `${hasProperty} && (${(0, code_1.checkMissingProp)(cxt, deps, missing)})`);\n (0, code_1.reportMissingProp)(cxt, missing);\n gen.else();\n }\n }\n}\nexports.validatePropertyDeps = validatePropertyDeps;\nfunction validateSchemaDeps(cxt, schemaDeps = cxt.schema) {\n const { gen, data, keyword, it } = cxt;\n const valid = gen.name(\"valid\");\n for (const prop in schemaDeps) {\n if ((0, util_1.alwaysValidSchema)(it, schemaDeps[prop]))\n continue;\n gen.if((0, code_1.propertyInData)(gen, data, prop, it.opts.ownProperties), () => {\n const schCxt = cxt.subschema({ keyword, schemaProp: prop }, valid);\n cxt.mergeValidEvaluated(schCxt, valid);\n }, () => gen.var(valid, true) // TODO var\n );\n cxt.ok(valid);\n }\n}\nexports.validateSchemaDeps = validateSchemaDeps;\nexports.default = def;\n//# sourceMappingURL=dependencies.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = require(\"../../compile/codegen\");\nconst util_1 = require(\"../../compile/util\");\nconst error = {\n message: ({ params }) => (0, codegen_1.str) `must match \"${params.ifClause}\" schema`,\n params: ({ params }) => (0, codegen_1._) `{failingKeyword: ${params.ifClause}}`,\n};\nconst def = {\n keyword: \"if\",\n schemaType: [\"object\", \"boolean\"],\n trackErrors: true,\n error,\n code(cxt) {\n const { gen, parentSchema, it } = cxt;\n if (parentSchema.then === undefined && parentSchema.else === undefined) {\n (0, util_1.checkStrictMode)(it, '\"if\" without \"then\" and \"else\" is ignored');\n }\n const hasThen = hasSchema(it, \"then\");\n const hasElse = hasSchema(it, \"else\");\n if (!hasThen && !hasElse)\n return;\n const valid = gen.let(\"valid\", true);\n const schValid = gen.name(\"_valid\");\n validateIf();\n cxt.reset();\n if (hasThen && hasElse) {\n const ifClause = gen.let(\"ifClause\");\n cxt.setParams({ ifClause });\n gen.if(schValid, validateClause(\"then\", ifClause), validateClause(\"else\", ifClause));\n }\n else if (hasThen) {\n gen.if(schValid, validateClause(\"then\"));\n }\n else {\n gen.if((0, codegen_1.not)(schValid), validateClause(\"else\"));\n }\n cxt.pass(valid, () => cxt.error(true));\n function validateIf() {\n const schCxt = cxt.subschema({\n keyword: \"if\",\n compositeRule: true,\n createErrors: false,\n allErrors: false,\n }, schValid);\n cxt.mergeEvaluated(schCxt);\n }\n function validateClause(keyword, ifClause) {\n return () => {\n const schCxt = cxt.subschema({ keyword }, schValid);\n gen.assign(valid, schValid);\n cxt.mergeValidEvaluated(schCxt, valid);\n if (ifClause)\n gen.assign(ifClause, (0, codegen_1._) `${keyword}`);\n else\n cxt.setParams({ ifClause: keyword });\n };\n }\n },\n};\nfunction hasSchema(it, keyword) {\n const schema = it.schema[keyword];\n return schema !== 
undefined && !(0, util_1.alwaysValidSchema)(it, schema);\n}\nexports.default = def;\n//# sourceMappingURL=if.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst additionalItems_1 = require(\"./additionalItems\");\nconst prefixItems_1 = require(\"./prefixItems\");\nconst items_1 = require(\"./items\");\nconst items2020_1 = require(\"./items2020\");\nconst contains_1 = require(\"./contains\");\nconst dependencies_1 = require(\"./dependencies\");\nconst propertyNames_1 = require(\"./propertyNames\");\nconst additionalProperties_1 = require(\"./additionalProperties\");\nconst properties_1 = require(\"./properties\");\nconst patternProperties_1 = require(\"./patternProperties\");\nconst not_1 = require(\"./not\");\nconst anyOf_1 = require(\"./anyOf\");\nconst oneOf_1 = require(\"./oneOf\");\nconst allOf_1 = require(\"./allOf\");\nconst if_1 = require(\"./if\");\nconst thenElse_1 = require(\"./thenElse\");\nfunction getApplicator(draft2020 = false) {\n const applicator = [\n // any\n not_1.default,\n anyOf_1.default,\n oneOf_1.default,\n allOf_1.default,\n if_1.default,\n thenElse_1.default,\n // object\n propertyNames_1.default,\n additionalProperties_1.default,\n dependencies_1.default,\n properties_1.default,\n patternProperties_1.default,\n ];\n // array\n if (draft2020)\n applicator.push(prefixItems_1.default, items2020_1.default);\n else\n applicator.push(additionalItems_1.default, items_1.default);\n applicator.push(contains_1.default);\n return applicator;\n}\nexports.default = getApplicator;\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.validateTuple = void 0;\nconst codegen_1 = require(\"../../compile/codegen\");\nconst util_1 = require(\"../../compile/util\");\nconst code_1 = require(\"../code\");\nconst def = {\n keyword: \"items\",\n type: \"array\",\n schemaType: [\"object\", \"array\", \"boolean\"],\n before: \"uniqueItems\",\n code(cxt) {\n const { schema, it } = cxt;\n if (Array.isArray(schema))\n return validateTuple(cxt, \"additionalItems\", schema);\n it.items = true;\n if ((0, util_1.alwaysValidSchema)(it, schema))\n return;\n cxt.ok((0, code_1.validateArray)(cxt));\n },\n};\nfunction validateTuple(cxt, extraItems, schArr = cxt.schema) {\n const { gen, parentSchema, data, keyword, it } = cxt;\n checkStrictTuple(parentSchema);\n if (it.opts.unevaluated && schArr.length && it.items !== true) {\n it.items = util_1.mergeEvaluated.items(gen, schArr.length, it.items);\n }\n const valid = gen.name(\"valid\");\n const len = gen.const(\"len\", (0, codegen_1._) `${data}.length`);\n schArr.forEach((sch, i) => {\n if ((0, util_1.alwaysValidSchema)(it, sch))\n return;\n gen.if((0, codegen_1._) `${len} > ${i}`, () => cxt.subschema({\n keyword,\n schemaProp: i,\n dataProp: i,\n }, valid));\n cxt.ok(valid);\n });\n function checkStrictTuple(sch) {\n const { opts, errSchemaPath } = it;\n const l = schArr.length;\n const fullTuple = l === sch.minItems && (l === sch.maxItems || sch[extraItems] === false);\n if (opts.strictTuples && !fullTuple) {\n const msg = `\"${keyword}\" is ${l}-tuple, but minItems or maxItems/${extraItems} are not specified or different at path \"${errSchemaPath}\"`;\n (0, util_1.checkStrictMode)(it, msg, opts.strictTuples);\n }\n }\n}\nexports.validateTuple = validateTuple;\nexports.default = def;\n//# sourceMappingURL=items.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = 
require(\"../../compile/codegen\");\nconst util_1 = require(\"../../compile/util\");\nconst code_1 = require(\"../code\");\nconst additionalItems_1 = require(\"./additionalItems\");\nconst error = {\n message: ({ params: { len } }) => (0, codegen_1.str) `must NOT have more than ${len} items`,\n params: ({ params: { len } }) => (0, codegen_1._) `{limit: ${len}}`,\n};\nconst def = {\n keyword: \"items\",\n type: \"array\",\n schemaType: [\"object\", \"boolean\"],\n before: \"uniqueItems\",\n error,\n code(cxt) {\n const { schema, parentSchema, it } = cxt;\n const { prefixItems } = parentSchema;\n it.items = true;\n if ((0, util_1.alwaysValidSchema)(it, schema))\n return;\n if (prefixItems)\n (0, additionalItems_1.validateAdditionalItems)(cxt, prefixItems);\n else\n cxt.ok((0, code_1.validateArray)(cxt));\n },\n};\nexports.default = def;\n//# sourceMappingURL=items2020.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst util_1 = require(\"../../compile/util\");\nconst def = {\n keyword: \"not\",\n schemaType: [\"object\", \"boolean\"],\n trackErrors: true,\n code(cxt) {\n const { gen, schema, it } = cxt;\n if ((0, util_1.alwaysValidSchema)(it, schema)) {\n cxt.fail();\n return;\n }\n const valid = gen.name(\"valid\");\n cxt.subschema({\n keyword: \"not\",\n compositeRule: true,\n createErrors: false,\n allErrors: false,\n }, valid);\n cxt.failResult(valid, () => cxt.reset(), () => cxt.error());\n },\n error: { message: \"must NOT be valid\" },\n};\nexports.default = def;\n//# sourceMappingURL=not.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = require(\"../../compile/codegen\");\nconst util_1 = require(\"../../compile/util\");\nconst error = {\n message: \"must match exactly one schema in oneOf\",\n params: ({ params }) => (0, codegen_1._) `{passingSchemas: ${params.passing}}`,\n};\nconst def = {\n keyword: \"oneOf\",\n schemaType: \"array\",\n trackErrors: true,\n error,\n code(cxt) {\n const { gen, schema, parentSchema, it } = cxt;\n /* istanbul ignore if */\n if (!Array.isArray(schema))\n throw new Error(\"ajv implementation error\");\n if (it.opts.discriminator && parentSchema.discriminator)\n return;\n const schArr = schema;\n const valid = gen.let(\"valid\", false);\n const passing = gen.let(\"passing\", null);\n const schValid = gen.name(\"_valid\");\n cxt.setParams({ passing });\n // TODO possibly fail straight away (with warning or exception) if there are two empty always valid schemas\n gen.block(validateOneOf);\n cxt.result(valid, () => cxt.reset(), () => cxt.error(true));\n function validateOneOf() {\n schArr.forEach((sch, i) => {\n let schCxt;\n if ((0, util_1.alwaysValidSchema)(it, sch)) {\n gen.var(schValid, true);\n }\n else {\n schCxt = cxt.subschema({\n keyword: \"oneOf\",\n schemaProp: i,\n compositeRule: true,\n }, schValid);\n }\n if (i > 0) {\n gen\n .if((0, codegen_1._) `${schValid} && ${valid}`)\n .assign(valid, false)\n .assign(passing, (0, codegen_1._) `[${passing}, ${i}]`)\n .else();\n }\n gen.if(schValid, () => {\n gen.assign(valid, true);\n gen.assign(passing, i);\n if (schCxt)\n cxt.mergeEvaluated(schCxt, codegen_1.Name);\n });\n });\n }\n },\n};\nexports.default = def;\n//# sourceMappingURL=oneOf.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst code_1 = require(\"../code\");\nconst codegen_1 = require(\"../../compile/codegen\");\nconst util_1 = require(\"../../compile/util\");\nconst util_2 = 
require(\"../../compile/util\");\nconst def = {\n keyword: \"patternProperties\",\n type: \"object\",\n schemaType: \"object\",\n code(cxt) {\n const { gen, schema, data, parentSchema, it } = cxt;\n const { opts } = it;\n const patterns = (0, code_1.allSchemaProperties)(schema);\n const alwaysValidPatterns = patterns.filter((p) => (0, util_1.alwaysValidSchema)(it, schema[p]));\n if (patterns.length === 0 ||\n (alwaysValidPatterns.length === patterns.length &&\n (!it.opts.unevaluated || it.props === true))) {\n return;\n }\n const checkProperties = opts.strictSchema && !opts.allowMatchingProperties && parentSchema.properties;\n const valid = gen.name(\"valid\");\n if (it.props !== true && !(it.props instanceof codegen_1.Name)) {\n it.props = (0, util_2.evaluatedPropsToName)(gen, it.props);\n }\n const { props } = it;\n validatePatternProperties();\n function validatePatternProperties() {\n for (const pat of patterns) {\n if (checkProperties)\n checkMatchingProperties(pat);\n if (it.allErrors) {\n validateProperties(pat);\n }\n else {\n gen.var(valid, true); // TODO var\n validateProperties(pat);\n gen.if(valid);\n }\n }\n }\n function checkMatchingProperties(pat) {\n for (const prop in checkProperties) {\n if (new RegExp(pat).test(prop)) {\n (0, util_1.checkStrictMode)(it, `property ${prop} matches pattern ${pat} (use allowMatchingProperties)`);\n }\n }\n }\n function validateProperties(pat) {\n gen.forIn(\"key\", data, (key) => {\n gen.if((0, codegen_1._) `${(0, code_1.usePattern)(cxt, pat)}.test(${key})`, () => {\n const alwaysValid = alwaysValidPatterns.includes(pat);\n if (!alwaysValid) {\n cxt.subschema({\n keyword: \"patternProperties\",\n schemaProp: pat,\n dataProp: key,\n dataPropType: util_2.Type.Str,\n }, valid);\n }\n if (it.opts.unevaluated && props !== true) {\n gen.assign((0, codegen_1._) `${props}[${key}]`, true);\n }\n else if (!alwaysValid && !it.allErrors) {\n // can short-circuit if `unevaluatedProperties` is not supported (opts.next === false)\n // or if all properties were evaluated (props === true)\n gen.if((0, codegen_1.not)(valid), () => gen.break());\n }\n });\n });\n }\n },\n};\nexports.default = def;\n//# sourceMappingURL=patternProperties.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst items_1 = require(\"./items\");\nconst def = {\n keyword: \"prefixItems\",\n type: \"array\",\n schemaType: [\"array\"],\n before: \"uniqueItems\",\n code: (cxt) => (0, items_1.validateTuple)(cxt, \"items\"),\n};\nexports.default = def;\n//# sourceMappingURL=prefixItems.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst validate_1 = require(\"../../compile/validate\");\nconst code_1 = require(\"../code\");\nconst util_1 = require(\"../../compile/util\");\nconst additionalProperties_1 = require(\"./additionalProperties\");\nconst def = {\n keyword: \"properties\",\n type: \"object\",\n schemaType: \"object\",\n code(cxt) {\n const { gen, schema, parentSchema, data, it } = cxt;\n if (it.opts.removeAdditional === \"all\" && parentSchema.additionalProperties === undefined) {\n additionalProperties_1.default.code(new validate_1.KeywordCxt(it, additionalProperties_1.default, \"additionalProperties\"));\n }\n const allProps = (0, code_1.allSchemaProperties)(schema);\n for (const prop of allProps) {\n it.definedProperties.add(prop);\n }\n if (it.opts.unevaluated && allProps.length && it.props !== true) {\n it.props = util_1.mergeEvaluated.props(gen, (0, util_1.toHash)(allProps), it.props);\n 
}\n const properties = allProps.filter((p) => !(0, util_1.alwaysValidSchema)(it, schema[p]));\n if (properties.length === 0)\n return;\n const valid = gen.name(\"valid\");\n for (const prop of properties) {\n if (hasDefault(prop)) {\n applyPropertySchema(prop);\n }\n else {\n gen.if((0, code_1.propertyInData)(gen, data, prop, it.opts.ownProperties));\n applyPropertySchema(prop);\n if (!it.allErrors)\n gen.else().var(valid, true);\n gen.endIf();\n }\n cxt.it.definedProperties.add(prop);\n cxt.ok(valid);\n }\n function hasDefault(prop) {\n return it.opts.useDefaults && !it.compositeRule && schema[prop].default !== undefined;\n }\n function applyPropertySchema(prop) {\n cxt.subschema({\n keyword: \"properties\",\n schemaProp: prop,\n dataProp: prop,\n }, valid);\n }\n },\n};\nexports.default = def;\n//# sourceMappingURL=properties.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = require(\"../../compile/codegen\");\nconst util_1 = require(\"../../compile/util\");\nconst error = {\n message: \"property name must be valid\",\n params: ({ params }) => (0, codegen_1._) `{propertyName: ${params.propertyName}}`,\n};\nconst def = {\n keyword: \"propertyNames\",\n type: \"object\",\n schemaType: [\"object\", \"boolean\"],\n error,\n code(cxt) {\n const { gen, schema, data, it } = cxt;\n if ((0, util_1.alwaysValidSchema)(it, schema))\n return;\n const valid = gen.name(\"valid\");\n gen.forIn(\"key\", data, (key) => {\n cxt.setParams({ propertyName: key });\n cxt.subschema({\n keyword: \"propertyNames\",\n data: key,\n dataTypes: [\"string\"],\n propertyName: key,\n compositeRule: true,\n }, valid);\n gen.if((0, codegen_1.not)(valid), () => {\n cxt.error(true);\n if (!it.allErrors)\n gen.break();\n });\n });\n cxt.ok(valid);\n },\n};\nexports.default = def;\n//# sourceMappingURL=propertyNames.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst util_1 = require(\"../../compile/util\");\nconst def = {\n keyword: [\"then\", \"else\"],\n schemaType: [\"object\", \"boolean\"],\n code({ keyword, parentSchema, it }) {\n if (parentSchema.if === undefined)\n (0, util_1.checkStrictMode)(it, `\"${keyword}\" without \"if\" is ignored`);\n },\n};\nexports.default = def;\n//# sourceMappingURL=thenElse.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.validateUnion = exports.validateArray = exports.usePattern = exports.callValidateCode = exports.schemaProperties = exports.allSchemaProperties = exports.noPropertyInData = exports.propertyInData = exports.isOwnProperty = exports.hasPropFunc = exports.reportMissingProp = exports.checkMissingProp = exports.checkReportMissingProp = void 0;\nconst codegen_1 = require(\"../compile/codegen\");\nconst util_1 = require(\"../compile/util\");\nconst names_1 = require(\"../compile/names\");\nconst util_2 = require(\"../compile/util\");\nfunction checkReportMissingProp(cxt, prop) {\n const { gen, data, it } = cxt;\n gen.if(noPropertyInData(gen, data, prop, it.opts.ownProperties), () => {\n cxt.setParams({ missingProperty: (0, codegen_1._) `${prop}` }, true);\n cxt.error();\n });\n}\nexports.checkReportMissingProp = checkReportMissingProp;\nfunction checkMissingProp({ gen, data, it: { opts } }, properties, missing) {\n return (0, codegen_1.or)(...properties.map((prop) => (0, codegen_1.and)(noPropertyInData(gen, data, prop, opts.ownProperties), (0, codegen_1._) `${missing} = ${prop}`)));\n}\nexports.checkMissingProp = 
checkMissingProp;\nfunction reportMissingProp(cxt, missing) {\n cxt.setParams({ missingProperty: missing }, true);\n cxt.error();\n}\nexports.reportMissingProp = reportMissingProp;\nfunction hasPropFunc(gen) {\n return gen.scopeValue(\"func\", {\n // eslint-disable-next-line @typescript-eslint/unbound-method\n ref: Object.prototype.hasOwnProperty,\n code: (0, codegen_1._) `Object.prototype.hasOwnProperty`,\n });\n}\nexports.hasPropFunc = hasPropFunc;\nfunction isOwnProperty(gen, data, property) {\n return (0, codegen_1._) `${hasPropFunc(gen)}.call(${data}, ${property})`;\n}\nexports.isOwnProperty = isOwnProperty;\nfunction propertyInData(gen, data, property, ownProperties) {\n const cond = (0, codegen_1._) `${data}${(0, codegen_1.getProperty)(property)} !== undefined`;\n return ownProperties ? (0, codegen_1._) `${cond} && ${isOwnProperty(gen, data, property)}` : cond;\n}\nexports.propertyInData = propertyInData;\nfunction noPropertyInData(gen, data, property, ownProperties) {\n const cond = (0, codegen_1._) `${data}${(0, codegen_1.getProperty)(property)} === undefined`;\n return ownProperties ? (0, codegen_1.or)(cond, (0, codegen_1.not)(isOwnProperty(gen, data, property))) : cond;\n}\nexports.noPropertyInData = noPropertyInData;\nfunction allSchemaProperties(schemaMap) {\n return schemaMap ? Object.keys(schemaMap).filter((p) => p !== \"__proto__\") : [];\n}\nexports.allSchemaProperties = allSchemaProperties;\nfunction schemaProperties(it, schemaMap) {\n return allSchemaProperties(schemaMap).filter((p) => !(0, util_1.alwaysValidSchema)(it, schemaMap[p]));\n}\nexports.schemaProperties = schemaProperties;\nfunction callValidateCode({ schemaCode, data, it: { gen, topSchemaRef, schemaPath, errorPath }, it }, func, context, passSchema) {\n const dataAndSchema = passSchema ? (0, codegen_1._) `${schemaCode}, ${data}, ${topSchemaRef}${schemaPath}` : data;\n const valCxt = [\n [names_1.default.instancePath, (0, codegen_1.strConcat)(names_1.default.instancePath, errorPath)],\n [names_1.default.parentData, it.parentData],\n [names_1.default.parentDataProperty, it.parentDataProperty],\n [names_1.default.rootData, names_1.default.rootData],\n ];\n if (it.opts.dynamicRef)\n valCxt.push([names_1.default.dynamicAnchors, names_1.default.dynamicAnchors]);\n const args = (0, codegen_1._) `${dataAndSchema}, ${gen.object(...valCxt)}`;\n return context !== codegen_1.nil ? (0, codegen_1._) `${func}.call(${context}, ${args})` : (0, codegen_1._) `${func}(${args})`;\n}\nexports.callValidateCode = callValidateCode;\nconst newRegExp = (0, codegen_1._) `new RegExp`;\nfunction usePattern({ gen, it: { opts } }, pattern) {\n const u = opts.unicodeRegExp ? \"u\" : \"\";\n const { regExp } = opts.code;\n const rx = regExp(pattern, u);\n return gen.scopeValue(\"pattern\", {\n key: rx.toString(),\n ref: rx,\n code: (0, codegen_1._) `${regExp.code === \"new RegExp\" ? 
newRegExp : (0, util_2.useFunc)(gen, regExp)}(${pattern}, ${u})`,\n });\n}\nexports.usePattern = usePattern;\nfunction validateArray(cxt) {\n const { gen, data, keyword, it } = cxt;\n const valid = gen.name(\"valid\");\n if (it.allErrors) {\n const validArr = gen.let(\"valid\", true);\n validateItems(() => gen.assign(validArr, false));\n return validArr;\n }\n gen.var(valid, true);\n validateItems(() => gen.break());\n return valid;\n function validateItems(notValid) {\n const len = gen.const(\"len\", (0, codegen_1._) `${data}.length`);\n gen.forRange(\"i\", 0, len, (i) => {\n cxt.subschema({\n keyword,\n dataProp: i,\n dataPropType: util_1.Type.Num,\n }, valid);\n gen.if((0, codegen_1.not)(valid), notValid);\n });\n }\n}\nexports.validateArray = validateArray;\nfunction validateUnion(cxt) {\n const { gen, schema, keyword, it } = cxt;\n /* istanbul ignore if */\n if (!Array.isArray(schema))\n throw new Error(\"ajv implementation error\");\n const alwaysValid = schema.some((sch) => (0, util_1.alwaysValidSchema)(it, sch));\n if (alwaysValid && !it.opts.unevaluated)\n return;\n const valid = gen.let(\"valid\", false);\n const schValid = gen.name(\"_valid\");\n gen.block(() => schema.forEach((_sch, i) => {\n const schCxt = cxt.subschema({\n keyword,\n schemaProp: i,\n compositeRule: true,\n }, schValid);\n gen.assign(valid, (0, codegen_1._) `${valid} || ${schValid}`);\n const merged = cxt.mergeValidEvaluated(schCxt, schValid);\n // can short-circuit if `unevaluatedProperties/Items` not supported (opts.unevaluated !== true)\n // or if all properties and items were evaluated (it.props === true && it.items === true)\n if (!merged)\n gen.if((0, codegen_1.not)(valid));\n }));\n cxt.result(valid, () => cxt.reset(), () => cxt.error(true));\n}\nexports.validateUnion = validateUnion;\n//# sourceMappingURL=code.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst def = {\n keyword: \"id\",\n code() {\n throw new Error('NOT SUPPORTED: keyword \"id\", use \"$id\" for schema ID');\n },\n};\nexports.default = def;\n//# sourceMappingURL=id.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst id_1 = require(\"./id\");\nconst ref_1 = require(\"./ref\");\nconst core = [\n \"$schema\",\n \"$id\",\n \"$defs\",\n \"$vocabulary\",\n { keyword: \"$comment\" },\n \"definitions\",\n id_1.default,\n ref_1.default,\n];\nexports.default = core;\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.callRef = exports.getValidate = void 0;\nconst ref_error_1 = require(\"../../compile/ref_error\");\nconst code_1 = require(\"../code\");\nconst codegen_1 = require(\"../../compile/codegen\");\nconst names_1 = require(\"../../compile/names\");\nconst compile_1 = require(\"../../compile\");\nconst util_1 = require(\"../../compile/util\");\nconst def = {\n keyword: \"$ref\",\n schemaType: \"string\",\n code(cxt) {\n const { gen, schema: $ref, it } = cxt;\n const { baseId, schemaEnv: env, validateName, opts, self } = it;\n const { root } = env;\n if (($ref === \"#\" || $ref === \"#/\") && baseId === root.baseId)\n return callRootRef();\n const schOrEnv = compile_1.resolveRef.call(self, root, baseId, $ref);\n if (schOrEnv === undefined)\n throw new ref_error_1.default(it.opts.uriResolver, baseId, $ref);\n if (schOrEnv instanceof compile_1.SchemaEnv)\n return callValidate(schOrEnv);\n return inlineRefSchema(schOrEnv);\n function callRootRef() {\n if (env === root)\n return 
callRef(cxt, validateName, env, env.$async);\n const rootName = gen.scopeValue(\"root\", { ref: root });\n return callRef(cxt, (0, codegen_1._) `${rootName}.validate`, root, root.$async);\n }\n function callValidate(sch) {\n const v = getValidate(cxt, sch);\n callRef(cxt, v, sch, sch.$async);\n }\n function inlineRefSchema(sch) {\n const schName = gen.scopeValue(\"schema\", opts.code.source === true ? { ref: sch, code: (0, codegen_1.stringify)(sch) } : { ref: sch });\n const valid = gen.name(\"valid\");\n const schCxt = cxt.subschema({\n schema: sch,\n dataTypes: [],\n schemaPath: codegen_1.nil,\n topSchemaRef: schName,\n errSchemaPath: $ref,\n }, valid);\n cxt.mergeEvaluated(schCxt);\n cxt.ok(valid);\n }\n },\n};\nfunction getValidate(cxt, sch) {\n const { gen } = cxt;\n return sch.validate\n ? gen.scopeValue(\"validate\", { ref: sch.validate })\n : (0, codegen_1._) `${gen.scopeValue(\"wrapper\", { ref: sch })}.validate`;\n}\nexports.getValidate = getValidate;\nfunction callRef(cxt, v, sch, $async) {\n const { gen, it } = cxt;\n const { allErrors, schemaEnv: env, opts } = it;\n const passCxt = opts.passContext ? names_1.default.this : codegen_1.nil;\n if ($async)\n callAsyncRef();\n else\n callSyncRef();\n function callAsyncRef() {\n if (!env.$async)\n throw new Error(\"async schema referenced by sync schema\");\n const valid = gen.let(\"valid\");\n gen.try(() => {\n gen.code((0, codegen_1._) `await ${(0, code_1.callValidateCode)(cxt, v, passCxt)}`);\n addEvaluatedFrom(v); // TODO will not work with async, it has to be returned with the result\n if (!allErrors)\n gen.assign(valid, true);\n }, (e) => {\n gen.if((0, codegen_1._) `!(${e} instanceof ${it.ValidationError})`, () => gen.throw(e));\n addErrorsFrom(e);\n if (!allErrors)\n gen.assign(valid, false);\n });\n cxt.ok(valid);\n }\n function callSyncRef() {\n cxt.result((0, code_1.callValidateCode)(cxt, v, passCxt), () => addEvaluatedFrom(v), () => addErrorsFrom(v));\n }\n function addErrorsFrom(source) {\n const errs = (0, codegen_1._) `${source}.errors`;\n gen.assign(names_1.default.vErrors, (0, codegen_1._) `${names_1.default.vErrors} === null ? ${errs} : ${names_1.default.vErrors}.concat(${errs})`); // TODO tagged\n gen.assign(names_1.default.errors, (0, codegen_1._) `${names_1.default.vErrors}.length`);\n }\n function addEvaluatedFrom(source) {\n var _a;\n if (!it.opts.unevaluated)\n return;\n const schEvaluated = (_a = sch === null || sch === void 0 ? void 0 : sch.validate) === null || _a === void 0 ? 
void 0 : _a.evaluated;\n // TODO refactor\n if (it.props !== true) {\n if (schEvaluated && !schEvaluated.dynamicProps) {\n if (schEvaluated.props !== undefined) {\n it.props = util_1.mergeEvaluated.props(gen, schEvaluated.props, it.props);\n }\n }\n else {\n const props = gen.var(\"props\", (0, codegen_1._) `${source}.evaluated.props`);\n it.props = util_1.mergeEvaluated.props(gen, props, it.props, codegen_1.Name);\n }\n }\n if (it.items !== true) {\n if (schEvaluated && !schEvaluated.dynamicItems) {\n if (schEvaluated.items !== undefined) {\n it.items = util_1.mergeEvaluated.items(gen, schEvaluated.items, it.items);\n }\n }\n else {\n const items = gen.var(\"items\", (0, codegen_1._) `${source}.evaluated.items`);\n it.items = util_1.mergeEvaluated.items(gen, items, it.items, codegen_1.Name);\n }\n }\n }\n}\nexports.callRef = callRef;\nexports.default = def;\n//# sourceMappingURL=ref.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = require(\"../../compile/codegen\");\nconst types_1 = require(\"../discriminator/types\");\nconst compile_1 = require(\"../../compile\");\nconst util_1 = require(\"../../compile/util\");\nconst error = {\n message: ({ params: { discrError, tagName } }) => discrError === types_1.DiscrError.Tag\n ? `tag \"${tagName}\" must be string`\n : `value of tag \"${tagName}\" must be in oneOf`,\n params: ({ params: { discrError, tag, tagName } }) => (0, codegen_1._) `{error: ${discrError}, tag: ${tagName}, tagValue: ${tag}}`,\n};\nconst def = {\n keyword: \"discriminator\",\n type: \"object\",\n schemaType: \"object\",\n error,\n code(cxt) {\n const { gen, data, schema, parentSchema, it } = cxt;\n const { oneOf } = parentSchema;\n if (!it.opts.discriminator) {\n throw new Error(\"discriminator: requires discriminator option\");\n }\n const tagName = schema.propertyName;\n if (typeof tagName != \"string\")\n throw new Error(\"discriminator: requires propertyName\");\n if (schema.mapping)\n throw new Error(\"discriminator: mapping is not supported\");\n if (!oneOf)\n throw new Error(\"discriminator: requires oneOf keyword\");\n const valid = gen.let(\"valid\", false);\n const tag = gen.const(\"tag\", (0, codegen_1._) `${data}${(0, codegen_1.getProperty)(tagName)}`);\n gen.if((0, codegen_1._) `typeof ${tag} == \"string\"`, () => validateMapping(), () => cxt.error(false, { discrError: types_1.DiscrError.Tag, tag, tagName }));\n cxt.ok(valid);\n function validateMapping() {\n const mapping = getMapping();\n gen.if(false);\n for (const tagValue in mapping) {\n gen.elseIf((0, codegen_1._) `${tag} === ${tagValue}`);\n gen.assign(valid, applyTagSchema(mapping[tagValue]));\n }\n gen.else();\n cxt.error(false, { discrError: types_1.DiscrError.Mapping, tag, tagName });\n gen.endIf();\n }\n function applyTagSchema(schemaProp) {\n const _valid = gen.name(\"valid\");\n const schCxt = cxt.subschema({ keyword: \"oneOf\", schemaProp }, _valid);\n cxt.mergeEvaluated(schCxt, codegen_1.Name);\n return _valid;\n }\n function getMapping() {\n var _a;\n const oneOfMapping = {};\n const topRequired = hasRequired(parentSchema);\n let tagRequired = true;\n for (let i = 0; i < oneOf.length; i++) {\n let sch = oneOf[i];\n if ((sch === null || sch === void 0 ? void 0 : sch.$ref) && !(0, util_1.schemaHasRulesButRef)(sch, it.self.RULES)) {\n sch = compile_1.resolveRef.call(it.self, it.schemaEnv.root, it.baseId, sch === null || sch === void 0 ? 
void 0 : sch.$ref);\n if (sch instanceof compile_1.SchemaEnv)\n sch = sch.schema;\n }\n const propSch = (_a = sch === null || sch === void 0 ? void 0 : sch.properties) === null || _a === void 0 ? void 0 : _a[tagName];\n if (typeof propSch != \"object\") {\n throw new Error(`discriminator: oneOf subschemas (or referenced schemas) must have \"properties/${tagName}\"`);\n }\n tagRequired = tagRequired && (topRequired || hasRequired(sch));\n addMappings(propSch, i);\n }\n if (!tagRequired)\n throw new Error(`discriminator: \"${tagName}\" must be required`);\n return oneOfMapping;\n function hasRequired({ required }) {\n return Array.isArray(required) && required.includes(tagName);\n }\n function addMappings(sch, i) {\n if (sch.const) {\n addMapping(sch.const, i);\n }\n else if (sch.enum) {\n for (const tagValue of sch.enum) {\n addMapping(tagValue, i);\n }\n }\n else {\n throw new Error(`discriminator: \"properties/${tagName}\" must have \"const\" or \"enum\"`);\n }\n }\n function addMapping(tagValue, i) {\n if (typeof tagValue != \"string\" || tagValue in oneOfMapping) {\n throw new Error(`discriminator: \"${tagName}\" values must be unique strings`);\n }\n oneOfMapping[tagValue] = i;\n }\n }\n },\n};\nexports.default = def;\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.DiscrError = void 0;\nvar DiscrError;\n(function (DiscrError) {\n DiscrError[\"Tag\"] = \"tag\";\n DiscrError[\"Mapping\"] = \"mapping\";\n})(DiscrError = exports.DiscrError || (exports.DiscrError = {}));\n//# sourceMappingURL=types.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst core_1 = require(\"./core\");\nconst validation_1 = require(\"./validation\");\nconst applicator_1 = require(\"./applicator\");\nconst format_1 = require(\"./format\");\nconst metadata_1 = require(\"./metadata\");\nconst draft7Vocabularies = [\n core_1.default,\n validation_1.default,\n (0, applicator_1.default)(),\n format_1.default,\n metadata_1.metadataVocabulary,\n metadata_1.contentVocabulary,\n];\nexports.default = draft7Vocabularies;\n//# sourceMappingURL=draft7.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = require(\"../../compile/codegen\");\nconst error = {\n message: ({ schemaCode }) => (0, codegen_1.str) `must match format \"${schemaCode}\"`,\n params: ({ schemaCode }) => (0, codegen_1._) `{format: ${schemaCode}}`,\n};\nconst def = {\n keyword: \"format\",\n type: [\"number\", \"string\"],\n schemaType: \"string\",\n $data: true,\n error,\n code(cxt, ruleType) {\n const { gen, data, $data, schema, schemaCode, it } = cxt;\n const { opts, errSchemaPath, schemaEnv, self } = it;\n if (!opts.validateFormats)\n return;\n if ($data)\n validate$DataFormat();\n else\n validateFormat();\n function validate$DataFormat() {\n const fmts = gen.scopeValue(\"formats\", {\n ref: self.formats,\n code: opts.code.formats,\n });\n const fDef = gen.const(\"fDef\", (0, codegen_1._) `${fmts}[${schemaCode}]`);\n const fType = gen.let(\"fType\");\n const format = gen.let(\"format\");\n // TODO simplify\n gen.if((0, codegen_1._) `typeof ${fDef} == \"object\" && !(${fDef} instanceof RegExp)`, () => gen.assign(fType, (0, codegen_1._) `${fDef}.type || \"string\"`).assign(format, (0, codegen_1._) `${fDef}.validate`), () => gen.assign(fType, (0, codegen_1._) `\"string\"`).assign(format, fDef));\n cxt.fail$data((0, codegen_1.or)(unknownFmt(), invalidFmt()));\n function 
unknownFmt() {\n if (opts.strictSchema === false)\n return codegen_1.nil;\n return (0, codegen_1._) `${schemaCode} && !${format}`;\n }\n function invalidFmt() {\n const callFormat = schemaEnv.$async\n ? (0, codegen_1._) `(${fDef}.async ? await ${format}(${data}) : ${format}(${data}))`\n : (0, codegen_1._) `${format}(${data})`;\n const validData = (0, codegen_1._) `(typeof ${format} == \"function\" ? ${callFormat} : ${format}.test(${data}))`;\n return (0, codegen_1._) `${format} && ${format} !== true && ${fType} === ${ruleType} && !${validData}`;\n }\n }\n function validateFormat() {\n const formatDef = self.formats[schema];\n if (!formatDef) {\n unknownFormat();\n return;\n }\n if (formatDef === true)\n return;\n const [fmtType, format, fmtRef] = getFormat(formatDef);\n if (fmtType === ruleType)\n cxt.pass(validCondition());\n function unknownFormat() {\n if (opts.strictSchema === false) {\n self.logger.warn(unknownMsg());\n return;\n }\n throw new Error(unknownMsg());\n function unknownMsg() {\n return `unknown format \"${schema}\" ignored in schema at path \"${errSchemaPath}\"`;\n }\n }\n function getFormat(fmtDef) {\n const code = fmtDef instanceof RegExp\n ? (0, codegen_1.regexpCode)(fmtDef)\n : opts.code.formats\n ? (0, codegen_1._) `${opts.code.formats}${(0, codegen_1.getProperty)(schema)}`\n : undefined;\n const fmt = gen.scopeValue(\"formats\", { key: schema, ref: fmtDef, code });\n if (typeof fmtDef == \"object\" && !(fmtDef instanceof RegExp)) {\n return [fmtDef.type || \"string\", fmtDef.validate, (0, codegen_1._) `${fmt}.validate`];\n }\n return [\"string\", fmtDef, fmt];\n }\n function validCondition() {\n if (typeof formatDef == \"object\" && !(formatDef instanceof RegExp) && formatDef.async) {\n if (!schemaEnv.$async)\n throw new Error(\"async format in sync schema\");\n return (0, codegen_1._) `await ${fmtRef}(${data})`;\n }\n return typeof format == \"function\" ? 
(0, codegen_1._) `${fmtRef}(${data})` : (0, codegen_1._) `${fmtRef}.test(${data})`;\n }\n }\n },\n};\nexports.default = def;\n//# sourceMappingURL=format.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst format_1 = require(\"./format\");\nconst format = [format_1.default];\nexports.default = format;\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.contentVocabulary = exports.metadataVocabulary = void 0;\nexports.metadataVocabulary = [\n \"title\",\n \"description\",\n \"default\",\n \"deprecated\",\n \"readOnly\",\n \"writeOnly\",\n \"examples\",\n];\nexports.contentVocabulary = [\n \"contentMediaType\",\n \"contentEncoding\",\n \"contentSchema\",\n];\n//# sourceMappingURL=metadata.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = require(\"../../compile/codegen\");\nconst util_1 = require(\"../../compile/util\");\nconst equal_1 = require(\"../../runtime/equal\");\nconst error = {\n message: \"must be equal to constant\",\n params: ({ schemaCode }) => (0, codegen_1._) `{allowedValue: ${schemaCode}}`,\n};\nconst def = {\n keyword: \"const\",\n $data: true,\n error,\n code(cxt) {\n const { gen, data, $data, schemaCode, schema } = cxt;\n if ($data || (schema && typeof schema == \"object\")) {\n cxt.fail$data((0, codegen_1._) `!${(0, util_1.useFunc)(gen, equal_1.default)}(${data}, ${schemaCode})`);\n }\n else {\n cxt.fail((0, codegen_1._) `${schema} !== ${data}`);\n }\n },\n};\nexports.default = def;\n//# sourceMappingURL=const.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = require(\"../../compile/codegen\");\nconst util_1 = require(\"../../compile/util\");\nconst equal_1 = require(\"../../runtime/equal\");\nconst error = {\n message: \"must be equal to one of the allowed values\",\n params: ({ schemaCode }) => (0, codegen_1._) `{allowedValues: ${schemaCode}}`,\n};\nconst def = {\n keyword: \"enum\",\n schemaType: \"array\",\n $data: true,\n error,\n code(cxt) {\n const { gen, data, $data, schema, schemaCode, it } = cxt;\n if (!$data && schema.length === 0)\n throw new Error(\"enum must have non-empty array\");\n const useLoop = schema.length >= it.opts.loopEnum;\n let eql;\n const getEql = () => (eql !== null && eql !== void 0 ? eql : (eql = (0, util_1.useFunc)(gen, equal_1.default)));\n let valid;\n if (useLoop || $data) {\n valid = gen.let(\"valid\");\n cxt.block$data(valid, loopEnum);\n }\n else {\n /* istanbul ignore if */\n if (!Array.isArray(schema))\n throw new Error(\"ajv implementation error\");\n const vSchema = gen.const(\"vSchema\", schemaCode);\n valid = (0, codegen_1.or)(...schema.map((_x, i) => equalCode(vSchema, i)));\n }\n cxt.pass(valid);\n function loopEnum() {\n gen.assign(valid, false);\n gen.forOf(\"v\", schemaCode, (v) => gen.if((0, codegen_1._) `${getEql()}(${data}, ${v})`, () => gen.assign(valid, true).break()));\n }\n function equalCode(vSchema, i) {\n const sch = schema[i];\n return typeof sch === \"object\" && sch !== null\n ? 
(0, codegen_1._) `${getEql()}(${data}, ${vSchema}[${i}])`\n : (0, codegen_1._) `${data} === ${sch}`;\n }\n },\n};\nexports.default = def;\n//# sourceMappingURL=enum.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst limitNumber_1 = require(\"./limitNumber\");\nconst multipleOf_1 = require(\"./multipleOf\");\nconst limitLength_1 = require(\"./limitLength\");\nconst pattern_1 = require(\"./pattern\");\nconst limitProperties_1 = require(\"./limitProperties\");\nconst required_1 = require(\"./required\");\nconst limitItems_1 = require(\"./limitItems\");\nconst uniqueItems_1 = require(\"./uniqueItems\");\nconst const_1 = require(\"./const\");\nconst enum_1 = require(\"./enum\");\nconst validation = [\n // number\n limitNumber_1.default,\n multipleOf_1.default,\n // string\n limitLength_1.default,\n pattern_1.default,\n // object\n limitProperties_1.default,\n required_1.default,\n // array\n limitItems_1.default,\n uniqueItems_1.default,\n // any\n { keyword: \"type\", schemaType: [\"string\", \"array\"] },\n { keyword: \"nullable\", schemaType: \"boolean\" },\n const_1.default,\n enum_1.default,\n];\nexports.default = validation;\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = require(\"../../compile/codegen\");\nconst error = {\n message({ keyword, schemaCode }) {\n const comp = keyword === \"maxItems\" ? \"more\" : \"fewer\";\n return (0, codegen_1.str) `must NOT have ${comp} than ${schemaCode} items`;\n },\n params: ({ schemaCode }) => (0, codegen_1._) `{limit: ${schemaCode}}`,\n};\nconst def = {\n keyword: [\"maxItems\", \"minItems\"],\n type: \"array\",\n schemaType: \"number\",\n $data: true,\n error,\n code(cxt) {\n const { keyword, data, schemaCode } = cxt;\n const op = keyword === \"maxItems\" ? codegen_1.operators.GT : codegen_1.operators.LT;\n cxt.fail$data((0, codegen_1._) `${data}.length ${op} ${schemaCode}`);\n },\n};\nexports.default = def;\n//# sourceMappingURL=limitItems.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = require(\"../../compile/codegen\");\nconst util_1 = require(\"../../compile/util\");\nconst ucs2length_1 = require(\"../../runtime/ucs2length\");\nconst error = {\n message({ keyword, schemaCode }) {\n const comp = keyword === \"maxLength\" ? \"more\" : \"fewer\";\n return (0, codegen_1.str) `must NOT have ${comp} than ${schemaCode} characters`;\n },\n params: ({ schemaCode }) => (0, codegen_1._) `{limit: ${schemaCode}}`,\n};\nconst def = {\n keyword: [\"maxLength\", \"minLength\"],\n type: \"string\",\n schemaType: \"number\",\n $data: true,\n error,\n code(cxt) {\n const { keyword, data, schemaCode, it } = cxt;\n const op = keyword === \"maxLength\" ? codegen_1.operators.GT : codegen_1.operators.LT;\n const len = it.opts.unicode === false ? 
(0, codegen_1._) `${data}.length` : (0, codegen_1._) `${(0, util_1.useFunc)(cxt.gen, ucs2length_1.default)}(${data})`;\n cxt.fail$data((0, codegen_1._) `${len} ${op} ${schemaCode}`);\n },\n};\nexports.default = def;\n//# sourceMappingURL=limitLength.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = require(\"../../compile/codegen\");\nconst ops = codegen_1.operators;\nconst KWDs = {\n maximum: { okStr: \"<=\", ok: ops.LTE, fail: ops.GT },\n minimum: { okStr: \">=\", ok: ops.GTE, fail: ops.LT },\n exclusiveMaximum: { okStr: \"<\", ok: ops.LT, fail: ops.GTE },\n exclusiveMinimum: { okStr: \">\", ok: ops.GT, fail: ops.LTE },\n};\nconst error = {\n message: ({ keyword, schemaCode }) => (0, codegen_1.str) `must be ${KWDs[keyword].okStr} ${schemaCode}`,\n params: ({ keyword, schemaCode }) => (0, codegen_1._) `{comparison: ${KWDs[keyword].okStr}, limit: ${schemaCode}}`,\n};\nconst def = {\n keyword: Object.keys(KWDs),\n type: \"number\",\n schemaType: \"number\",\n $data: true,\n error,\n code(cxt) {\n const { keyword, data, schemaCode } = cxt;\n cxt.fail$data((0, codegen_1._) `${data} ${KWDs[keyword].fail} ${schemaCode} || isNaN(${data})`);\n },\n};\nexports.default = def;\n//# sourceMappingURL=limitNumber.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = require(\"../../compile/codegen\");\nconst error = {\n message({ keyword, schemaCode }) {\n const comp = keyword === \"maxProperties\" ? \"more\" : \"fewer\";\n return (0, codegen_1.str) `must NOT have ${comp} than ${schemaCode} properties`;\n },\n params: ({ schemaCode }) => (0, codegen_1._) `{limit: ${schemaCode}}`,\n};\nconst def = {\n keyword: [\"maxProperties\", \"minProperties\"],\n type: \"object\",\n schemaType: \"number\",\n $data: true,\n error,\n code(cxt) {\n const { keyword, data, schemaCode } = cxt;\n const op = keyword === \"maxProperties\" ? codegen_1.operators.GT : codegen_1.operators.LT;\n cxt.fail$data((0, codegen_1._) `Object.keys(${data}).length ${op} ${schemaCode}`);\n },\n};\nexports.default = def;\n//# sourceMappingURL=limitProperties.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst codegen_1 = require(\"../../compile/codegen\");\nconst error = {\n message: ({ schemaCode }) => (0, codegen_1.str) `must be multiple of ${schemaCode}`,\n params: ({ schemaCode }) => (0, codegen_1._) `{multipleOf: ${schemaCode}}`,\n};\nconst def = {\n keyword: \"multipleOf\",\n type: \"number\",\n schemaType: \"number\",\n $data: true,\n error,\n code(cxt) {\n const { gen, data, schemaCode, it } = cxt;\n // const bdt = bad$DataType(schemaCode, def.schemaType, $data)\n const prec = it.opts.multipleOfPrecision;\n const res = gen.let(\"res\");\n const invalid = prec\n ? 
(0, codegen_1._) `Math.abs(Math.round(${res}) - ${res}) > 1e-${prec}`\n : (0, codegen_1._) `${res} !== parseInt(${res})`;\n cxt.fail$data((0, codegen_1._) `(${schemaCode} === 0 || (${res} = ${data}/${schemaCode}, ${invalid}))`);\n },\n};\nexports.default = def;\n//# sourceMappingURL=multipleOf.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst code_1 = require(\"../code\");\nconst codegen_1 = require(\"../../compile/codegen\");\nconst error = {\n message: ({ schemaCode }) => (0, codegen_1.str) `must match pattern \"${schemaCode}\"`,\n params: ({ schemaCode }) => (0, codegen_1._) `{pattern: ${schemaCode}}`,\n};\nconst def = {\n keyword: \"pattern\",\n type: \"string\",\n schemaType: \"string\",\n $data: true,\n error,\n code(cxt) {\n const { data, $data, schema, schemaCode, it } = cxt;\n // TODO regexp should be wrapped in try/catchs\n const u = it.opts.unicodeRegExp ? \"u\" : \"\";\n const regExp = $data ? (0, codegen_1._) `(new RegExp(${schemaCode}, ${u}))` : (0, code_1.usePattern)(cxt, schema);\n cxt.fail$data((0, codegen_1._) `!${regExp}.test(${data})`);\n },\n};\nexports.default = def;\n//# sourceMappingURL=pattern.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst code_1 = require(\"../code\");\nconst codegen_1 = require(\"../../compile/codegen\");\nconst util_1 = require(\"../../compile/util\");\nconst error = {\n message: ({ params: { missingProperty } }) => (0, codegen_1.str) `must have required property '${missingProperty}'`,\n params: ({ params: { missingProperty } }) => (0, codegen_1._) `{missingProperty: ${missingProperty}}`,\n};\nconst def = {\n keyword: \"required\",\n type: \"object\",\n schemaType: \"array\",\n $data: true,\n error,\n code(cxt) {\n const { gen, schema, schemaCode, data, $data, it } = cxt;\n const { opts } = it;\n if (!$data && schema.length === 0)\n return;\n const useLoop = schema.length >= opts.loopRequired;\n if (it.allErrors)\n allErrorsMode();\n else\n exitOnErrorMode();\n if (opts.strictRequired) {\n const props = cxt.parentSchema.properties;\n const { definedProperties } = cxt.it;\n for (const requiredKey of schema) {\n if ((props === null || props === void 0 ? 
void 0 : props[requiredKey]) === undefined && !definedProperties.has(requiredKey)) {\n const schemaPath = it.schemaEnv.baseId + it.errSchemaPath;\n const msg = `required property \"${requiredKey}\" is not defined at \"${schemaPath}\" (strictRequired)`;\n (0, util_1.checkStrictMode)(it, msg, it.opts.strictRequired);\n }\n }\n }\n function allErrorsMode() {\n if (useLoop || $data) {\n cxt.block$data(codegen_1.nil, loopAllRequired);\n }\n else {\n for (const prop of schema) {\n (0, code_1.checkReportMissingProp)(cxt, prop);\n }\n }\n }\n function exitOnErrorMode() {\n const missing = gen.let(\"missing\");\n if (useLoop || $data) {\n const valid = gen.let(\"valid\", true);\n cxt.block$data(valid, () => loopUntilMissing(missing, valid));\n cxt.ok(valid);\n }\n else {\n gen.if((0, code_1.checkMissingProp)(cxt, schema, missing));\n (0, code_1.reportMissingProp)(cxt, missing);\n gen.else();\n }\n }\n function loopAllRequired() {\n gen.forOf(\"prop\", schemaCode, (prop) => {\n cxt.setParams({ missingProperty: prop });\n gen.if((0, code_1.noPropertyInData)(gen, data, prop, opts.ownProperties), () => cxt.error());\n });\n }\n function loopUntilMissing(missing, valid) {\n cxt.setParams({ missingProperty: missing });\n gen.forOf(missing, schemaCode, () => {\n gen.assign(valid, (0, code_1.propertyInData)(gen, data, missing, opts.ownProperties));\n gen.if((0, codegen_1.not)(valid), () => {\n cxt.error();\n gen.break();\n });\n }, codegen_1.nil);\n }\n },\n};\nexports.default = def;\n//# sourceMappingURL=required.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst dataType_1 = require(\"../../compile/validate/dataType\");\nconst codegen_1 = require(\"../../compile/codegen\");\nconst util_1 = require(\"../../compile/util\");\nconst equal_1 = require(\"../../runtime/equal\");\nconst error = {\n message: ({ params: { i, j } }) => (0, codegen_1.str) `must NOT have duplicate items (items ## ${j} and ${i} are identical)`,\n params: ({ params: { i, j } }) => (0, codegen_1._) `{i: ${i}, j: ${j}}`,\n};\nconst def = {\n keyword: \"uniqueItems\",\n type: \"array\",\n schemaType: \"boolean\",\n $data: true,\n error,\n code(cxt) {\n const { gen, data, $data, schema, parentSchema, schemaCode, it } = cxt;\n if (!$data && !schema)\n return;\n const valid = gen.let(\"valid\");\n const itemTypes = parentSchema.items ? (0, dataType_1.getSchemaTypes)(parentSchema.items) : [];\n cxt.block$data(valid, validateUniqueItems, (0, codegen_1._) `${schemaCode} === false`);\n cxt.ok(valid);\n function validateUniqueItems() {\n const i = gen.let(\"i\", (0, codegen_1._) `${data}.length`);\n const j = gen.let(\"j\");\n cxt.setParams({ i, j });\n gen.assign(valid, true);\n gen.if((0, codegen_1._) `${i} > 1`, () => (canOptimize() ? 
loopN : loopN2)(i, j));\n }\n function canOptimize() {\n return itemTypes.length > 0 && !itemTypes.some((t) => t === \"object\" || t === \"array\");\n }\n function loopN(i, j) {\n const item = gen.name(\"item\");\n const wrongType = (0, dataType_1.checkDataTypes)(itemTypes, item, it.opts.strictNumbers, dataType_1.DataType.Wrong);\n const indices = gen.const(\"indices\", (0, codegen_1._) `{}`);\n gen.for((0, codegen_1._) `;${i}--;`, () => {\n gen.let(item, (0, codegen_1._) `${data}[${i}]`);\n gen.if(wrongType, (0, codegen_1._) `continue`);\n if (itemTypes.length > 1)\n gen.if((0, codegen_1._) `typeof ${item} == \"string\"`, (0, codegen_1._) `${item} += \"_\"`);\n gen\n .if((0, codegen_1._) `typeof ${indices}[${item}] == \"number\"`, () => {\n gen.assign(j, (0, codegen_1._) `${indices}[${item}]`);\n cxt.error();\n gen.assign(valid, false).break();\n })\n .code((0, codegen_1._) `${indices}[${item}] = ${i}`);\n });\n }\n function loopN2(i, j) {\n const eql = (0, util_1.useFunc)(gen, equal_1.default);\n const outer = gen.name(\"outer\");\n gen.label(outer).for((0, codegen_1._) `;${i}--;`, () => gen.for((0, codegen_1._) `${j} = ${i}; ${j}--;`, () => gen.if((0, codegen_1._) `${eql}(${data}[${i}], ${data}[${j}])`, () => {\n cxt.error();\n gen.assign(valid, false).break(outer);\n })));\n }\n },\n};\nexports.default = def;\n//# sourceMappingURL=uniqueItems.js.map","'use strict';\n\n// do not edit .js files directly - edit src/index.jst\n\n\n\nmodule.exports = function equal(a, b) {\n if (a === b) return true;\n\n if (a && b && typeof a == 'object' && typeof b == 'object') {\n if (a.constructor !== b.constructor) return false;\n\n var length, i, keys;\n if (Array.isArray(a)) {\n length = a.length;\n if (length != b.length) return false;\n for (i = length; i-- !== 0;)\n if (!equal(a[i], b[i])) return false;\n return true;\n }\n\n\n\n if (a.constructor === RegExp) return a.source === b.source && a.flags === b.flags;\n if (a.valueOf !== Object.prototype.valueOf) return a.valueOf() === b.valueOf();\n if (a.toString !== Object.prototype.toString) return a.toString() === b.toString();\n\n keys = Object.keys(a);\n length = keys.length;\n if (length !== Object.keys(b).length) return false;\n\n for (i = length; i-- !== 0;)\n if (!Object.prototype.hasOwnProperty.call(b, keys[i])) return false;\n\n for (i = length; i-- !== 0;) {\n var key = keys[i];\n\n if (!equal(a[key], b[key])) return false;\n }\n\n return true;\n }\n\n // true if both NaN, false otherwise\n return a!==a && b!==b;\n};\n","'use strict';\n\nvar traverse = module.exports = function (schema, opts, cb) {\n // Legacy support for v0.3.1 and earlier.\n if (typeof opts == 'function') {\n cb = opts;\n opts = {};\n }\n\n cb = opts.cb || cb;\n var pre = (typeof cb == 'function') ? 
cb : cb.pre || function() {};\n var post = cb.post || function() {};\n\n _traverse(opts, pre, post, schema, '', schema);\n};\n\n\ntraverse.keywords = {\n additionalItems: true,\n items: true,\n contains: true,\n additionalProperties: true,\n propertyNames: true,\n not: true,\n if: true,\n then: true,\n else: true\n};\n\ntraverse.arrayKeywords = {\n items: true,\n allOf: true,\n anyOf: true,\n oneOf: true\n};\n\ntraverse.propsKeywords = {\n $defs: true,\n definitions: true,\n properties: true,\n patternProperties: true,\n dependencies: true\n};\n\ntraverse.skipKeywords = {\n default: true,\n enum: true,\n const: true,\n required: true,\n maximum: true,\n minimum: true,\n exclusiveMaximum: true,\n exclusiveMinimum: true,\n multipleOf: true,\n maxLength: true,\n minLength: true,\n pattern: true,\n format: true,\n maxItems: true,\n minItems: true,\n uniqueItems: true,\n maxProperties: true,\n minProperties: true\n};\n\n\nfunction _traverse(opts, pre, post, schema, jsonPtr, rootSchema, parentJsonPtr, parentKeyword, parentSchema, keyIndex) {\n if (schema && typeof schema == 'object' && !Array.isArray(schema)) {\n pre(schema, jsonPtr, rootSchema, parentJsonPtr, parentKeyword, parentSchema, keyIndex);\n for (var key in schema) {\n var sch = schema[key];\n if (Array.isArray(sch)) {\n if (key in traverse.arrayKeywords) {\n for (var i=0; i= this.maxSockets) {\n // We are over limit so we'll add it to the queue.\n self.requests.push(options);\n return;\n }\n\n // If we are under maxSockets create a new one.\n self.createSocket(options, function(socket) {\n socket.on('free', onFree);\n socket.on('close', onCloseOrRemove);\n socket.on('agentRemove', onCloseOrRemove);\n req.onSocket(socket);\n\n function onFree() {\n self.emit('free', socket, options);\n }\n\n function onCloseOrRemove(err) {\n self.removeSocket(socket);\n socket.removeListener('free', onFree);\n socket.removeListener('close', onCloseOrRemove);\n socket.removeListener('agentRemove', onCloseOrRemove);\n }\n });\n};\n\nTunnelingAgent.prototype.createSocket = function createSocket(options, cb) {\n var self = this;\n var placeholder = {};\n self.sockets.push(placeholder);\n\n var connectOptions = mergeOptions({}, self.proxyOptions, {\n method: 'CONNECT',\n path: options.host + ':' + options.port,\n agent: false,\n headers: {\n host: options.host + ':' + options.port\n }\n });\n if (options.localAddress) {\n connectOptions.localAddress = options.localAddress;\n }\n if (connectOptions.proxyAuth) {\n connectOptions.headers = connectOptions.headers || {};\n connectOptions.headers['Proxy-Authorization'] = 'Basic ' +\n new Buffer(connectOptions.proxyAuth).toString('base64');\n }\n\n debug('making CONNECT request');\n var connectReq = self.request(connectOptions);\n connectReq.useChunkedEncodingByDefault = false; // for v0.6\n connectReq.once('response', onResponse); // for v0.6\n connectReq.once('upgrade', onUpgrade); // for v0.6\n connectReq.once('connect', onConnect); // for v0.7 or later\n connectReq.once('error', onError);\n connectReq.end();\n\n function onResponse(res) {\n // Very hacky. 
This is necessary to avoid http-parser leaks.\n res.upgrade = true;\n }\n\n function onUpgrade(res, socket, head) {\n // Hacky.\n process.nextTick(function() {\n onConnect(res, socket, head);\n });\n }\n\n function onConnect(res, socket, head) {\n connectReq.removeAllListeners();\n socket.removeAllListeners();\n\n if (res.statusCode !== 200) {\n debug('tunneling socket could not be established, statusCode=%d',\n res.statusCode);\n socket.destroy();\n var error = new Error('tunneling socket could not be established, ' +\n 'statusCode=' + res.statusCode);\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n return;\n }\n if (head.length > 0) {\n debug('got illegal response body from proxy');\n socket.destroy();\n var error = new Error('got illegal response body from proxy');\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n return;\n }\n debug('tunneling connection has established');\n self.sockets[self.sockets.indexOf(placeholder)] = socket;\n return cb(socket);\n }\n\n function onError(cause) {\n connectReq.removeAllListeners();\n\n debug('tunneling socket could not be established, cause=%s\\n',\n cause.message, cause.stack);\n var error = new Error('tunneling socket could not be established, ' +\n 'cause=' + cause.message);\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n }\n};\n\nTunnelingAgent.prototype.removeSocket = function removeSocket(socket) {\n var pos = this.sockets.indexOf(socket)\n if (pos === -1) {\n return;\n }\n this.sockets.splice(pos, 1);\n\n var pending = this.requests.shift();\n if (pending) {\n // If we have pending requests and a socket gets closed a new one\n // needs to be created to take over in the pool for the one that closed.\n this.createSocket(pending, function(socket) {\n pending.request.onSocket(socket);\n });\n }\n};\n\nfunction createSecureSocket(options, cb) {\n var self = this;\n TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {\n var hostHeader = options.request.getHeader('host');\n var tlsOptions = mergeOptions({}, self.options, {\n socket: socket,\n servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host\n });\n\n // 0 is dummy port for v0.6\n var secureSocket = tls.connect(0, tlsOptions);\n self.sockets[self.sockets.indexOf(socket)] = secureSocket;\n cb(secureSocket);\n });\n}\n\n\nfunction toOptions(host, port, localAddress) {\n if (typeof host === 'string') { // since v0.10\n return {\n host: host,\n port: port,\n localAddress: localAddress\n };\n }\n return host; // for v0.11 or later\n}\n\nfunction mergeOptions(target) {\n for (var i = 1, len = arguments.length; i < len; ++i) {\n var overrides = arguments[i];\n if (typeof overrides === 'object') {\n var keys = Object.keys(overrides);\n for (var j = 0, keyLen = keys.length; j < keyLen; ++j) {\n var k = keys[j];\n if (overrides[k] !== undefined) {\n target[k] = overrides[k];\n }\n }\n }\n }\n return target;\n}\n\n\nvar debug;\nif (process.env.NODE_DEBUG && /\\btunnel\\b/.test(process.env.NODE_DEBUG)) {\n debug = function() {\n var args = Array.prototype.slice.call(arguments);\n if (typeof args[0] === 'string') {\n args[0] = 'TUNNEL: ' + args[0];\n } else {\n args.unshift('TUNNEL:');\n }\n console.error.apply(console, args);\n }\n} else {\n debug = function() {};\n}\nexports.debug = debug; // for test\n","/** @license URI.js v4.4.1 (c) 2011 Gary Court. 
License: http://github.com/garycourt/uri-js */\n(function (global, factory) {\n\ttypeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :\n\ttypeof define === 'function' && define.amd ? define(['exports'], factory) :\n\t(factory((global.URI = global.URI || {})));\n}(this, (function (exports) { 'use strict';\n\nfunction merge() {\n for (var _len = arguments.length, sets = Array(_len), _key = 0; _key < _len; _key++) {\n sets[_key] = arguments[_key];\n }\n\n if (sets.length > 1) {\n sets[0] = sets[0].slice(0, -1);\n var xl = sets.length - 1;\n for (var x = 1; x < xl; ++x) {\n sets[x] = sets[x].slice(1, -1);\n }\n sets[xl] = sets[xl].slice(1);\n return sets.join('');\n } else {\n return sets[0];\n }\n}\nfunction subexp(str) {\n return \"(?:\" + str + \")\";\n}\nfunction typeOf(o) {\n return o === undefined ? \"undefined\" : o === null ? \"null\" : Object.prototype.toString.call(o).split(\" \").pop().split(\"]\").shift().toLowerCase();\n}\nfunction toUpperCase(str) {\n return str.toUpperCase();\n}\nfunction toArray(obj) {\n return obj !== undefined && obj !== null ? obj instanceof Array ? obj : typeof obj.length !== \"number\" || obj.split || obj.setInterval || obj.call ? [obj] : Array.prototype.slice.call(obj) : [];\n}\nfunction assign(target, source) {\n var obj = target;\n if (source) {\n for (var key in source) {\n obj[key] = source[key];\n }\n }\n return obj;\n}\n\nfunction buildExps(isIRI) {\n var ALPHA$$ = \"[A-Za-z]\",\n CR$ = \"[\\\\x0D]\",\n DIGIT$$ = \"[0-9]\",\n DQUOTE$$ = \"[\\\\x22]\",\n HEXDIG$$ = merge(DIGIT$$, \"[A-Fa-f]\"),\n //case-insensitive\n LF$$ = \"[\\\\x0A]\",\n SP$$ = \"[\\\\x20]\",\n PCT_ENCODED$ = subexp(subexp(\"%[EFef]\" + HEXDIG$$ + \"%\" + HEXDIG$$ + HEXDIG$$ + \"%\" + HEXDIG$$ + HEXDIG$$) + \"|\" + subexp(\"%[89A-Fa-f]\" + HEXDIG$$ + \"%\" + HEXDIG$$ + HEXDIG$$) + \"|\" + subexp(\"%\" + HEXDIG$$ + HEXDIG$$)),\n //expanded\n GEN_DELIMS$$ = \"[\\\\:\\\\/\\\\?\\\\#\\\\[\\\\]\\\\@]\",\n SUB_DELIMS$$ = \"[\\\\!\\\\$\\\\&\\\\'\\\\(\\\\)\\\\*\\\\+\\\\,\\\\;\\\\=]\",\n RESERVED$$ = merge(GEN_DELIMS$$, SUB_DELIMS$$),\n UCSCHAR$$ = isIRI ? \"[\\\\xA0-\\\\u200D\\\\u2010-\\\\u2029\\\\u202F-\\\\uD7FF\\\\uF900-\\\\uFDCF\\\\uFDF0-\\\\uFFEF]\" : \"[]\",\n //subset, excludes bidi control characters\n IPRIVATE$$ = isIRI ? 
\"[\\\\uE000-\\\\uF8FF]\" : \"[]\",\n //subset\n UNRESERVED$$ = merge(ALPHA$$, DIGIT$$, \"[\\\\-\\\\.\\\\_\\\\~]\", UCSCHAR$$),\n SCHEME$ = subexp(ALPHA$$ + merge(ALPHA$$, DIGIT$$, \"[\\\\+\\\\-\\\\.]\") + \"*\"),\n USERINFO$ = subexp(subexp(PCT_ENCODED$ + \"|\" + merge(UNRESERVED$$, SUB_DELIMS$$, \"[\\\\:]\")) + \"*\"),\n DEC_OCTET$ = subexp(subexp(\"25[0-5]\") + \"|\" + subexp(\"2[0-4]\" + DIGIT$$) + \"|\" + subexp(\"1\" + DIGIT$$ + DIGIT$$) + \"|\" + subexp(\"[1-9]\" + DIGIT$$) + \"|\" + DIGIT$$),\n DEC_OCTET_RELAXED$ = subexp(subexp(\"25[0-5]\") + \"|\" + subexp(\"2[0-4]\" + DIGIT$$) + \"|\" + subexp(\"1\" + DIGIT$$ + DIGIT$$) + \"|\" + subexp(\"0?[1-9]\" + DIGIT$$) + \"|0?0?\" + DIGIT$$),\n //relaxed parsing rules\n IPV4ADDRESS$ = subexp(DEC_OCTET_RELAXED$ + \"\\\\.\" + DEC_OCTET_RELAXED$ + \"\\\\.\" + DEC_OCTET_RELAXED$ + \"\\\\.\" + DEC_OCTET_RELAXED$),\n H16$ = subexp(HEXDIG$$ + \"{1,4}\"),\n LS32$ = subexp(subexp(H16$ + \"\\\\:\" + H16$) + \"|\" + IPV4ADDRESS$),\n IPV6ADDRESS1$ = subexp(subexp(H16$ + \"\\\\:\") + \"{6}\" + LS32$),\n // 6( h16 \":\" ) ls32\n IPV6ADDRESS2$ = subexp(\"\\\\:\\\\:\" + subexp(H16$ + \"\\\\:\") + \"{5}\" + LS32$),\n // \"::\" 5( h16 \":\" ) ls32\n IPV6ADDRESS3$ = subexp(subexp(H16$) + \"?\\\\:\\\\:\" + subexp(H16$ + \"\\\\:\") + \"{4}\" + LS32$),\n //[ h16 ] \"::\" 4( h16 \":\" ) ls32\n IPV6ADDRESS4$ = subexp(subexp(subexp(H16$ + \"\\\\:\") + \"{0,1}\" + H16$) + \"?\\\\:\\\\:\" + subexp(H16$ + \"\\\\:\") + \"{3}\" + LS32$),\n //[ *1( h16 \":\" ) h16 ] \"::\" 3( h16 \":\" ) ls32\n IPV6ADDRESS5$ = subexp(subexp(subexp(H16$ + \"\\\\:\") + \"{0,2}\" + H16$) + \"?\\\\:\\\\:\" + subexp(H16$ + \"\\\\:\") + \"{2}\" + LS32$),\n //[ *2( h16 \":\" ) h16 ] \"::\" 2( h16 \":\" ) ls32\n IPV6ADDRESS6$ = subexp(subexp(subexp(H16$ + \"\\\\:\") + \"{0,3}\" + H16$) + \"?\\\\:\\\\:\" + H16$ + \"\\\\:\" + LS32$),\n //[ *3( h16 \":\" ) h16 ] \"::\" h16 \":\" ls32\n IPV6ADDRESS7$ = subexp(subexp(subexp(H16$ + \"\\\\:\") + \"{0,4}\" + H16$) + \"?\\\\:\\\\:\" + LS32$),\n //[ *4( h16 \":\" ) h16 ] \"::\" ls32\n IPV6ADDRESS8$ = subexp(subexp(subexp(H16$ + \"\\\\:\") + \"{0,5}\" + H16$) + \"?\\\\:\\\\:\" + H16$),\n //[ *5( h16 \":\" ) h16 ] \"::\" h16\n IPV6ADDRESS9$ = subexp(subexp(subexp(H16$ + \"\\\\:\") + \"{0,6}\" + H16$) + \"?\\\\:\\\\:\"),\n //[ *6( h16 \":\" ) h16 ] \"::\"\n IPV6ADDRESS$ = subexp([IPV6ADDRESS1$, IPV6ADDRESS2$, IPV6ADDRESS3$, IPV6ADDRESS4$, IPV6ADDRESS5$, IPV6ADDRESS6$, IPV6ADDRESS7$, IPV6ADDRESS8$, IPV6ADDRESS9$].join(\"|\")),\n ZONEID$ = subexp(subexp(UNRESERVED$$ + \"|\" + PCT_ENCODED$) + \"+\"),\n //RFC 6874\n IPV6ADDRZ$ = subexp(IPV6ADDRESS$ + \"\\\\%25\" + ZONEID$),\n //RFC 6874\n IPV6ADDRZ_RELAXED$ = subexp(IPV6ADDRESS$ + subexp(\"\\\\%25|\\\\%(?!\" + HEXDIG$$ + \"{2})\") + ZONEID$),\n //RFC 6874, with relaxed parsing rules\n IPVFUTURE$ = subexp(\"[vV]\" + HEXDIG$$ + \"+\\\\.\" + merge(UNRESERVED$$, SUB_DELIMS$$, \"[\\\\:]\") + \"+\"),\n IP_LITERAL$ = subexp(\"\\\\[\" + subexp(IPV6ADDRZ_RELAXED$ + \"|\" + IPV6ADDRESS$ + \"|\" + IPVFUTURE$) + \"\\\\]\"),\n //RFC 6874\n REG_NAME$ = subexp(subexp(PCT_ENCODED$ + \"|\" + merge(UNRESERVED$$, SUB_DELIMS$$)) + \"*\"),\n HOST$ = subexp(IP_LITERAL$ + \"|\" + IPV4ADDRESS$ + \"(?!\" + REG_NAME$ + \")\" + \"|\" + REG_NAME$),\n PORT$ = subexp(DIGIT$$ + \"*\"),\n AUTHORITY$ = subexp(subexp(USERINFO$ + \"@\") + \"?\" + HOST$ + subexp(\"\\\\:\" + PORT$) + \"?\"),\n PCHAR$ = subexp(PCT_ENCODED$ + \"|\" + merge(UNRESERVED$$, SUB_DELIMS$$, \"[\\\\:\\\\@]\")),\n SEGMENT$ = subexp(PCHAR$ + \"*\"),\n SEGMENT_NZ$ = 
subexp(PCHAR$ + \"+\"),\n SEGMENT_NZ_NC$ = subexp(subexp(PCT_ENCODED$ + \"|\" + merge(UNRESERVED$$, SUB_DELIMS$$, \"[\\\\@]\")) + \"+\"),\n PATH_ABEMPTY$ = subexp(subexp(\"\\\\/\" + SEGMENT$) + \"*\"),\n PATH_ABSOLUTE$ = subexp(\"\\\\/\" + subexp(SEGMENT_NZ$ + PATH_ABEMPTY$) + \"?\"),\n //simplified\n PATH_NOSCHEME$ = subexp(SEGMENT_NZ_NC$ + PATH_ABEMPTY$),\n //simplified\n PATH_ROOTLESS$ = subexp(SEGMENT_NZ$ + PATH_ABEMPTY$),\n //simplified\n PATH_EMPTY$ = \"(?!\" + PCHAR$ + \")\",\n PATH$ = subexp(PATH_ABEMPTY$ + \"|\" + PATH_ABSOLUTE$ + \"|\" + PATH_NOSCHEME$ + \"|\" + PATH_ROOTLESS$ + \"|\" + PATH_EMPTY$),\n QUERY$ = subexp(subexp(PCHAR$ + \"|\" + merge(\"[\\\\/\\\\?]\", IPRIVATE$$)) + \"*\"),\n FRAGMENT$ = subexp(subexp(PCHAR$ + \"|[\\\\/\\\\?]\") + \"*\"),\n HIER_PART$ = subexp(subexp(\"\\\\/\\\\/\" + AUTHORITY$ + PATH_ABEMPTY$) + \"|\" + PATH_ABSOLUTE$ + \"|\" + PATH_ROOTLESS$ + \"|\" + PATH_EMPTY$),\n URI$ = subexp(SCHEME$ + \"\\\\:\" + HIER_PART$ + subexp(\"\\\\?\" + QUERY$) + \"?\" + subexp(\"\\\\#\" + FRAGMENT$) + \"?\"),\n RELATIVE_PART$ = subexp(subexp(\"\\\\/\\\\/\" + AUTHORITY$ + PATH_ABEMPTY$) + \"|\" + PATH_ABSOLUTE$ + \"|\" + PATH_NOSCHEME$ + \"|\" + PATH_EMPTY$),\n RELATIVE$ = subexp(RELATIVE_PART$ + subexp(\"\\\\?\" + QUERY$) + \"?\" + subexp(\"\\\\#\" + FRAGMENT$) + \"?\"),\n URI_REFERENCE$ = subexp(URI$ + \"|\" + RELATIVE$),\n ABSOLUTE_URI$ = subexp(SCHEME$ + \"\\\\:\" + HIER_PART$ + subexp(\"\\\\?\" + QUERY$) + \"?\"),\n GENERIC_REF$ = \"^(\" + SCHEME$ + \")\\\\:\" + subexp(subexp(\"\\\\/\\\\/(\" + subexp(\"(\" + USERINFO$ + \")@\") + \"?(\" + HOST$ + \")\" + subexp(\"\\\\:(\" + PORT$ + \")\") + \"?)\") + \"?(\" + PATH_ABEMPTY$ + \"|\" + PATH_ABSOLUTE$ + \"|\" + PATH_ROOTLESS$ + \"|\" + PATH_EMPTY$ + \")\") + subexp(\"\\\\?(\" + QUERY$ + \")\") + \"?\" + subexp(\"\\\\#(\" + FRAGMENT$ + \")\") + \"?$\",\n RELATIVE_REF$ = \"^(){0}\" + subexp(subexp(\"\\\\/\\\\/(\" + subexp(\"(\" + USERINFO$ + \")@\") + \"?(\" + HOST$ + \")\" + subexp(\"\\\\:(\" + PORT$ + \")\") + \"?)\") + \"?(\" + PATH_ABEMPTY$ + \"|\" + PATH_ABSOLUTE$ + \"|\" + PATH_NOSCHEME$ + \"|\" + PATH_EMPTY$ + \")\") + subexp(\"\\\\?(\" + QUERY$ + \")\") + \"?\" + subexp(\"\\\\#(\" + FRAGMENT$ + \")\") + \"?$\",\n ABSOLUTE_REF$ = \"^(\" + SCHEME$ + \")\\\\:\" + subexp(subexp(\"\\\\/\\\\/(\" + subexp(\"(\" + USERINFO$ + \")@\") + \"?(\" + HOST$ + \")\" + subexp(\"\\\\:(\" + PORT$ + \")\") + \"?)\") + \"?(\" + PATH_ABEMPTY$ + \"|\" + PATH_ABSOLUTE$ + \"|\" + PATH_ROOTLESS$ + \"|\" + PATH_EMPTY$ + \")\") + subexp(\"\\\\?(\" + QUERY$ + \")\") + \"?$\",\n SAMEDOC_REF$ = \"^\" + subexp(\"\\\\#(\" + FRAGMENT$ + \")\") + \"?$\",\n AUTHORITY_REF$ = \"^\" + subexp(\"(\" + USERINFO$ + \")@\") + \"?(\" + HOST$ + \")\" + subexp(\"\\\\:(\" + PORT$ + \")\") + \"?$\";\n return {\n NOT_SCHEME: new RegExp(merge(\"[^]\", ALPHA$$, DIGIT$$, \"[\\\\+\\\\-\\\\.]\"), \"g\"),\n NOT_USERINFO: new RegExp(merge(\"[^\\\\%\\\\:]\", UNRESERVED$$, SUB_DELIMS$$), \"g\"),\n NOT_HOST: new RegExp(merge(\"[^\\\\%\\\\[\\\\]\\\\:]\", UNRESERVED$$, SUB_DELIMS$$), \"g\"),\n NOT_PATH: new RegExp(merge(\"[^\\\\%\\\\/\\\\:\\\\@]\", UNRESERVED$$, SUB_DELIMS$$), \"g\"),\n NOT_PATH_NOSCHEME: new RegExp(merge(\"[^\\\\%\\\\/\\\\@]\", UNRESERVED$$, SUB_DELIMS$$), \"g\"),\n NOT_QUERY: new RegExp(merge(\"[^\\\\%]\", UNRESERVED$$, SUB_DELIMS$$, \"[\\\\:\\\\@\\\\/\\\\?]\", IPRIVATE$$), \"g\"),\n NOT_FRAGMENT: new RegExp(merge(\"[^\\\\%]\", UNRESERVED$$, SUB_DELIMS$$, \"[\\\\:\\\\@\\\\/\\\\?]\"), \"g\"),\n ESCAPE: new RegExp(merge(\"[^]\", UNRESERVED$$, 
SUB_DELIMS$$), \"g\"),\n UNRESERVED: new RegExp(UNRESERVED$$, \"g\"),\n OTHER_CHARS: new RegExp(merge(\"[^\\\\%]\", UNRESERVED$$, RESERVED$$), \"g\"),\n PCT_ENCODED: new RegExp(PCT_ENCODED$, \"g\"),\n IPV4ADDRESS: new RegExp(\"^(\" + IPV4ADDRESS$ + \")$\"),\n IPV6ADDRESS: new RegExp(\"^\\\\[?(\" + IPV6ADDRESS$ + \")\" + subexp(subexp(\"\\\\%25|\\\\%(?!\" + HEXDIG$$ + \"{2})\") + \"(\" + ZONEID$ + \")\") + \"?\\\\]?$\") //RFC 6874, with relaxed parsing rules\n };\n}\nvar URI_PROTOCOL = buildExps(false);\n\nvar IRI_PROTOCOL = buildExps(true);\n\nvar slicedToArray = function () {\n function sliceIterator(arr, i) {\n var _arr = [];\n var _n = true;\n var _d = false;\n var _e = undefined;\n\n try {\n for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) {\n _arr.push(_s.value);\n\n if (i && _arr.length === i) break;\n }\n } catch (err) {\n _d = true;\n _e = err;\n } finally {\n try {\n if (!_n && _i[\"return\"]) _i[\"return\"]();\n } finally {\n if (_d) throw _e;\n }\n }\n\n return _arr;\n }\n\n return function (arr, i) {\n if (Array.isArray(arr)) {\n return arr;\n } else if (Symbol.iterator in Object(arr)) {\n return sliceIterator(arr, i);\n } else {\n throw new TypeError(\"Invalid attempt to destructure non-iterable instance\");\n }\n };\n}();\n\n\n\n\n\n\n\n\n\n\n\n\n\nvar toConsumableArray = function (arr) {\n if (Array.isArray(arr)) {\n for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) arr2[i] = arr[i];\n\n return arr2;\n } else {\n return Array.from(arr);\n }\n};\n\n/** Highest positive signed 32-bit float value */\n\nvar maxInt = 2147483647; // aka. 0x7FFFFFFF or 2^31-1\n\n/** Bootstring parameters */\nvar base = 36;\nvar tMin = 1;\nvar tMax = 26;\nvar skew = 38;\nvar damp = 700;\nvar initialBias = 72;\nvar initialN = 128; // 0x80\nvar delimiter = '-'; // '\\x2D'\n\n/** Regular expressions */\nvar regexPunycode = /^xn--/;\nvar regexNonASCII = /[^\\0-\\x7E]/; // non-ASCII chars\nvar regexSeparators = /[\\x2E\\u3002\\uFF0E\\uFF61]/g; // RFC 3490 separators\n\n/** Error messages */\nvar errors = {\n\t'overflow': 'Overflow: input needs wider integers to process',\n\t'not-basic': 'Illegal input >= 0x80 (not a basic code point)',\n\t'invalid-input': 'Invalid input'\n};\n\n/** Convenience shortcuts */\nvar baseMinusTMin = base - tMin;\nvar floor = Math.floor;\nvar stringFromCharCode = String.fromCharCode;\n\n/*--------------------------------------------------------------------------*/\n\n/**\n * A generic error utility function.\n * @private\n * @param {String} type The error type.\n * @returns {Error} Throws a `RangeError` with the applicable error message.\n */\nfunction error$1(type) {\n\tthrow new RangeError(errors[type]);\n}\n\n/**\n * A generic `Array#map` utility function.\n * @private\n * @param {Array} array The array to iterate over.\n * @param {Function} callback The function that gets called for every array\n * item.\n * @returns {Array} A new array of values returned by the callback function.\n */\nfunction map(array, fn) {\n\tvar result = [];\n\tvar length = array.length;\n\twhile (length--) {\n\t\tresult[length] = fn(array[length]);\n\t}\n\treturn result;\n}\n\n/**\n * A simple `Array#map`-like wrapper to work with domain name strings or email\n * addresses.\n * @private\n * @param {String} domain The domain name or email address.\n * @param {Function} callback The function that gets called for every\n * character.\n * @returns {Array} A new string of characters returned by the callback\n * function.\n */\nfunction mapDomain(string, fn) 
{\n\tvar parts = string.split('@');\n\tvar result = '';\n\tif (parts.length > 1) {\n\t\t// In email addresses, only the domain name should be punycoded. Leave\n\t\t// the local part (i.e. everything up to `@`) intact.\n\t\tresult = parts[0] + '@';\n\t\tstring = parts[1];\n\t}\n\t// Avoid `split(regex)` for IE8 compatibility. See #17.\n\tstring = string.replace(regexSeparators, '\\x2E');\n\tvar labels = string.split('.');\n\tvar encoded = map(labels, fn).join('.');\n\treturn result + encoded;\n}\n\n/**\n * Creates an array containing the numeric code points of each Unicode\n * character in the string. While JavaScript uses UCS-2 internally,\n * this function will convert a pair of surrogate halves (each of which\n * UCS-2 exposes as separate characters) into a single code point,\n * matching UTF-16.\n * @see `punycode.ucs2.encode`\n * @see \n * @memberOf punycode.ucs2\n * @name decode\n * @param {String} string The Unicode input string (UCS-2).\n * @returns {Array} The new array of code points.\n */\nfunction ucs2decode(string) {\n\tvar output = [];\n\tvar counter = 0;\n\tvar length = string.length;\n\twhile (counter < length) {\n\t\tvar value = string.charCodeAt(counter++);\n\t\tif (value >= 0xD800 && value <= 0xDBFF && counter < length) {\n\t\t\t// It's a high surrogate, and there is a next character.\n\t\t\tvar extra = string.charCodeAt(counter++);\n\t\t\tif ((extra & 0xFC00) == 0xDC00) {\n\t\t\t\t// Low surrogate.\n\t\t\t\toutput.push(((value & 0x3FF) << 10) + (extra & 0x3FF) + 0x10000);\n\t\t\t} else {\n\t\t\t\t// It's an unmatched surrogate; only append this code unit, in case the\n\t\t\t\t// next code unit is the high surrogate of a surrogate pair.\n\t\t\t\toutput.push(value);\n\t\t\t\tcounter--;\n\t\t\t}\n\t\t} else {\n\t\t\toutput.push(value);\n\t\t}\n\t}\n\treturn output;\n}\n\n/**\n * Creates a string based on an array of numeric code points.\n * @see `punycode.ucs2.decode`\n * @memberOf punycode.ucs2\n * @name encode\n * @param {Array} codePoints The array of numeric code points.\n * @returns {String} The new Unicode string (UCS-2).\n */\nvar ucs2encode = function ucs2encode(array) {\n\treturn String.fromCodePoint.apply(String, toConsumableArray(array));\n};\n\n/**\n * Converts a basic code point into a digit/integer.\n * @see `digitToBasic()`\n * @private\n * @param {Number} codePoint The basic numeric code point value.\n * @returns {Number} The numeric value of a basic code point (for use in\n * representing integers) in the range `0` to `base - 1`, or `base` if\n * the code point does not represent a value.\n */\nvar basicToDigit = function basicToDigit(codePoint) {\n\tif (codePoint - 0x30 < 0x0A) {\n\t\treturn codePoint - 0x16;\n\t}\n\tif (codePoint - 0x41 < 0x1A) {\n\t\treturn codePoint - 0x41;\n\t}\n\tif (codePoint - 0x61 < 0x1A) {\n\t\treturn codePoint - 0x61;\n\t}\n\treturn base;\n};\n\n/**\n * Converts a digit/integer into a basic code point.\n * @see `basicToDigit()`\n * @private\n * @param {Number} digit The numeric value of a basic code point.\n * @returns {Number} The basic code point whose value (when used for\n * representing integers) is `digit`, which needs to be in the range\n * `0` to `base - 1`. If `flag` is non-zero, the uppercase form is\n * used; else, the lowercase form is used. 
The behavior is undefined\n * if `flag` is non-zero and `digit` has no uppercase form.\n */\nvar digitToBasic = function digitToBasic(digit, flag) {\n\t// 0..25 map to ASCII a..z or A..Z\n\t// 26..35 map to ASCII 0..9\n\treturn digit + 22 + 75 * (digit < 26) - ((flag != 0) << 5);\n};\n\n/**\n * Bias adaptation function as per section 3.4 of RFC 3492.\n * https://tools.ietf.org/html/rfc3492#section-3.4\n * @private\n */\nvar adapt = function adapt(delta, numPoints, firstTime) {\n\tvar k = 0;\n\tdelta = firstTime ? floor(delta / damp) : delta >> 1;\n\tdelta += floor(delta / numPoints);\n\tfor (; /* no initialization */delta > baseMinusTMin * tMax >> 1; k += base) {\n\t\tdelta = floor(delta / baseMinusTMin);\n\t}\n\treturn floor(k + (baseMinusTMin + 1) * delta / (delta + skew));\n};\n\n/**\n * Converts a Punycode string of ASCII-only symbols to a string of Unicode\n * symbols.\n * @memberOf punycode\n * @param {String} input The Punycode string of ASCII-only symbols.\n * @returns {String} The resulting string of Unicode symbols.\n */\nvar decode = function decode(input) {\n\t// Don't use UCS-2.\n\tvar output = [];\n\tvar inputLength = input.length;\n\tvar i = 0;\n\tvar n = initialN;\n\tvar bias = initialBias;\n\n\t// Handle the basic code points: let `basic` be the number of input code\n\t// points before the last delimiter, or `0` if there is none, then copy\n\t// the first basic code points to the output.\n\n\tvar basic = input.lastIndexOf(delimiter);\n\tif (basic < 0) {\n\t\tbasic = 0;\n\t}\n\n\tfor (var j = 0; j < basic; ++j) {\n\t\t// if it's not a basic code point\n\t\tif (input.charCodeAt(j) >= 0x80) {\n\t\t\terror$1('not-basic');\n\t\t}\n\t\toutput.push(input.charCodeAt(j));\n\t}\n\n\t// Main decoding loop: start just after the last delimiter if any basic code\n\t// points were copied; start at the beginning otherwise.\n\n\tfor (var index = basic > 0 ? basic + 1 : 0; index < inputLength;) /* no final expression */{\n\n\t\t// `index` is the index of the next character to be consumed.\n\t\t// Decode a generalized variable-length integer into `delta`,\n\t\t// which gets added to `i`. The overflow checking is easier\n\t\t// if we increase `i` as we go, then subtract off its starting\n\t\t// value at the end to obtain `delta`.\n\t\tvar oldi = i;\n\t\tfor (var w = 1, k = base;; /* no condition */k += base) {\n\n\t\t\tif (index >= inputLength) {\n\t\t\t\terror$1('invalid-input');\n\t\t\t}\n\n\t\t\tvar digit = basicToDigit(input.charCodeAt(index++));\n\n\t\t\tif (digit >= base || digit > floor((maxInt - i) / w)) {\n\t\t\t\terror$1('overflow');\n\t\t\t}\n\n\t\t\ti += digit * w;\n\t\t\tvar t = k <= bias ? tMin : k >= bias + tMax ? tMax : k - bias;\n\n\t\t\tif (digit < t) {\n\t\t\t\tbreak;\n\t\t\t}\n\n\t\t\tvar baseMinusT = base - t;\n\t\t\tif (w > floor(maxInt / baseMinusT)) {\n\t\t\t\terror$1('overflow');\n\t\t\t}\n\n\t\t\tw *= baseMinusT;\n\t\t}\n\n\t\tvar out = output.length + 1;\n\t\tbias = adapt(i - oldi, out, oldi == 0);\n\n\t\t// `i` was supposed to wrap around from `out` to `0`,\n\t\t// incrementing `n` each time, so we'll fix that now:\n\t\tif (floor(i / out) > maxInt - n) {\n\t\t\terror$1('overflow');\n\t\t}\n\n\t\tn += floor(i / out);\n\t\ti %= out;\n\n\t\t// Insert `n` at position `i` of the output.\n\t\toutput.splice(i++, 0, n);\n\t}\n\n\treturn String.fromCodePoint.apply(String, output);\n};\n\n/**\n * Converts a string of Unicode symbols (e.g. 
a domain name label) to a\n * Punycode string of ASCII-only symbols.\n * @memberOf punycode\n * @param {String} input The string of Unicode symbols.\n * @returns {String} The resulting Punycode string of ASCII-only symbols.\n */\nvar encode = function encode(input) {\n\tvar output = [];\n\n\t// Convert the input in UCS-2 to an array of Unicode code points.\n\tinput = ucs2decode(input);\n\n\t// Cache the length.\n\tvar inputLength = input.length;\n\n\t// Initialize the state.\n\tvar n = initialN;\n\tvar delta = 0;\n\tvar bias = initialBias;\n\n\t// Handle the basic code points.\n\tvar _iteratorNormalCompletion = true;\n\tvar _didIteratorError = false;\n\tvar _iteratorError = undefined;\n\n\ttry {\n\t\tfor (var _iterator = input[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) {\n\t\t\tvar _currentValue2 = _step.value;\n\n\t\t\tif (_currentValue2 < 0x80) {\n\t\t\t\toutput.push(stringFromCharCode(_currentValue2));\n\t\t\t}\n\t\t}\n\t} catch (err) {\n\t\t_didIteratorError = true;\n\t\t_iteratorError = err;\n\t} finally {\n\t\ttry {\n\t\t\tif (!_iteratorNormalCompletion && _iterator.return) {\n\t\t\t\t_iterator.return();\n\t\t\t}\n\t\t} finally {\n\t\t\tif (_didIteratorError) {\n\t\t\t\tthrow _iteratorError;\n\t\t\t}\n\t\t}\n\t}\n\n\tvar basicLength = output.length;\n\tvar handledCPCount = basicLength;\n\n\t// `handledCPCount` is the number of code points that have been handled;\n\t// `basicLength` is the number of basic code points.\n\n\t// Finish the basic string with a delimiter unless it's empty.\n\tif (basicLength) {\n\t\toutput.push(delimiter);\n\t}\n\n\t// Main encoding loop:\n\twhile (handledCPCount < inputLength) {\n\n\t\t// All non-basic code points < n have been handled already. 
Find the next\n\t\t// larger one:\n\t\tvar m = maxInt;\n\t\tvar _iteratorNormalCompletion2 = true;\n\t\tvar _didIteratorError2 = false;\n\t\tvar _iteratorError2 = undefined;\n\n\t\ttry {\n\t\t\tfor (var _iterator2 = input[Symbol.iterator](), _step2; !(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) {\n\t\t\t\tvar currentValue = _step2.value;\n\n\t\t\t\tif (currentValue >= n && currentValue < m) {\n\t\t\t\t\tm = currentValue;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// Increase `delta` enough to advance the decoder's state to ,\n\t\t\t// but guard against overflow.\n\t\t} catch (err) {\n\t\t\t_didIteratorError2 = true;\n\t\t\t_iteratorError2 = err;\n\t\t} finally {\n\t\t\ttry {\n\t\t\t\tif (!_iteratorNormalCompletion2 && _iterator2.return) {\n\t\t\t\t\t_iterator2.return();\n\t\t\t\t}\n\t\t\t} finally {\n\t\t\t\tif (_didIteratorError2) {\n\t\t\t\t\tthrow _iteratorError2;\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tvar handledCPCountPlusOne = handledCPCount + 1;\n\t\tif (m - n > floor((maxInt - delta) / handledCPCountPlusOne)) {\n\t\t\terror$1('overflow');\n\t\t}\n\n\t\tdelta += (m - n) * handledCPCountPlusOne;\n\t\tn = m;\n\n\t\tvar _iteratorNormalCompletion3 = true;\n\t\tvar _didIteratorError3 = false;\n\t\tvar _iteratorError3 = undefined;\n\n\t\ttry {\n\t\t\tfor (var _iterator3 = input[Symbol.iterator](), _step3; !(_iteratorNormalCompletion3 = (_step3 = _iterator3.next()).done); _iteratorNormalCompletion3 = true) {\n\t\t\t\tvar _currentValue = _step3.value;\n\n\t\t\t\tif (_currentValue < n && ++delta > maxInt) {\n\t\t\t\t\terror$1('overflow');\n\t\t\t\t}\n\t\t\t\tif (_currentValue == n) {\n\t\t\t\t\t// Represent delta as a generalized variable-length integer.\n\t\t\t\t\tvar q = delta;\n\t\t\t\t\tfor (var k = base;; /* no condition */k += base) {\n\t\t\t\t\t\tvar t = k <= bias ? tMin : k >= bias + tMax ? tMax : k - bias;\n\t\t\t\t\t\tif (q < t) {\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\t\t\t\t\t\tvar qMinusT = q - t;\n\t\t\t\t\t\tvar baseMinusT = base - t;\n\t\t\t\t\t\toutput.push(stringFromCharCode(digitToBasic(t + qMinusT % baseMinusT, 0)));\n\t\t\t\t\t\tq = floor(qMinusT / baseMinusT);\n\t\t\t\t\t}\n\n\t\t\t\t\toutput.push(stringFromCharCode(digitToBasic(q, 0)));\n\t\t\t\t\tbias = adapt(delta, handledCPCountPlusOne, handledCPCount == basicLength);\n\t\t\t\t\tdelta = 0;\n\t\t\t\t\t++handledCPCount;\n\t\t\t\t}\n\t\t\t}\n\t\t} catch (err) {\n\t\t\t_didIteratorError3 = true;\n\t\t\t_iteratorError3 = err;\n\t\t} finally {\n\t\t\ttry {\n\t\t\t\tif (!_iteratorNormalCompletion3 && _iterator3.return) {\n\t\t\t\t\t_iterator3.return();\n\t\t\t\t}\n\t\t\t} finally {\n\t\t\t\tif (_didIteratorError3) {\n\t\t\t\t\tthrow _iteratorError3;\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\t++delta;\n\t\t++n;\n\t}\n\treturn output.join('');\n};\n\n/**\n * Converts a Punycode string representing a domain name or an email address\n * to Unicode. Only the Punycoded parts of the input will be converted, i.e.\n * it doesn't matter if you call it on a string that has already been\n * converted to Unicode.\n * @memberOf punycode\n * @param {String} input The Punycoded domain name or email address to\n * convert to Unicode.\n * @returns {String} The Unicode representation of the given Punycode\n * string.\n */\nvar toUnicode = function toUnicode(input) {\n\treturn mapDomain(input, function (string) {\n\t\treturn regexPunycode.test(string) ? decode(string.slice(4).toLowerCase()) : string;\n\t});\n};\n\n/**\n * Converts a Unicode string representing a domain name or an email address to\n * Punycode. 
Only the non-ASCII parts of the domain name will be converted,\n * i.e. it doesn't matter if you call it with a domain that's already in\n * ASCII.\n * @memberOf punycode\n * @param {String} input The domain name or email address to convert, as a\n * Unicode string.\n * @returns {String} The Punycode representation of the given domain name or\n * email address.\n */\nvar toASCII = function toASCII(input) {\n\treturn mapDomain(input, function (string) {\n\t\treturn regexNonASCII.test(string) ? 'xn--' + encode(string) : string;\n\t});\n};\n\n/*--------------------------------------------------------------------------*/\n\n/** Define the public API */\nvar punycode = {\n\t/**\n * A string representing the current Punycode.js version number.\n * @memberOf punycode\n * @type String\n */\n\t'version': '2.1.0',\n\t/**\n * An object of methods to convert from JavaScript's internal character\n * representation (UCS-2) to Unicode code points, and back.\n * @see \n * @memberOf punycode\n * @type Object\n */\n\t'ucs2': {\n\t\t'decode': ucs2decode,\n\t\t'encode': ucs2encode\n\t},\n\t'decode': decode,\n\t'encode': encode,\n\t'toASCII': toASCII,\n\t'toUnicode': toUnicode\n};\n\n/**\n * URI.js\n *\n * @fileoverview An RFC 3986 compliant, scheme extendable URI parsing/validating/resolving library for JavaScript.\n * @author Gary Court\n * @see http://github.com/garycourt/uri-js\n */\n/**\n * Copyright 2011 Gary Court. All rights reserved.\n *\n * Redistribution and use in source and binary forms, with or without modification, are\n * permitted provided that the following conditions are met:\n *\n * 1. Redistributions of source code must retain the above copyright notice, this list of\n * conditions and the following disclaimer.\n *\n * 2. Redistributions in binary form must reproduce the above copyright notice, this list\n * of conditions and the following disclaimer in the documentation and/or other materials\n * provided with the distribution.\n *\n * THIS SOFTWARE IS PROVIDED BY GARY COURT ``AS IS'' AND ANY EXPRESS OR IMPLIED\n * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND\n * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL GARY COURT OR\n * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\n * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\n * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON\n * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING\n * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF\n * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n *\n * The views and conclusions contained in the software and documentation are those of the\n * authors and should not be interpreted as representing official policies, either expressed\n * or implied, of Gary Court.\n */\nvar SCHEMES = {};\nfunction pctEncChar(chr) {\n var c = chr.charCodeAt(0);\n var e = void 0;\n if (c < 16) e = \"%0\" + c.toString(16).toUpperCase();else if (c < 128) e = \"%\" + c.toString(16).toUpperCase();else if (c < 2048) e = \"%\" + (c >> 6 | 192).toString(16).toUpperCase() + \"%\" + (c & 63 | 128).toString(16).toUpperCase();else e = \"%\" + (c >> 12 | 224).toString(16).toUpperCase() + \"%\" + (c >> 6 & 63 | 128).toString(16).toUpperCase() + \"%\" + (c & 63 | 128).toString(16).toUpperCase();\n return e;\n}\nfunction pctDecChars(str) {\n var newStr = \"\";\n var i = 0;\n var il = str.length;\n while (i < il) {\n var c = parseInt(str.substr(i + 1, 2), 16);\n if (c < 128) {\n newStr += String.fromCharCode(c);\n i += 3;\n } else if (c >= 194 && c < 224) {\n if (il - i >= 6) {\n var c2 = parseInt(str.substr(i + 4, 2), 16);\n newStr += String.fromCharCode((c & 31) << 6 | c2 & 63);\n } else {\n newStr += str.substr(i, 6);\n }\n i += 6;\n } else if (c >= 224) {\n if (il - i >= 9) {\n var _c = parseInt(str.substr(i + 4, 2), 16);\n var c3 = parseInt(str.substr(i + 7, 2), 16);\n newStr += String.fromCharCode((c & 15) << 12 | (_c & 63) << 6 | c3 & 63);\n } else {\n newStr += str.substr(i, 9);\n }\n i += 9;\n } else {\n newStr += str.substr(i, 3);\n i += 3;\n }\n }\n return newStr;\n}\nfunction _normalizeComponentEncoding(components, protocol) {\n function decodeUnreserved(str) {\n var decStr = pctDecChars(str);\n return !decStr.match(protocol.UNRESERVED) ? str : decStr;\n }\n if (components.scheme) components.scheme = String(components.scheme).replace(protocol.PCT_ENCODED, decodeUnreserved).toLowerCase().replace(protocol.NOT_SCHEME, \"\");\n if (components.userinfo !== undefined) components.userinfo = String(components.userinfo).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(protocol.NOT_USERINFO, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase);\n if (components.host !== undefined) components.host = String(components.host).replace(protocol.PCT_ENCODED, decodeUnreserved).toLowerCase().replace(protocol.NOT_HOST, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase);\n if (components.path !== undefined) components.path = String(components.path).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(components.scheme ? 
protocol.NOT_PATH : protocol.NOT_PATH_NOSCHEME, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase);\n if (components.query !== undefined) components.query = String(components.query).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(protocol.NOT_QUERY, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase);\n if (components.fragment !== undefined) components.fragment = String(components.fragment).replace(protocol.PCT_ENCODED, decodeUnreserved).replace(protocol.NOT_FRAGMENT, pctEncChar).replace(protocol.PCT_ENCODED, toUpperCase);\n return components;\n}\n\nfunction _stripLeadingZeros(str) {\n return str.replace(/^0*(.*)/, \"$1\") || \"0\";\n}\nfunction _normalizeIPv4(host, protocol) {\n var matches = host.match(protocol.IPV4ADDRESS) || [];\n\n var _matches = slicedToArray(matches, 2),\n address = _matches[1];\n\n if (address) {\n return address.split(\".\").map(_stripLeadingZeros).join(\".\");\n } else {\n return host;\n }\n}\nfunction _normalizeIPv6(host, protocol) {\n var matches = host.match(protocol.IPV6ADDRESS) || [];\n\n var _matches2 = slicedToArray(matches, 3),\n address = _matches2[1],\n zone = _matches2[2];\n\n if (address) {\n var _address$toLowerCase$ = address.toLowerCase().split('::').reverse(),\n _address$toLowerCase$2 = slicedToArray(_address$toLowerCase$, 2),\n last = _address$toLowerCase$2[0],\n first = _address$toLowerCase$2[1];\n\n var firstFields = first ? first.split(\":\").map(_stripLeadingZeros) : [];\n var lastFields = last.split(\":\").map(_stripLeadingZeros);\n var isLastFieldIPv4Address = protocol.IPV4ADDRESS.test(lastFields[lastFields.length - 1]);\n var fieldCount = isLastFieldIPv4Address ? 7 : 8;\n var lastFieldsStart = lastFields.length - fieldCount;\n var fields = Array(fieldCount);\n for (var x = 0; x < fieldCount; ++x) {\n fields[x] = firstFields[x] || lastFields[lastFieldsStart + x] || '';\n }\n if (isLastFieldIPv4Address) {\n fields[fieldCount - 1] = _normalizeIPv4(fields[fieldCount - 1], protocol);\n }\n var allZeroFields = fields.reduce(function (acc, field, index) {\n if (!field || field === \"0\") {\n var lastLongest = acc[acc.length - 1];\n if (lastLongest && lastLongest.index + lastLongest.length === index) {\n lastLongest.length++;\n } else {\n acc.push({ index: index, length: 1 });\n }\n }\n return acc;\n }, []);\n var longestZeroFields = allZeroFields.sort(function (a, b) {\n return b.length - a.length;\n })[0];\n var newHost = void 0;\n if (longestZeroFields && longestZeroFields.length > 1) {\n var newFirst = fields.slice(0, longestZeroFields.index);\n var newLast = fields.slice(longestZeroFields.index + longestZeroFields.length);\n newHost = newFirst.join(\":\") + \"::\" + newLast.join(\":\");\n } else {\n newHost = fields.join(\":\");\n }\n if (zone) {\n newHost += \"%\" + zone;\n }\n return newHost;\n } else {\n return host;\n }\n}\nvar URI_PARSE = /^(?:([^:\\/?#]+):)?(?:\\/\\/((?:([^\\/?#@]*)@)?(\\[[^\\/?#\\]]+\\]|[^\\/?#:]*)(?:\\:(\\d*))?))?([^?#]*)(?:\\?([^#]*))?(?:#((?:.|\\n|\\r)*))?/i;\nvar NO_MATCH_IS_UNDEFINED = \"\".match(/(){0}/)[1] === undefined;\nfunction parse(uriString) {\n var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};\n\n var components = {};\n var protocol = options.iri !== false ? IRI_PROTOCOL : URI_PROTOCOL;\n if (options.reference === \"suffix\") uriString = (options.scheme ? 
options.scheme + \":\" : \"\") + \"//\" + uriString;\n var matches = uriString.match(URI_PARSE);\n if (matches) {\n if (NO_MATCH_IS_UNDEFINED) {\n //store each component\n components.scheme = matches[1];\n components.userinfo = matches[3];\n components.host = matches[4];\n components.port = parseInt(matches[5], 10);\n components.path = matches[6] || \"\";\n components.query = matches[7];\n components.fragment = matches[8];\n //fix port number\n if (isNaN(components.port)) {\n components.port = matches[5];\n }\n } else {\n //IE FIX for improper RegExp matching\n //store each component\n components.scheme = matches[1] || undefined;\n components.userinfo = uriString.indexOf(\"@\") !== -1 ? matches[3] : undefined;\n components.host = uriString.indexOf(\"//\") !== -1 ? matches[4] : undefined;\n components.port = parseInt(matches[5], 10);\n components.path = matches[6] || \"\";\n components.query = uriString.indexOf(\"?\") !== -1 ? matches[7] : undefined;\n components.fragment = uriString.indexOf(\"#\") !== -1 ? matches[8] : undefined;\n //fix port number\n if (isNaN(components.port)) {\n components.port = uriString.match(/\\/\\/(?:.|\\n)*\\:(?:\\/|\\?|\\#|$)/) ? matches[4] : undefined;\n }\n }\n if (components.host) {\n //normalize IP hosts\n components.host = _normalizeIPv6(_normalizeIPv4(components.host, protocol), protocol);\n }\n //determine reference type\n if (components.scheme === undefined && components.userinfo === undefined && components.host === undefined && components.port === undefined && !components.path && components.query === undefined) {\n components.reference = \"same-document\";\n } else if (components.scheme === undefined) {\n components.reference = \"relative\";\n } else if (components.fragment === undefined) {\n components.reference = \"absolute\";\n } else {\n components.reference = \"uri\";\n }\n //check for reference errors\n if (options.reference && options.reference !== \"suffix\" && options.reference !== components.reference) {\n components.error = components.error || \"URI is not a \" + options.reference + \" reference.\";\n }\n //find scheme handler\n var schemeHandler = SCHEMES[(options.scheme || components.scheme || \"\").toLowerCase()];\n //check if scheme can't handle IRIs\n if (!options.unicodeSupport && (!schemeHandler || !schemeHandler.unicodeSupport)) {\n //if host component is a domain name\n if (components.host && (options.domainHost || schemeHandler && schemeHandler.domainHost)) {\n //convert Unicode IDN -> ASCII IDN\n try {\n components.host = punycode.toASCII(components.host.replace(protocol.PCT_ENCODED, pctDecChars).toLowerCase());\n } catch (e) {\n components.error = components.error || \"Host's domain name can not be converted to ASCII via punycode: \" + e;\n }\n }\n //convert IRI -> URI\n _normalizeComponentEncoding(components, URI_PROTOCOL);\n } else {\n //normalize encodings\n _normalizeComponentEncoding(components, protocol);\n }\n //perform scheme specific parsing\n if (schemeHandler && schemeHandler.parse) {\n schemeHandler.parse(components, options);\n }\n } else {\n components.error = components.error || \"URI can not be parsed.\";\n }\n return components;\n}\n\nfunction _recomposeAuthority(components, options) {\n var protocol = options.iri !== false ? 
IRI_PROTOCOL : URI_PROTOCOL;\n var uriTokens = [];\n if (components.userinfo !== undefined) {\n uriTokens.push(components.userinfo);\n uriTokens.push(\"@\");\n }\n if (components.host !== undefined) {\n //normalize IP hosts, add brackets and escape zone separator for IPv6\n uriTokens.push(_normalizeIPv6(_normalizeIPv4(String(components.host), protocol), protocol).replace(protocol.IPV6ADDRESS, function (_, $1, $2) {\n return \"[\" + $1 + ($2 ? \"%25\" + $2 : \"\") + \"]\";\n }));\n }\n if (typeof components.port === \"number\" || typeof components.port === \"string\") {\n uriTokens.push(\":\");\n uriTokens.push(String(components.port));\n }\n return uriTokens.length ? uriTokens.join(\"\") : undefined;\n}\n\nvar RDS1 = /^\\.\\.?\\//;\nvar RDS2 = /^\\/\\.(\\/|$)/;\nvar RDS3 = /^\\/\\.\\.(\\/|$)/;\nvar RDS5 = /^\\/?(?:.|\\n)*?(?=\\/|$)/;\nfunction removeDotSegments(input) {\n var output = [];\n while (input.length) {\n if (input.match(RDS1)) {\n input = input.replace(RDS1, \"\");\n } else if (input.match(RDS2)) {\n input = input.replace(RDS2, \"/\");\n } else if (input.match(RDS3)) {\n input = input.replace(RDS3, \"/\");\n output.pop();\n } else if (input === \".\" || input === \"..\") {\n input = \"\";\n } else {\n var im = input.match(RDS5);\n if (im) {\n var s = im[0];\n input = input.slice(s.length);\n output.push(s);\n } else {\n throw new Error(\"Unexpected dot segment condition\");\n }\n }\n }\n return output.join(\"\");\n}\n\nfunction serialize(components) {\n var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};\n\n var protocol = options.iri ? IRI_PROTOCOL : URI_PROTOCOL;\n var uriTokens = [];\n //find scheme handler\n var schemeHandler = SCHEMES[(options.scheme || components.scheme || \"\").toLowerCase()];\n //perform scheme specific serialization\n if (schemeHandler && schemeHandler.serialize) schemeHandler.serialize(components, options);\n if (components.host) {\n //if host component is an IPv6 address\n if (protocol.IPV6ADDRESS.test(components.host)) {}\n //TODO: normalize IPv6 address as per RFC 5952\n\n //if host component is a domain name\n else if (options.domainHost || schemeHandler && schemeHandler.domainHost) {\n //convert IDN via punycode\n try {\n components.host = !options.iri ? punycode.toASCII(components.host.replace(protocol.PCT_ENCODED, pctDecChars).toLowerCase()) : punycode.toUnicode(components.host);\n } catch (e) {\n components.error = components.error || \"Host's domain name can not be converted to \" + (!options.iri ? 
\"ASCII\" : \"Unicode\") + \" via punycode: \" + e;\n }\n }\n }\n //normalize encoding\n _normalizeComponentEncoding(components, protocol);\n if (options.reference !== \"suffix\" && components.scheme) {\n uriTokens.push(components.scheme);\n uriTokens.push(\":\");\n }\n var authority = _recomposeAuthority(components, options);\n if (authority !== undefined) {\n if (options.reference !== \"suffix\") {\n uriTokens.push(\"//\");\n }\n uriTokens.push(authority);\n if (components.path && components.path.charAt(0) !== \"/\") {\n uriTokens.push(\"/\");\n }\n }\n if (components.path !== undefined) {\n var s = components.path;\n if (!options.absolutePath && (!schemeHandler || !schemeHandler.absolutePath)) {\n s = removeDotSegments(s);\n }\n if (authority === undefined) {\n s = s.replace(/^\\/\\//, \"/%2F\"); //don't allow the path to start with \"//\"\n }\n uriTokens.push(s);\n }\n if (components.query !== undefined) {\n uriTokens.push(\"?\");\n uriTokens.push(components.query);\n }\n if (components.fragment !== undefined) {\n uriTokens.push(\"#\");\n uriTokens.push(components.fragment);\n }\n return uriTokens.join(\"\"); //merge tokens into a string\n}\n\nfunction resolveComponents(base, relative) {\n var options = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};\n var skipNormalization = arguments[3];\n\n var target = {};\n if (!skipNormalization) {\n base = parse(serialize(base, options), options); //normalize base components\n relative = parse(serialize(relative, options), options); //normalize relative components\n }\n options = options || {};\n if (!options.tolerant && relative.scheme) {\n target.scheme = relative.scheme;\n //target.authority = relative.authority;\n target.userinfo = relative.userinfo;\n target.host = relative.host;\n target.port = relative.port;\n target.path = removeDotSegments(relative.path || \"\");\n target.query = relative.query;\n } else {\n if (relative.userinfo !== undefined || relative.host !== undefined || relative.port !== undefined) {\n //target.authority = relative.authority;\n target.userinfo = relative.userinfo;\n target.host = relative.host;\n target.port = relative.port;\n target.path = removeDotSegments(relative.path || \"\");\n target.query = relative.query;\n } else {\n if (!relative.path) {\n target.path = base.path;\n if (relative.query !== undefined) {\n target.query = relative.query;\n } else {\n target.query = base.query;\n }\n } else {\n if (relative.path.charAt(0) === \"/\") {\n target.path = removeDotSegments(relative.path);\n } else {\n if ((base.userinfo !== undefined || base.host !== undefined || base.port !== undefined) && !base.path) {\n target.path = \"/\" + relative.path;\n } else if (!base.path) {\n target.path = relative.path;\n } else {\n target.path = base.path.slice(0, base.path.lastIndexOf(\"/\") + 1) + relative.path;\n }\n target.path = removeDotSegments(target.path);\n }\n target.query = relative.query;\n }\n //target.authority = base.authority;\n target.userinfo = base.userinfo;\n target.host = base.host;\n target.port = base.port;\n }\n target.scheme = base.scheme;\n }\n target.fragment = relative.fragment;\n return target;\n}\n\nfunction resolve(baseURI, relativeURI, options) {\n var schemelessOptions = assign({ scheme: 'null' }, options);\n return serialize(resolveComponents(parse(baseURI, schemelessOptions), parse(relativeURI, schemelessOptions), schemelessOptions, true), schemelessOptions);\n}\n\nfunction normalize(uri, options) {\n if (typeof uri === \"string\") {\n uri = serialize(parse(uri, options), 
options);\n } else if (typeOf(uri) === \"object\") {\n uri = parse(serialize(uri, options), options);\n }\n return uri;\n}\n\nfunction equal(uriA, uriB, options) {\n if (typeof uriA === \"string\") {\n uriA = serialize(parse(uriA, options), options);\n } else if (typeOf(uriA) === \"object\") {\n uriA = serialize(uriA, options);\n }\n if (typeof uriB === \"string\") {\n uriB = serialize(parse(uriB, options), options);\n } else if (typeOf(uriB) === \"object\") {\n uriB = serialize(uriB, options);\n }\n return uriA === uriB;\n}\n\nfunction escapeComponent(str, options) {\n return str && str.toString().replace(!options || !options.iri ? URI_PROTOCOL.ESCAPE : IRI_PROTOCOL.ESCAPE, pctEncChar);\n}\n\nfunction unescapeComponent(str, options) {\n return str && str.toString().replace(!options || !options.iri ? URI_PROTOCOL.PCT_ENCODED : IRI_PROTOCOL.PCT_ENCODED, pctDecChars);\n}\n\nvar handler = {\n scheme: \"http\",\n domainHost: true,\n parse: function parse(components, options) {\n //report missing host\n if (!components.host) {\n components.error = components.error || \"HTTP URIs must have a host.\";\n }\n return components;\n },\n serialize: function serialize(components, options) {\n var secure = String(components.scheme).toLowerCase() === \"https\";\n //normalize the default port\n if (components.port === (secure ? 443 : 80) || components.port === \"\") {\n components.port = undefined;\n }\n //normalize the empty path\n if (!components.path) {\n components.path = \"/\";\n }\n //NOTE: We do not parse query strings for HTTP URIs\n //as WWW Form Url Encoded query strings are part of the HTML4+ spec,\n //and not the HTTP spec.\n return components;\n }\n};\n\nvar handler$1 = {\n scheme: \"https\",\n domainHost: handler.domainHost,\n parse: handler.parse,\n serialize: handler.serialize\n};\n\nfunction isSecure(wsComponents) {\n return typeof wsComponents.secure === 'boolean' ? wsComponents.secure : String(wsComponents.scheme).toLowerCase() === \"wss\";\n}\n//RFC 6455\nvar handler$2 = {\n scheme: \"ws\",\n domainHost: true,\n parse: function parse(components, options) {\n var wsComponents = components;\n //indicate if the secure flag is set\n wsComponents.secure = isSecure(wsComponents);\n //construct resouce name\n wsComponents.resourceName = (wsComponents.path || '/') + (wsComponents.query ? '?' + wsComponents.query : '');\n wsComponents.path = undefined;\n wsComponents.query = undefined;\n return wsComponents;\n },\n serialize: function serialize(wsComponents, options) {\n //normalize the default port\n if (wsComponents.port === (isSecure(wsComponents) ? 443 : 80) || wsComponents.port === \"\") {\n wsComponents.port = undefined;\n }\n //ensure scheme matches secure flag\n if (typeof wsComponents.secure === 'boolean') {\n wsComponents.scheme = wsComponents.secure ? 'wss' : 'ws';\n wsComponents.secure = undefined;\n }\n //reconstruct path from resource name\n if (wsComponents.resourceName) {\n var _wsComponents$resourc = wsComponents.resourceName.split('?'),\n _wsComponents$resourc2 = slicedToArray(_wsComponents$resourc, 2),\n path = _wsComponents$resourc2[0],\n query = _wsComponents$resourc2[1];\n\n wsComponents.path = path && path !== '/' ? 
path : undefined;\n wsComponents.query = query;\n wsComponents.resourceName = undefined;\n }\n //forbid fragment component\n wsComponents.fragment = undefined;\n return wsComponents;\n }\n};\n\nvar handler$3 = {\n scheme: \"wss\",\n domainHost: handler$2.domainHost,\n parse: handler$2.parse,\n serialize: handler$2.serialize\n};\n\nvar O = {};\nvar isIRI = true;\n//RFC 3986\nvar UNRESERVED$$ = \"[A-Za-z0-9\\\\-\\\\.\\\\_\\\\~\" + (isIRI ? \"\\\\xA0-\\\\u200D\\\\u2010-\\\\u2029\\\\u202F-\\\\uD7FF\\\\uF900-\\\\uFDCF\\\\uFDF0-\\\\uFFEF\" : \"\") + \"]\";\nvar HEXDIG$$ = \"[0-9A-Fa-f]\"; //case-insensitive\nvar PCT_ENCODED$ = subexp(subexp(\"%[EFef]\" + HEXDIG$$ + \"%\" + HEXDIG$$ + HEXDIG$$ + \"%\" + HEXDIG$$ + HEXDIG$$) + \"|\" + subexp(\"%[89A-Fa-f]\" + HEXDIG$$ + \"%\" + HEXDIG$$ + HEXDIG$$) + \"|\" + subexp(\"%\" + HEXDIG$$ + HEXDIG$$)); //expanded\n//RFC 5322, except these symbols as per RFC 6068: @ : / ? # [ ] & ; =\n//const ATEXT$$ = \"[A-Za-z0-9\\\\!\\\\#\\\\$\\\\%\\\\&\\\\'\\\\*\\\\+\\\\-\\\\/\\\\=\\\\?\\\\^\\\\_\\\\`\\\\{\\\\|\\\\}\\\\~]\";\n//const WSP$$ = \"[\\\\x20\\\\x09]\";\n//const OBS_QTEXT$$ = \"[\\\\x01-\\\\x08\\\\x0B\\\\x0C\\\\x0E-\\\\x1F\\\\x7F]\"; //(%d1-8 / %d11-12 / %d14-31 / %d127)\n//const QTEXT$$ = merge(\"[\\\\x21\\\\x23-\\\\x5B\\\\x5D-\\\\x7E]\", OBS_QTEXT$$); //%d33 / %d35-91 / %d93-126 / obs-qtext\n//const VCHAR$$ = \"[\\\\x21-\\\\x7E]\";\n//const WSP$$ = \"[\\\\x20\\\\x09]\";\n//const OBS_QP$ = subexp(\"\\\\\\\\\" + merge(\"[\\\\x00\\\\x0D\\\\x0A]\", OBS_QTEXT$$)); //%d0 / CR / LF / obs-qtext\n//const FWS$ = subexp(subexp(WSP$$ + \"*\" + \"\\\\x0D\\\\x0A\") + \"?\" + WSP$$ + \"+\");\n//const QUOTED_PAIR$ = subexp(subexp(\"\\\\\\\\\" + subexp(VCHAR$$ + \"|\" + WSP$$)) + \"|\" + OBS_QP$);\n//const QUOTED_STRING$ = subexp('\\\\\"' + subexp(FWS$ + \"?\" + QCONTENT$) + \"*\" + FWS$ + \"?\" + '\\\\\"');\nvar ATEXT$$ = \"[A-Za-z0-9\\\\!\\\\$\\\\%\\\\'\\\\*\\\\+\\\\-\\\\^\\\\_\\\\`\\\\{\\\\|\\\\}\\\\~]\";\nvar QTEXT$$ = \"[\\\\!\\\\$\\\\%\\\\'\\\\(\\\\)\\\\*\\\\+\\\\,\\\\-\\\\.0-9\\\\<\\\\>A-Z\\\\x5E-\\\\x7E]\";\nvar VCHAR$$ = merge(QTEXT$$, \"[\\\\\\\"\\\\\\\\]\");\nvar SOME_DELIMS$$ = \"[\\\\!\\\\$\\\\'\\\\(\\\\)\\\\*\\\\+\\\\,\\\\;\\\\:\\\\@]\";\nvar UNRESERVED = new RegExp(UNRESERVED$$, \"g\");\nvar PCT_ENCODED = new RegExp(PCT_ENCODED$, \"g\");\nvar NOT_LOCAL_PART = new RegExp(merge(\"[^]\", ATEXT$$, \"[\\\\.]\", '[\\\\\"]', VCHAR$$), \"g\");\nvar NOT_HFNAME = new RegExp(merge(\"[^]\", UNRESERVED$$, SOME_DELIMS$$), \"g\");\nvar NOT_HFVALUE = NOT_HFNAME;\nfunction decodeUnreserved(str) {\n var decStr = pctDecChars(str);\n return !decStr.match(UNRESERVED) ? str : decStr;\n}\nvar handler$4 = {\n scheme: \"mailto\",\n parse: function parse$$1(components, options) {\n var mailtoComponents = components;\n var to = mailtoComponents.to = mailtoComponents.path ? 
mailtoComponents.path.split(\",\") : [];\n mailtoComponents.path = undefined;\n if (mailtoComponents.query) {\n var unknownHeaders = false;\n var headers = {};\n var hfields = mailtoComponents.query.split(\"&\");\n for (var x = 0, xl = hfields.length; x < xl; ++x) {\n var hfield = hfields[x].split(\"=\");\n switch (hfield[0]) {\n case \"to\":\n var toAddrs = hfield[1].split(\",\");\n for (var _x = 0, _xl = toAddrs.length; _x < _xl; ++_x) {\n to.push(toAddrs[_x]);\n }\n break;\n case \"subject\":\n mailtoComponents.subject = unescapeComponent(hfield[1], options);\n break;\n case \"body\":\n mailtoComponents.body = unescapeComponent(hfield[1], options);\n break;\n default:\n unknownHeaders = true;\n headers[unescapeComponent(hfield[0], options)] = unescapeComponent(hfield[1], options);\n break;\n }\n }\n if (unknownHeaders) mailtoComponents.headers = headers;\n }\n mailtoComponents.query = undefined;\n for (var _x2 = 0, _xl2 = to.length; _x2 < _xl2; ++_x2) {\n var addr = to[_x2].split(\"@\");\n addr[0] = unescapeComponent(addr[0]);\n if (!options.unicodeSupport) {\n //convert Unicode IDN -> ASCII IDN\n try {\n addr[1] = punycode.toASCII(unescapeComponent(addr[1], options).toLowerCase());\n } catch (e) {\n mailtoComponents.error = mailtoComponents.error || \"Email address's domain name can not be converted to ASCII via punycode: \" + e;\n }\n } else {\n addr[1] = unescapeComponent(addr[1], options).toLowerCase();\n }\n to[_x2] = addr.join(\"@\");\n }\n return mailtoComponents;\n },\n serialize: function serialize$$1(mailtoComponents, options) {\n var components = mailtoComponents;\n var to = toArray(mailtoComponents.to);\n if (to) {\n for (var x = 0, xl = to.length; x < xl; ++x) {\n var toAddr = String(to[x]);\n var atIdx = toAddr.lastIndexOf(\"@\");\n var localPart = toAddr.slice(0, atIdx).replace(PCT_ENCODED, decodeUnreserved).replace(PCT_ENCODED, toUpperCase).replace(NOT_LOCAL_PART, pctEncChar);\n var domain = toAddr.slice(atIdx + 1);\n //convert IDN via punycode\n try {\n domain = !options.iri ? punycode.toASCII(unescapeComponent(domain, options).toLowerCase()) : punycode.toUnicode(domain);\n } catch (e) {\n components.error = components.error || \"Email address's domain name can not be converted to \" + (!options.iri ? 
\"ASCII\" : \"Unicode\") + \" via punycode: \" + e;\n }\n to[x] = localPart + \"@\" + domain;\n }\n components.path = to.join(\",\");\n }\n var headers = mailtoComponents.headers = mailtoComponents.headers || {};\n if (mailtoComponents.subject) headers[\"subject\"] = mailtoComponents.subject;\n if (mailtoComponents.body) headers[\"body\"] = mailtoComponents.body;\n var fields = [];\n for (var name in headers) {\n if (headers[name] !== O[name]) {\n fields.push(name.replace(PCT_ENCODED, decodeUnreserved).replace(PCT_ENCODED, toUpperCase).replace(NOT_HFNAME, pctEncChar) + \"=\" + headers[name].replace(PCT_ENCODED, decodeUnreserved).replace(PCT_ENCODED, toUpperCase).replace(NOT_HFVALUE, pctEncChar));\n }\n }\n if (fields.length) {\n components.query = fields.join(\"&\");\n }\n return components;\n }\n};\n\nvar URN_PARSE = /^([^\\:]+)\\:(.*)/;\n//RFC 2141\nvar handler$5 = {\n scheme: \"urn\",\n parse: function parse$$1(components, options) {\n var matches = components.path && components.path.match(URN_PARSE);\n var urnComponents = components;\n if (matches) {\n var scheme = options.scheme || urnComponents.scheme || \"urn\";\n var nid = matches[1].toLowerCase();\n var nss = matches[2];\n var urnScheme = scheme + \":\" + (options.nid || nid);\n var schemeHandler = SCHEMES[urnScheme];\n urnComponents.nid = nid;\n urnComponents.nss = nss;\n urnComponents.path = undefined;\n if (schemeHandler) {\n urnComponents = schemeHandler.parse(urnComponents, options);\n }\n } else {\n urnComponents.error = urnComponents.error || \"URN can not be parsed.\";\n }\n return urnComponents;\n },\n serialize: function serialize$$1(urnComponents, options) {\n var scheme = options.scheme || urnComponents.scheme || \"urn\";\n var nid = urnComponents.nid;\n var urnScheme = scheme + \":\" + (options.nid || nid);\n var schemeHandler = SCHEMES[urnScheme];\n if (schemeHandler) {\n urnComponents = schemeHandler.serialize(urnComponents, options);\n }\n var uriComponents = urnComponents;\n var nss = urnComponents.nss;\n uriComponents.path = (nid || options.nid) + \":\" + nss;\n return uriComponents;\n }\n};\n\nvar UUID = /^[0-9A-Fa-f]{8}(?:\\-[0-9A-Fa-f]{4}){3}\\-[0-9A-Fa-f]{12}$/;\n//RFC 4122\nvar handler$6 = {\n scheme: \"urn:uuid\",\n parse: function parse(urnComponents, options) {\n var uuidComponents = urnComponents;\n uuidComponents.uuid = uuidComponents.nss;\n uuidComponents.nss = undefined;\n if (!options.tolerant && (!uuidComponents.uuid || !uuidComponents.uuid.match(UUID))) {\n uuidComponents.error = uuidComponents.error || \"UUID is not valid.\";\n }\n return uuidComponents;\n },\n serialize: function serialize(uuidComponents, options) {\n var urnComponents = uuidComponents;\n //normalize UUID\n urnComponents.nss = (uuidComponents.uuid || \"\").toLowerCase();\n return urnComponents;\n }\n};\n\nSCHEMES[handler.scheme] = handler;\nSCHEMES[handler$1.scheme] = handler$1;\nSCHEMES[handler$2.scheme] = handler$2;\nSCHEMES[handler$3.scheme] = handler$3;\nSCHEMES[handler$4.scheme] = handler$4;\nSCHEMES[handler$5.scheme] = handler$5;\nSCHEMES[handler$6.scheme] = handler$6;\n\nexports.SCHEMES = SCHEMES;\nexports.pctEncChar = pctEncChar;\nexports.pctDecChars = pctDecChars;\nexports.parse = parse;\nexports.removeDotSegments = removeDotSegments;\nexports.serialize = serialize;\nexports.resolveComponents = resolveComponents;\nexports.resolve = resolve;\nexports.normalize = normalize;\nexports.equal = equal;\nexports.escapeComponent = escapeComponent;\nexports.unescapeComponent = unescapeComponent;\n\nObject.defineProperty(exports, 
'__esModule', { value: true });\n\n})));\n//# sourceMappingURL=uri.all.js.map\n","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nObject.defineProperty(exports, \"v1\", {\n enumerable: true,\n get: function () {\n return _v.default;\n }\n});\nObject.defineProperty(exports, \"v3\", {\n enumerable: true,\n get: function () {\n return _v2.default;\n }\n});\nObject.defineProperty(exports, \"v4\", {\n enumerable: true,\n get: function () {\n return _v3.default;\n }\n});\nObject.defineProperty(exports, \"v5\", {\n enumerable: true,\n get: function () {\n return _v4.default;\n }\n});\nObject.defineProperty(exports, \"NIL\", {\n enumerable: true,\n get: function () {\n return _nil.default;\n }\n});\nObject.defineProperty(exports, \"version\", {\n enumerable: true,\n get: function () {\n return _version.default;\n }\n});\nObject.defineProperty(exports, \"validate\", {\n enumerable: true,\n get: function () {\n return _validate.default;\n }\n});\nObject.defineProperty(exports, \"stringify\", {\n enumerable: true,\n get: function () {\n return _stringify.default;\n }\n});\nObject.defineProperty(exports, \"parse\", {\n enumerable: true,\n get: function () {\n return _parse.default;\n }\n});\n\nvar _v = _interopRequireDefault(require(\"./v1.js\"));\n\nvar _v2 = _interopRequireDefault(require(\"./v3.js\"));\n\nvar _v3 = _interopRequireDefault(require(\"./v4.js\"));\n\nvar _v4 = _interopRequireDefault(require(\"./v5.js\"));\n\nvar _nil = _interopRequireDefault(require(\"./nil.js\"));\n\nvar _version = _interopRequireDefault(require(\"./version.js\"));\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nvar _parse = _interopRequireDefault(require(\"./parse.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction md5(bytes) {\n if (Array.isArray(bytes)) {\n bytes = Buffer.from(bytes);\n } else if (typeof bytes === 'string') {\n bytes = Buffer.from(bytes, 'utf8');\n }\n\n return _crypto.default.createHash('md5').update(bytes).digest();\n}\n\nvar _default = md5;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\nvar _default = '00000000-0000-0000-0000-000000000000';\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction parse(uuid) {\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Invalid UUID');\n }\n\n let v;\n const arr = new Uint8Array(16); // Parse ########-....-....-....-............\n\n arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24;\n arr[1] = v >>> 16 & 0xff;\n arr[2] = v >>> 8 & 0xff;\n arr[3] = v & 0xff; // Parse ........-####-....-....-............\n\n arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8;\n arr[5] = v & 0xff; // Parse ........-....-####-....-............\n\n arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8;\n arr[7] = v & 0xff; // Parse ........-....-....-####-............\n\n arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8;\n arr[9] = v & 0xff; // Parse ........-....-....-....-############\n // (Use \"/\" to avoid 32-bit truncation when bit-shifting high-order bytes)\n\n arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff;\n arr[11] = v / 0x100000000 & 0xff;\n arr[12] = v >>> 24 & 0xff;\n arr[13] = v >>> 16 & 0xff;\n arr[14] = v >>> 8 & 0xff;\n arr[15] = v & 0xff;\n return arr;\n}\n\nvar _default = parse;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\nvar _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = rng;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate\n\nlet poolPtr = rnds8Pool.length;\n\nfunction rng() {\n if (poolPtr > rnds8Pool.length - 16) {\n _crypto.default.randomFillSync(rnds8Pool);\n\n poolPtr = 0;\n }\n\n return rnds8Pool.slice(poolPtr, poolPtr += 16);\n}","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction sha1(bytes) {\n if (Array.isArray(bytes)) {\n bytes = Buffer.from(bytes);\n } else if (typeof bytes === 'string') {\n bytes = Buffer.from(bytes, 'utf8');\n }\n\n return _crypto.default.createHash('sha1').update(bytes).digest();\n}\n\nvar _default = sha1;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\n/**\n * Convert array of 16 byte values to UUID string format of the form:\n * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX\n */\nconst byteToHex = [];\n\nfor (let i = 0; i < 256; ++i) {\n byteToHex.push((i + 0x100).toString(16).substr(1));\n}\n\nfunction stringify(arr, offset = 0) {\n // Note: Be careful editing this code! It's been tuned for performance\n // and works in ways you may not expect. 
See https://github.com/uuidjs/uuid/pull/434\n const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one\n // of the following:\n // - One or more input array values don't map to a hex octet (leading to\n // \"undefined\" in the uuid)\n // - Invalid input values for the RFC `version` or `variant` fields\n\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Stringified UUID is invalid');\n }\n\n return uuid;\n}\n\nvar _default = stringify;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _rng = _interopRequireDefault(require(\"./rng.js\"));\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\n// **`v1()` - Generate time-based UUID**\n//\n// Inspired by https://github.com/LiosK/UUID.js\n// and http://docs.python.org/library/uuid.html\nlet _nodeId;\n\nlet _clockseq; // Previous uuid creation time\n\n\nlet _lastMSecs = 0;\nlet _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details\n\nfunction v1(options, buf, offset) {\n let i = buf && offset || 0;\n const b = buf || new Array(16);\n options = options || {};\n let node = options.node || _nodeId;\n let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not\n // specified. We do this lazily to minimize issues related to insufficient\n // system entropy. See #189\n\n if (node == null || clockseq == null) {\n const seedBytes = options.random || (options.rng || _rng.default)();\n\n if (node == null) {\n // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1)\n node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]];\n }\n\n if (clockseq == null) {\n // Per 4.2.2, randomize (14 bit) clockseq\n clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff;\n }\n } // UUID timestamps are 100 nano-second units since the Gregorian epoch,\n // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so\n // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'\n // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.\n\n\n let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock\n // cycle to simulate higher resolution clock\n\n let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs)\n\n const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression\n\n if (dt < 0 && options.clockseq === undefined) {\n clockseq = clockseq + 1 & 0x3fff;\n } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new\n // time interval\n\n\n if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) {\n nsecs = 0;\n } // Per 4.2.1.2 Throw error if too many uuids are requested\n\n\n if (nsecs >= 10000) {\n throw new Error(\"uuid.v1(): Can't create more than 10M uuids/sec\");\n }\n\n _lastMSecs = msecs;\n _lastNSecs = nsecs;\n _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch\n\n msecs += 12219292800000; // `time_low`\n\n const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000;\n b[i++] = tl >>> 24 & 0xff;\n b[i++] = tl >>> 16 & 0xff;\n b[i++] = tl >>> 8 & 0xff;\n b[i++] = tl & 0xff; // `time_mid`\n\n const tmh = msecs / 0x100000000 * 10000 & 0xfffffff;\n b[i++] = tmh >>> 8 & 0xff;\n b[i++] = tmh & 0xff; // `time_high_and_version`\n\n b[i++] = tmh >>> 24 & 0xf | 0x10; // include version\n\n b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)\n\n b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low`\n\n b[i++] = clockseq & 0xff; // `node`\n\n for (let n = 0; n < 6; ++n) {\n b[i + n] = node[n];\n }\n\n return buf || (0, _stringify.default)(b);\n}\n\nvar _default = v1;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _v = _interopRequireDefault(require(\"./v35.js\"));\n\nvar _md = _interopRequireDefault(require(\"./md5.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst v3 = (0, _v.default)('v3', 0x30, _md.default);\nvar _default = v3;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = _default;\nexports.URL = exports.DNS = void 0;\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nvar _parse = _interopRequireDefault(require(\"./parse.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction stringToBytes(str) {\n str = unescape(encodeURIComponent(str)); // UTF8 escape\n\n const bytes = [];\n\n for (let i = 0; i < str.length; ++i) {\n bytes.push(str.charCodeAt(i));\n }\n\n return bytes;\n}\n\nconst DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';\nexports.DNS = DNS;\nconst URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8';\nexports.URL = URL;\n\nfunction _default(name, version, hashfunc) {\n function generateUUID(value, namespace, buf, offset) {\n if (typeof value === 'string') {\n value = stringToBytes(value);\n }\n\n if (typeof namespace === 'string') {\n namespace = (0, _parse.default)(namespace);\n }\n\n if (namespace.length !== 16) {\n throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)');\n } // Compute hash of namespace and value, Per 4.3\n // Future: Use spread syntax when supported on all platforms, e.g. `bytes =\n // hashfunc([...namespace, ... 
value])`\n\n\n let bytes = new Uint8Array(16 + value.length);\n bytes.set(namespace);\n bytes.set(value, namespace.length);\n bytes = hashfunc(bytes);\n bytes[6] = bytes[6] & 0x0f | version;\n bytes[8] = bytes[8] & 0x3f | 0x80;\n\n if (buf) {\n offset = offset || 0;\n\n for (let i = 0; i < 16; ++i) {\n buf[offset + i] = bytes[i];\n }\n\n return buf;\n }\n\n return (0, _stringify.default)(bytes);\n } // Function#name is not settable on some platforms (#270)\n\n\n try {\n generateUUID.name = name; // eslint-disable-next-line no-empty\n } catch (err) {} // For CommonJS default export support\n\n\n generateUUID.DNS = DNS;\n generateUUID.URL = URL;\n return generateUUID;\n}","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _rng = _interopRequireDefault(require(\"./rng.js\"));\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction v4(options, buf, offset) {\n options = options || {};\n\n const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`\n\n\n rnds[6] = rnds[6] & 0x0f | 0x40;\n rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided\n\n if (buf) {\n offset = offset || 0;\n\n for (let i = 0; i < 16; ++i) {\n buf[offset + i] = rnds[i];\n }\n\n return buf;\n }\n\n return (0, _stringify.default)(rnds);\n}\n\nvar _default = v4;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _v = _interopRequireDefault(require(\"./v35.js\"));\n\nvar _sha = _interopRequireDefault(require(\"./sha1.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst v5 = (0, _v.default)('v5', 0x50, _sha.default);\nvar _default = v5;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _regex = _interopRequireDefault(require(\"./regex.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction validate(uuid) {\n return typeof uuid === 'string' && _regex.default.test(uuid);\n}\n\nvar _default = validate;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction version(uuid) {\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Invalid UUID');\n }\n\n return parseInt(uuid.substr(14, 1), 16);\n}\n\nvar _default = version;\nexports.default = _default;","module.exports = require(\"assert\");","module.exports = require(\"crypto\");","module.exports = require(\"events\");","module.exports = require(\"fs\");","module.exports = require(\"http\");","module.exports = require(\"https\");","module.exports = require(\"net\");","module.exports = require(\"os\");","module.exports = require(\"path\");","module.exports = require(\"tls\");","module.exports = require(\"util\");","// The module cache\nvar __webpack_module_cache__ = {};\n\n// The require function\nfunction __webpack_require__(moduleId) {\n\t// Check if module is in cache\n\tvar cachedModule = __webpack_module_cache__[moduleId];\n\tif (cachedModule !== undefined) {\n\t\treturn cachedModule.exports;\n\t}\n\t// Create a new module (and put it into the cache)\n\tvar module = __webpack_module_cache__[moduleId] = {\n\t\t// no module.id needed\n\t\t// no module.loaded needed\n\t\texports: {}\n\t};\n\n\t// Execute the module function\n\tvar threw = true;\n\ttry {\n\t\t__webpack_modules__[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\t\tthrew = false;\n\t} finally {\n\t\tif(threw) delete __webpack_module_cache__[moduleId];\n\t}\n\n\t// Return the exports of the module\n\treturn module.exports;\n}\n\n","\nif (typeof __webpack_require__ !== 'undefined') __webpack_require__.ab = __dirname + \"/\";","","// startup\n// Load entry module and return exports\n// This entry module is referenced by other modules so it can't be inlined\nvar __webpack_exports__ = __webpack_require__(3109);\n",""],"names":[],"sourceRoot":""} \ No newline at end of file diff --git a/dist/licenses.txt b/dist/licenses.txt index 747a90c..81f5db4 100644 --- a/dist/licenses.txt +++ b/dist/licenses.txt @@ -1,8 +1,71 @@ -node-fetch +@actions/core MIT The MIT License (MIT) -Copyright (c) 2016 - 2020 Node Fetch Team +Copyright 2019 GitHub + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +@actions/http-client +MIT +Actions Http Client for Node.js + +Copyright (c) GitHub, Inc. + +All rights reserved. 
+
+MIT License
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
+associated documentation files (the "Software"), to deal in the Software without restriction,
+including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
+LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+
+ajv
+MIT
+The MIT License (MIT)
+
+Copyright (c) 2015-2021 Evgeny Poberezkin
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+
+
+fast-deep-equal
+MIT
+MIT License
+
+Copyright (c) 2017 Evgeny Poberezkin
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
@@ -21,3 +84,81 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 SOFTWARE.
+
+
+json-schema-traverse
+MIT
+MIT License
+
+Copyright (c) 2017 Evgeny Poberezkin
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+
+tunnel
+MIT
+The MIT License (MIT)
+
+Copyright (c) 2012 Koichi Kobayashi
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+
+
+uri-js
+BSD-2-Clause
+Copyright 2011 Gary Court. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY GARY COURT "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL GARY COURT OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of Gary Court.
+
+
+uuid
+MIT
+The MIT License (MIT)
+
+Copyright (c) 2010-2020 Robert Kieffer and other contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/src/github/common.ts b/src/github/common.ts
index f0c6882..c0fdb82 100644
--- a/src/github/common.ts
+++ b/src/github/common.ts
@@ -1,44 +1,55 @@
-import fetch, {Response} from 'node-fetch'
+import https from 'https'
 import {getEnvVar} from '../common/env'
 
+const request = async (
+  method: string,
+  url: string,
+  body: unknown
+): Promise<unknown> =>
+  new Promise((resolve, reject) => {
+    const req = https.request(url, {
+      method,
+      headers: {
+        Authorization: `token ${getEnvVar('GITHUB_TOKEN')}`,
+        Accept: 'application/json',
+        'User-Agent': 'Connehito/deploying-marker-with-issue'
+      },
+      timeout: 10000
+    })
+    req.on('response', res => {
+      if (res.statusCode !== 200) {
+        throw new Error(`Invalid status code: ${res.statusCode}`)
+      }
+      try {
+        const chunks: Buffer[] = []
+        res.on('data', chunk => {
+          chunks.push(chunk)
+        })
+        res.on('end', () => {
+          resolve(JSON.parse(Buffer.concat(chunks).toString('utf-8')))
+        })
+        res.on('error', reject)
+      } catch (err) {
+        reject(err)
+      }
+    })
+    req.on('error', reject)
+    req.write(body)
+    req.end()
+  })
+
 export const fetchGitHubGraphQL = async (
   query: string,
   variables: Record<string, unknown> = {}
-): Promise<unknown> => {
-  const response = await fetch('https://api.github.com/graphql', {
-    method: 'POST',
-    headers: {
-      Authorization: `token ${getEnvVar('GITHUB_TOKEN')}`
-    },
-    body: JSON.stringify({query, variables})
-  })
-  if (response.status !== 200)
-    throw new Error(`Invalid status code: ${response.status}`)
-  const body = await response.json()
-  if (typeof body === 'object' && body != null && 'errors' in body) {
-    throw new Error(`Response has error: ${JSON.stringify(body)}`)
-  }
-  return body
-}
+): Promise<unknown> =>
+  request(
+    'POST',
+    'https://api.github.com/graphql',
+    JSON.stringify({query, variables})
+  )
 
 export const fetchGitHubApiV3 = async (
   method: string,
   path: string,
   body = ''
-): Promise<Response> => {
-  const response = await fetch(`https://api.github.com${path}`, {
-    method,
-    headers: {
-      Accept: 'application/vnd.github.v3+json',
-      Authorization: `token ${getEnvVar('GITHUB_TOKEN')}`
-    },
-    body
-  })
-  if (!response.ok)
-    throw new Error(
-      `Invalid status code: ${response.status} => ${
-        (await response.text()) ?? '(Empty body)'
-      }`
-    )
-  return response
-}
+): Promise<unknown> => request(method, `https://api.github.com${path}`, body)
diff --git a/src/github/label-create.ts b/src/github/label-create.ts
index 10c64bc..2659b61 100644
--- a/src/github/label-create.ts
+++ b/src/github/label-create.ts
@@ -58,7 +58,7 @@ export const createLabel = async (
   args: Args
 ): Promise> => {
   const {repoOwner, repoName, labelName, labelColor, labelDescription} = args
-  const response = await fetchGitHubApiV3(
+  const data = await fetchGitHubApiV3(
     'POST',
     `/repos/${repoOwner}/${repoName}/labels`,
     JSON.stringify({
@@ -67,5 +67,5 @@ export const createLabel = async (
       description: labelDescription ?? DefaultLabelDescription
     })
   )
-  return buildValidator(Schema)(await response.json())
+  return buildValidator(Schema)(data)
 }
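
For reference, a minimal usage sketch of the rewritten helpers, not part of the diff above: the import path, repository name, label payload, and GraphQL query are illustrative assumptions; only the GITHUB_TOKEN read through getEnvVar comes from the change itself. Both helpers now resolve directly with the parsed JSON body, so there is no separate response.json() step as there was with node-fetch.

// Usage sketch (illustrative only, not part of the change above).
// Assumes GITHUB_TOKEN is set in the environment and that this file lives in src/.
import {fetchGitHubApiV3, fetchGitHubGraphQL} from './github/common'

const main = async (): Promise<void> => {
  // REST v3: the helper resolves with the decoded JSON response body.
  const label = await fetchGitHubApiV3(
    'POST',
    '/repos/some-owner/some-repo/labels', // placeholder repository
    JSON.stringify({name: 'deploying', color: 'ff0000'})
  )

  // GraphQL: query and variables are serialized into the POST body by the helper.
  const viewer = await fetchGitHubGraphQL('query { viewer { login } }')

  console.log(label, viewer)
}

main().catch(err => {
  // Rejections cover network errors, JSON parse failures, and request errors.
  process.exitCode = 1
  console.error(err)
})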
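
A second sketch, likewise an illustration rather than part of this change: a throw inside the 'response' listener runs outside the Promise executor, so it is not converted into a rejection of the promise returned by request. If the intent is for a caller's await to observe non-200 responses, a variant that rejects explicitly could look like the following; the name requestRejecting and the error message format are assumptions modelled on the helper in src/github/common.ts.

import https from 'https'
import {getEnvVar} from '../common/env'

// Illustrative variant (not the code in this PR): reject the promise on a
// non-200 status instead of throwing inside the 'response' listener.
export const requestRejecting = async (
  method: string,
  url: string,
  body: string
): Promise<unknown> =>
  new Promise((resolve, reject) => {
    const req = https.request(url, {
      method,
      headers: {
        Authorization: `token ${getEnvVar('GITHUB_TOKEN')}`,
        Accept: 'application/json',
        'User-Agent': 'Connehito/deploying-marker-with-issue'
      },
      timeout: 10000
    })
    req.on('response', res => {
      const chunks: Buffer[] = []
      res.on('data', chunk => {
        chunks.push(chunk)
      })
      res.on('end', () => {
        const text = Buffer.concat(chunks).toString('utf-8')
        if (res.statusCode !== 200) {
          // reject() settles the promise; a throw here would surface as an
          // uncaught exception instead of a rejection the caller can handle.
          reject(
            new Error(`Invalid status code: ${res.statusCode} => ${text || '(Empty body)'}`)
          )
          return
        }
        try {
          resolve(JSON.parse(text))
        } catch (err) {
          reject(err)
        }
      })
      res.on('error', reject)
    })
    req.on('error', reject)
    req.write(body)
    req.end()
  })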